code
stringlengths
501
5.19M
package
stringlengths
2
81
path
stringlengths
9
304
filename
stringlengths
4
145
from parser import AossTowerParse
from WriteToNetCDF import AossTowerWrite as ATW
from Util import Util
from datetime import datetime as date
from datetime import timedelta as delta
import os
import shutil


# The purpose of this function is to move the newly created netcdf file
# into a destination directory derived from the file's date.
# @param writer: an AossTowerWrite whose .date and .ncFileName are read
# no returns
def moveFile(writer):
    # build the destination path from the file's date
    myUtil = Util()
    path = myUtil.destinationPath(writer.date)

    # if the path does not exist, create it
    if not os.path.exists(path):
        print("creating path...")
        os.makedirs(path)
        print("path created")

    # notify user
    print("path loaded")

    # os.path.join is safe whether or not the path has a trailing slash
    shutil.move(writer.ncFileName, os.path.join(path, writer.ncFileName))
    print("file moved")


# The purpose of this function is to write one netcdf file per day in the
# inclusive date range, then move each file into its dated directory.
# Days whose ascii source file is missing are reported and skipped instead
# of aborting the whole run (this was the previously noted
# "File Not Found exception was thrown" bug).
# @param startDate: starting datetime obj
# @param endDate: ending datetime obj
# no return
def writeRange(startDate, endDate):
    # holds the current date time in the loop
    cur_dt = startDate

    # for each day in the range, write a netcdf file
    for day in range((endDate - startDate).days + 1):
        try:
            # parse the day's ascii file and write it out
            myParser = AossTowerParse(cur_dt)
            dictData = myParser.storeValues()

            writer = ATW(dictData, cur_dt)
            writer.write()
            moveFile(writer)
        except (IOError, OSError) as err:
            # no ascii source for this day -- report and keep going
            print("skipping " + str(cur_dt) + ": " + str(err))

        # forwards current datetime by a day
        cur_dt += delta(days=1)


# The purpose of this function is to convert all ascii files into netcdf
# files, from the first known observation through today.
# runs for as long as memory holds
# no paramters or returns
# NOTE: the name shadows the builtin all(); kept for caller compatibility
def all():
    writeRange(date(2003, 5, 28, 20, 30, 46), date.today())


# The purpose of this function is to take yesterday's ascii file and
# convert it to netcdf.
# no parameters or returns
def convertYesterdayFile():
    myUtil = Util()
    yesterdaysDate = myUtil.getYesterdaysDTobj()

    myParser = AossTowerParse(yesterdaysDate)
    dictData = myParser.storeValues()

    myWriter = ATW(dictData, yesterdaysDate)
    myWriter.write()
    moveFile(myWriter)

#writeRange(date(2003,5,28,20,30,46), date(2003,7,15,0,0,0))
#writeRange(date(2003,7,27,0,0,0), date(2003,7,27,0,0,0))
#all()
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/bundler.py
bundler.py
from netCDF4 import Dataset
import numpy as np
from parser import AossTowerParse as parser
from aosstower.l00 import parser as atParser
from Util import Util as u
import sys
import shutil
import os
import errno
from datetime import datetime as dt
from aosstower import station as stion
import platform


class AossTowerWrite(object):
    # The purpose of this function is to use the date to name and create
    # the nc file; also initializes values.
    # @param dictData: dict of numpy arrays plus a 'stamp' dict of
    #        frame-number (int, starting at 0) -> datetime
    # @param date: fallback datetime used when the ascii file was empty
    def __init__(self, dictData, date):
        self.Util = u()
        self.dictData = dictData

        if len(self.dictData['stamp']) > 0:
            # frame numbers start at 0, so index 0 is the first
            # observation of the day.  (This used to read index 1, which
            # skipped the first frame and raised KeyError on
            # single-frame files.)
            self.date = self.dictData['stamp'][0]
        else:
            self.date = date

        self.ncFileName = self.Util.ncFileName(self.date)
        self.ncFile = Dataset(self.ncFileName, 'w')

    # The purpose of this function is to write the dimensions for the nc
    # file: one 'time' entry per frame, plus a fixed string length.
    # no parameters, no returns
    def writeDim(self):
        self.ncFile.createDimension('time', len(self.dictData['stamp']))
        self.ncFile.createDimension('strlen', 256)

    # The purpose of this function is to create every variable, write its
    # attributes, and write the global attributes.
    # no parameters, no returns
    def writeVarAttributes(self):
        database = atParser.database

        # create coordinate var lon
        lon = self.ncFile.createVariable('lon', np.float32,
                                         fill_value=float(-999))
        lon.valid_min = '-180L'
        lon.standard_name = 'longitude'
        lon.units = 'degrees_east'
        lon.valid_max = '180L'

        # create coordinate var lat
        lat = self.ncFile.createVariable('lat', np.float32,
                                         fill_value=float(-999))
        lat.valid_min = '-90L'
        lat.standard_name = 'latitude'
        lat.units = 'degrees_north'
        lat.valid_max = '90L'

        # create var dependent on strlen
        stationName = self.ncFile.createVariable('station_name', 'c',
                                                 dimensions=('strlen'),
                                                 fill_value="-")
        stationName.long_name = 'station name'
        stationName.cf_role = 'timeseries_id'

        # create coordinate var alt
        alt = self.ncFile.createVariable('alt', np.float32,
                                         fill_value=float(-999))
        alt.positive = 'up'
        alt.long_name = 'vertical distance'
        alt.standard_name = 'height'
        alt.units = 'm'
        alt.axis = 'Z'

        # create base_time
        baseTime = self.ncFile.createVariable('base_time', np.float32,
                                              fill_value=float(-999))
        baseTime.long_name = 'base time as unix timestamp'
        baseTime.standard_name = 'time'
        baseTime.units = 'seconds since 1970-01-01 00:00:00 0:00'
        baseTime.string = self.Util.dateFormat(self.date) + ' 00:00:00Z'

        # create time
        time = self.ncFile.createVariable('time', np.float32,
                                          dimensions=('time'),
                                          fill_value=float(-999))
        time.long_name = 'time offset from midnight UTC'
        time.standard_name = 'time'
        time.units = 'seconds since ' + self.Util.dateFormat(self.date) + ' 00:00:00Z'

        # one float32 variable per database channel, described by the
        # database tuple fields (standard name, description, units)
        for key in database:
            if key == 'stamp':
                continue
            var = database[key]
            channel = self.ncFile.createVariable(key, np.float32,
                                                 dimensions=('time'),
                                                 fill_value=float(-999))
            channel.standard_name = var[1]
            channel.description = var[3]
            channel.units = var[4]

        # create global attributes -- these might change
        # NOTE(review): CF expects the attribute name 'Conventions'
        # (capital C); lowercase kept to preserve existing output files
        self.ncFile.source = 'surface observation'
        self.ncFile.conventions = 'CF-1.6'
        self.ncFile.institution = 'UW SSEC'
        self.ncFile.featureType = 'timeSeries'

        # generate history
        self.ncFile.history = (' '.join(platform.uname()) + " " +
                               os.path.basename(__file__))

    # helper: seconds from the unix epoch to midnight of someDate's day
    # NOTE(review): fromtimestamp(0) is local time, so this is only a
    # true unix timestamp on a UTC host -- confirm before changing
    def _baseTimeSeconds(self, someDate):
        dayStart = dt(someDate.year, someDate.month, someDate.day)
        return (dayStart - dayStart.fromtimestamp(0)).total_seconds()

    # helper: write lon/lat/alt and the station name characters
    def _writeCoordinates(self, fileVar):
        fileVar['lon'].assignValue(stion.LONGITUDE)
        fileVar['lat'].assignValue(stion.LATITUDE)
        fileVar['alt'].assignValue(self.Util.ALTITUDE())

        # this name might change later
        stationsName = ("AOSS Tower")

        # write as an array of single characters, the dtype of the
        # station_name variable
        toNumpy = np.asarray(list(stationsName))
        fileVar['station_name'][0:len(toNumpy)] = toNumpy

    # The purpose of this function is to fill the file for an empty ascii
    # day: every data variable gets a single -999.f sample.
    # no parameters or returns
    def fillValues(self):
        baseTimeValue = self._baseTimeSeconds(self.date)

        fileVar = self.ncFile.variables
        self._writeCoordinates(fileVar)

        # fill every data channel with the missing value; 'stamp' has no
        # nc variable of its own (it previously raised KeyError here)
        for key in atParser.database:
            if key == 'stamp':
                continue
            fileVar[key][0] = float(-999)

        # writes in base time and time
        fileVar['base_time'].assignValue(baseTimeValue)
        fileVar['time'][0] = float(-999)

    # The purpose of this function is to take the data from a full ascii
    # file and write it into the netcdf file.
    # no parameters, no returns
    def writeData(self):
        # dict of frame-number -> datetime objects
        stamp = self.dictData['stamp']

        # per-frame offsets from midnight, seconds
        timeNumpy = np.empty(len(stamp), dtype='float32')

        dayStart = dt(stamp[0].year, stamp[0].month, stamp[0].day)
        baseTimeValue = self._baseTimeSeconds(stamp[0])

        # fill the offsets by frame number rather than dict iteration
        # order, which is not guaranteed to be ascending and previously
        # could scramble time values relative to the data arrays
        for frameNum in range(len(stamp)):
            timeNumpy[frameNum] = (stamp[frameNum] - dayStart).total_seconds()

        fileVar = self.ncFile.variables
        self._writeCoordinates(fileVar)

        # writes data into file; 'stamp' maps to base_time and time
        for key in self.dictData:
            if key == 'stamp':
                fileVar['base_time'].assignValue(baseTimeValue)
                fileVar['time'][:] = timeNumpy
                continue
            fileVar[key][:] = self.dictData[key]

    # The purpose of this function is to write all data into the nc file
    # and close it.
    # no parameters, no returns
    def write(self):
        self.writeDim()
        self.writeVarAttributes()

        if not self.dictData['stamp']:
            # empty ascii file: write fill values only
            self.fillValues()
        else:
            self.writeData()

        # tell user data succeeded (typo "succesfully" fixed)
        print("data written successfully")

        # closes file
        self.ncFile.close()
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/WriteToNetCDF.py
WriteToNetCDF.py
from aosstower.l00 import parser
import numpy as np
import abc
from Util import Util as u


class AossTowerParse(object):
    # Parses one day's ascii tower file into numpy arrays.
    # @param date: datetime of the day to parse
    def __init__(self, date):
        self.Util = u()
        self.FILENAME = self.Util.FILENAME(date)

    # The purpose of this function is to update the data dictionary with
    # one frame's values.  Missing channels are filled with -999.f.
    # @param storeData: dict of channel -> numpy array
    # @param frame: the frame the parser is on
    # @param counter: the frame's number (index into each array)
    # @return updated dictionary of data
    def saveData(self, storeData, frame, counter):
        for key in storeData:
            # stamp is handled by createStampDict
            if key == 'stamp':
                continue
            try:
                storeData[key][counter] = frame[key]
            except KeyError:
                # no data was found for this channel in this frame
                storeData[key][counter] = float(-999)
        return storeData

    # The purpose of this function is to record a frame's stamp in the
    # date time dictionary.
    # @param frame: current frame
    # @param counter: the frame's number
    # @param stampDict: dict of frame-number -> datetime, updated in place
    def createStampDict(self, frame, counter, stampDict):
        stampDict[counter] = frame['stamp']

    # The purpose of this function is to count the frames in the file,
    # which sets the length of the numpy arrays.
    # @return the total number of frames for that file
    def calcNumItr(self):
        # count without holding all frames in memory
        return sum(1 for frame in parser.read_frames(self.FILENAME))

    # The purpose of this function is to parse all frames into one big
    # dict of channel arrays plus a 'stamp' dict.
    # no parameters
    # @return dict of channel -> numpy array, plus 'stamp' -> stamp dict
    def storeValues(self):
        # calculate total amount of frames
        numItr = self.calcNumItr()

        # pre-fill every slot with the missing value; np.arange was used
        # before, leaving misleading 0..n-1 values in unwritten slots
        storeData = {}
        for key in parser.database:
            storeData[key] = np.full(numItr, -999, dtype='float32')

        stampDict = {}
        counter = 0

        # for every frame, store data and stamp
        for frame in parser.read_frames(self.FILENAME):
            storeData = self.saveData(storeData, frame, counter)
            self.createStampDict(frame, counter, stampDict)
            counter += 1

        # tack the stamp dict onto the store data
        storeData["stamp"] = stampDict
        return storeData
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/parser.py
parser.py
from datetime import datetime
from datetime import timedelta

"""
This class takes dates and generates paths based upon
those date time objects.
"""


class Util(object):
    # the purpose of this function is to return the filename that the
    # parser is going to use, based upon a datetime object
    # @param date: datetime object
    # @return absolute path to the day's rig_tower ascii file
    def FILENAME(self, date):
        # strftime zero-pads %m/%d, replacing the manual "0" + str()
        # padding that was duplicated across every method here
        return date.strftime(
            '/mnt/inst-data/aoss-tower/%Y/%m/rig_tower.%Y-%m-%d.ascii')

    # create destination path based on the date
    # @param date: datetime object
    # @return filepath YYYY/YYYY-MM/ under the fixed output root
    def destinationPath(self, date):
        return date.strftime("/data3/kgao/testAll15/%Y/%Y-%m/")

    # create netCDF4 file name
    # @param date: datetime object
    # @return file name rig-tower.YYYY-MM-DD.nc
    def ncFileName(self, date):
        return date.strftime("rig-tower.%Y-%m-%d.nc")

    # altitude value is not exact
    # @return altitude value (meters)
    # no parameters
    def ALTITUDE(self):
        return 328

    # create a date format from datetime
    # @param date: datetime obj
    # @return YYYY-MM-DD
    def dateFormat(self, date):
        return date.strftime("%Y-%m-%d")

    # The purpose of this function is to generate yesterday's datetime
    # obj.
    # no parameters
    # @return yesterday's datetime object
    def getYesterdaysDTobj(self):
        return datetime.today() - timedelta(1)
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/Util.py
Util.py
from netCDF4 import Dataset
import numpy as np
from ATP import AossTowerParse as parser
from aosstower.l00 import parser as atParser
from Util import Util as u
import sys
import shutil
import os
import errno
from datetime import datetime as dt
from aosstower import station as stion
import platform


class AossTowerWrite(object):
    # The purpose of this function is to use the date to name and create
    # the nc file; also initializes values.
    # @param dictData: dict of numpy arrays plus a 'stamp' dict of
    #        frame-number (int, starting at 0) -> datetime
    # @param date: fallback datetime used when the ascii file was empty
    def __init__(self, dictData, date):
        self.Util = u()
        self.dictData = dictData

        if len(self.dictData['stamp']) > 0:
            # frame numbers start at 0, so index 0 is the first
            # observation of the day.  (This used to read index 1, which
            # skipped the first frame and raised KeyError on
            # single-frame files.)
            self.date = self.dictData['stamp'][0]
        else:
            self.date = date

        self.ncFileName = self.Util.ncFileName(self.date)
        self.ncFile = Dataset(self.ncFileName, 'w')

    # The purpose of this function is to write the dimensions for the nc
    # file: one 'time' entry per frame, plus a fixed string length.
    # no parameters, no returns
    def writeDim(self):
        self.ncFile.createDimension('time', len(self.dictData['stamp']))
        self.ncFile.createDimension('strlen', 256)

    # The purpose of this function is to create every variable, write its
    # attributes, and write the global attributes.
    # no parameters, no returns
    def writeVarAttributes(self):
        database = atParser.database

        # create coordinate var lon
        lon = self.ncFile.createVariable('lon', np.float32,
                                         fill_value=float(-999))
        lon.valid_min = '-180L'
        lon.standard_name = 'longitude'
        lon.units = 'degrees_east'
        lon.valid_max = '180L'

        # create coordinate var lat
        lat = self.ncFile.createVariable('lat', np.float32,
                                         fill_value=float(-999))
        lat.valid_min = '-90L'
        lat.standard_name = 'latitude'
        lat.units = 'degrees_north'
        lat.valid_max = '90L'

        # create var dependent on strlen
        stationName = self.ncFile.createVariable('station_name', 'c',
                                                 dimensions=('strlen'),
                                                 fill_value="-")
        stationName.long_name = 'station name'
        stationName.cf_role = 'timeseries_id'

        # create coordinate var alt
        alt = self.ncFile.createVariable('alt', np.float32,
                                         fill_value=float(-999))
        alt.positive = 'up'
        alt.long_name = 'vertical distance'
        alt.standard_name = 'height'
        alt.units = 'm'
        alt.axis = 'Z'

        # create base_time
        baseTime = self.ncFile.createVariable('base_time', np.float32,
                                              fill_value=float(-999))
        baseTime.long_name = 'base time as unix timestamp'
        baseTime.standard_name = 'time'
        baseTime.units = 'seconds since 1970-01-01 00:00:00 0:00'
        baseTime.string = self.Util.dateFormat(self.date) + ' 00:00:00Z'

        # create time
        time = self.ncFile.createVariable('time', np.float32,
                                          dimensions=('time'),
                                          fill_value=float(-999))
        time.long_name = 'time offset from midnight UTC'
        time.standard_name = 'time'
        time.units = 'seconds since ' + self.Util.dateFormat(self.date) + ' 00:00:00Z'

        # one float32 variable per database channel, described by the
        # database tuple fields (standard name, description, units)
        for key in database:
            if key == 'stamp':
                continue
            var = database[key]
            channel = self.ncFile.createVariable(key, np.float32,
                                                 dimensions=('time'),
                                                 fill_value=float(-999))
            channel.standard_name = var[1]
            channel.description = var[3]
            channel.units = var[4]

        # create global attributes -- these might change
        # NOTE(review): CF expects the attribute name 'Conventions'
        # (capital C); lowercase kept to preserve existing output files
        self.ncFile.source = 'surface observation'
        self.ncFile.conventions = 'CF-1.6'
        self.ncFile.institution = 'UW SSEC'
        self.ncFile.featureType = 'timeSeries'

        # generate history
        self.ncFile.history = (' '.join(platform.uname()) + " " +
                               os.path.basename(__file__))

    # helper: seconds from the unix epoch to midnight of someDate's day
    # NOTE(review): fromtimestamp(0) is local time, so this is only a
    # true unix timestamp on a UTC host -- confirm before changing
    def _baseTimeSeconds(self, someDate):
        dayStart = dt(someDate.year, someDate.month, someDate.day)
        return (dayStart - dayStart.fromtimestamp(0)).total_seconds()

    # helper: write lon/lat/alt and the station name characters
    def _writeCoordinates(self, fileVar):
        fileVar['lon'].assignValue(stion.LONGITUDE)
        fileVar['lat'].assignValue(stion.LATITUDE)
        fileVar['alt'].assignValue(self.Util.ALTITUDE())

        # this name might change later
        stationsName = ("AOSS Tower")

        # write as an array of single characters, the dtype of the
        # station_name variable
        toNumpy = np.asarray(list(stationsName))
        fileVar['station_name'][0:len(toNumpy)] = toNumpy

    # The purpose of this function is to fill the file for an empty ascii
    # day: every data variable gets a single -999.f sample.
    # no parameters or returns
    def fillValues(self):
        baseTimeValue = self._baseTimeSeconds(self.date)

        fileVar = self.ncFile.variables
        self._writeCoordinates(fileVar)

        # fill every data channel with the missing value; 'stamp' has no
        # nc variable of its own (it previously raised KeyError here)
        for key in atParser.database:
            if key == 'stamp':
                continue
            fileVar[key][0] = float(-999)

        # writes in base time and time
        fileVar['base_time'].assignValue(baseTimeValue)
        fileVar['time'][0] = float(-999)

    # The purpose of this function is to take the data from a full ascii
    # file and write it into the netcdf file.
    # no parameters, no returns
    def writeData(self):
        # dict of frame-number -> datetime objects
        stamp = self.dictData['stamp']

        # per-frame offsets from midnight, seconds
        timeNumpy = np.empty(len(stamp), dtype='float32')

        dayStart = dt(stamp[0].year, stamp[0].month, stamp[0].day)
        baseTimeValue = self._baseTimeSeconds(stamp[0])

        # fill the offsets by frame number rather than dict iteration
        # order, which is not guaranteed to be ascending and previously
        # could scramble time values relative to the data arrays
        for frameNum in range(len(stamp)):
            timeNumpy[frameNum] = (stamp[frameNum] - dayStart).total_seconds()

        fileVar = self.ncFile.variables
        self._writeCoordinates(fileVar)

        # writes data into file; 'stamp' maps to base_time and time
        for key in self.dictData:
            if key == 'stamp':
                fileVar['base_time'].assignValue(baseTimeValue)
                fileVar['time'][:] = timeNumpy
                continue
            fileVar[key][:] = self.dictData[key]

    # The purpose of this function is to write all data into the nc file
    # and close it.
    # no parameters, no returns
    def write(self):
        self.writeDim()
        self.writeVarAttributes()

        if not self.dictData['stamp']:
            # empty ascii file: write fill values only
            self.fillValues()
        else:
            self.writeData()

        # tell user data succeeded (typo "succesfully" fixed)
        print("data written successfully")

        # closes file
        self.ncFile.close()
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/test/ATW.py
ATW.py
from ATP import AossTowerParse
from ATW import AossTowerWrite as ATW
from Util import Util
from datetime import datetime as date
from datetime import timedelta as delta
import os
import shutil


# The purpose of this function is to move the newly created netcdf file
# into a destination directory derived from the file's date.
# @param writer: an AossTowerWrite whose .date and .ncFileName are read
# no returns
def moveFile(writer):
    # build the destination path from the file's date
    myUtil = Util()
    path = myUtil.destinationPath(writer.date)

    # if the path does not exist, create it
    if not os.path.exists(path):
        print("creating path...")
        os.makedirs(path)
        print("path created")

    # notify user
    print("path loaded")

    # os.path.join is safe whether or not the path has a trailing slash
    shutil.move(writer.ncFileName, os.path.join(path, writer.ncFileName))
    print("file moved")


# The purpose of this function is to write one netcdf file per day in the
# inclusive date range, then move each file into its dated directory.
# Days whose ascii source file is missing are reported and skipped instead
# of aborting the whole run (this was the previously noted
# "File Not Found exception was thrown" bug).
# @param startDate: starting datetime obj
# @param endDate: ending datetime obj
# no return
def writeRange(startDate, endDate):
    # holds the current date time in the loop
    cur_dt = startDate

    # for each day in the range, write a netcdf file
    for day in range((endDate - startDate).days + 1):
        try:
            # parse the day's ascii file and write it out
            myParser = AossTowerParse(cur_dt)
            dictData = myParser.storeValues()

            writer = ATW(dictData, cur_dt)
            writer.write()
            moveFile(writer)
        except (IOError, OSError) as err:
            # no ascii source for this day -- report and keep going
            print("skipping " + str(cur_dt) + ": " + str(err))

        # forwards current datetime by a day
        cur_dt += delta(days=1)


# The purpose of this function is to convert all ascii files into netcdf
# files, from the first known observation through today.
# runs for as long as memory holds
# no paramters or returns
# NOTE: the name shadows the builtin all(); kept for caller compatibility
def all():
    writeRange(date(2003, 5, 28, 20, 30, 46), date.today())


# The purpose of this function is to take yesterday's ascii file and
# convert it to netcdf.
# no parameters or returns
def convertYesterdayFile():
    myUtil = Util()
    yesterdaysDate = myUtil.getYesterdaysDTobj()

    myParser = AossTowerParse(yesterdaysDate)
    dictData = myParser.storeValues()

    myWriter = ATW(dictData, yesterdaysDate)
    myWriter.write()
    moveFile(myWriter)

#writeRange(date(2003,5,28,20,30,46), date(2003,7,15,0,0,0))
#writeRange(date(2003,7,27,0,0,0), date(2003,7,27,0,0,0))
#all()
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/test/bundler.py
bundler.py
from aosstower.l00 import parser
import numpy as np
import abc
from Util import Util as u


class AossTowerParse(object):
    # Parses one day's ascii tower file into numpy arrays.
    # @param date: datetime of the day to parse
    def __init__(self, date):
        self.Util = u()
        self.FILENAME = self.Util.FILENAME(date)

    # The purpose of this function is to update the data dictionary with
    # one frame's values.  Missing channels are filled with -999.f.
    # @param storeData: dict of channel -> numpy array
    # @param frame: the frame the parser is on
    # @param counter: the frame's number (index into each array)
    # @return updated dictionary of data
    def saveData(self, storeData, frame, counter):
        for key in storeData:
            # stamp is handled by createStampDict
            if key == 'stamp':
                continue
            try:
                storeData[key][counter] = frame[key]
            except KeyError:
                # no data was found for this channel in this frame
                storeData[key][counter] = float(-999)
        return storeData

    # The purpose of this function is to record a frame's stamp in the
    # date time dictionary.
    # @param frame: current frame
    # @param counter: the frame's number
    # @param stampDict: dict of frame-number -> datetime, updated in place
    def createStampDict(self, frame, counter, stampDict):
        stampDict[counter] = frame['stamp']

    # The purpose of this function is to count the frames in the file,
    # which sets the length of the numpy arrays.
    # @return the total number of frames for that file
    def calcNumItr(self):
        # count without holding all frames in memory
        return sum(1 for frame in parser.read_frames(self.FILENAME))

    # The purpose of this function is to parse all frames into one big
    # dict of channel arrays plus a 'stamp' dict.
    # no parameters
    # @return dict of channel -> numpy array, plus 'stamp' -> stamp dict
    def storeValues(self):
        # calculate total amount of frames
        numItr = self.calcNumItr()

        # pre-fill every slot with the missing value; np.arange was used
        # before, leaving misleading 0..n-1 values in unwritten slots
        storeData = {}
        for key in parser.database:
            storeData[key] = np.full(numItr, -999, dtype='float32')

        stampDict = {}
        counter = 0

        # for every frame, store data and stamp
        for frame in parser.read_frames(self.FILENAME):
            storeData = self.saveData(storeData, frame, counter)
            self.createStampDict(frame, counter, stampDict)
            counter += 1

        # tack the stamp dict onto the store data
        storeData["stamp"] = stampDict
        return storeData
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/test/ATP.py
ATP.py
from datetime import datetime
from datetime import timedelta
import os

"""
This class takes dates and generates paths based upon
those date time objects.
"""


class Util(object):
    # the purpose of this function is to return the filename that the
    # parser is going to use, based upon a datetime object
    # @param date: datetime object
    # @return absolute path to the day's rig_tower ascii file
    def FILENAME(self, date):
        # strftime zero-pads %m/%d, replacing the manual "0" + str()
        # padding that was duplicated across every method here
        return date.strftime(
            '/mnt/inst-data/aoss-tower/%Y/%m/rig_tower.%Y-%m-%d.ascii')

    # create destination path based on the date
    # @param date: datetime object
    # @return filepath YYYY/YYYY-MM/ under the current working directory
    #         (this is the test variant of Util)
    def destinationPath(self, date):
        return os.getcwd() + '/' + date.strftime("%Y/%Y-%m/")

    # create netCDF4 file name
    # @param date: datetime object
    # @return file name rig-tower.YYYY-MM-DD.nc
    def ncFileName(self, date):
        return date.strftime("rig-tower.%Y-%m-%d.nc")

    # altitude value is not exact
    # @return altitude value (meters)
    # no parameters
    def ALTITUDE(self):
        return 328

    # create a date format from datetime
    # @param date: datetime obj
    # @return YYYY-MM-DD
    def dateFormat(self, date):
        return date.strftime("%Y-%m-%d")

    # The purpose of this function is to generate yesterday's datetime
    # obj.
    # no parameters
    # @return yesterday's datetime object
    def getYesterdaysDTobj(self):
        return datetime.today() - timedelta(1)
Aoss_Tower_a1_Conversion
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/convertFromASCIIToNETCDF/test/Util.py
Util.py
# AP Reusable Package AP-Reusable-Package is a versatile and powerful Python package that contains many useful functions and classes for a wide range of projects. Some of the key features of this package include: <li><b>Get Client IP:</b> A function that retrieves the IP address of the client making a request to a server. This can be useful for logging and security purposes.</li> <br> <li><b>Get Client Host:</b> A function that retrieves the hostname of the client making a request to a server. This can be useful for identifying the origin of a request.</li> <br> <li><b>Fernet Encryption/Decryption:</b> A set of functions that provide easy-to-use encryption and decryption functionality using the Fernet encryption algorithm. This can be useful for protecting sensitive data in transit or at rest.</li> <br> <li><b>Extended Enumerator:</b> A class that extends the built-in Python Enumerator class, providing additional functionality and convenience methods. This can be useful for working with large sets of data.</li> <br> <li><b>Custom Renderer for Rest Framework:</b> A custom renderer that provides additional flexibility and customization options for rendering RESTful API responses. This can be useful for building complex APIs that require a high degree of control over the output format.</li> Overall, AP-Reusable-Package is a valuable resource for developers working on a wide range of projects. Its many features and convenient classes make it easy to build robust and secure applications quickly and efficiently. ### For updating version Options are major, minor, patch ``` bumpversion [option] --allow-dirty ```
ApReusable
/ApReusable-1.3.2.tar.gz/ApReusable-1.3.2/README.md
README.md
# Auto-Python-Tester [![Python Versions](https://img.shields.io/pypi/pyversions/ApTester.svg)](https://pypi.org/project/ApTester) ![PyPI - Wheel](https://img.shields.io/pypi/wheel/ApTester) ![PyPI - Implementation](https://img.shields.io/pypi/implementation/ApTester) ![PyPI - Version](https://img.shields.io/badge/version-0.1.1-blue) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Downloads](https://static.pepy.tech/personalized-badge/aptester?period=total&units=international_system&left_color=grey&right_color=blue&left_text=Downloads)](https://pepy.tech/project/aptester) Pythonで競技プログラミングをするときに、テストを自動で実行してくれるパッケージです。 テストに成功すると、成功表示がでて ![](image/README/1631254786502.png) テスト失敗すると、失敗表示が出てテストのデータとソースの結果を表示してくれます。 ![](image/README/1631255094706.png) # 注意点 おそらく、 Python 3.5以上対応です。 # インストール方法 ```bash $ pip install ApTester ```` # 使い方 使うには二つのファイルが必要になります。 - 実行するPythonファイル - テストが書いてあるファイル(*.txt) ```bash $ Aptester Testcases.txt main.py ``` ※-m を使っていなければ`aptester`でも可能です。 ## pythonファイルの書き方 普通に入力と出力のあるファイルであれば問題ないです ## テストケースの書き方 ```txt -テスト名- テストの標準入力 _テスト名_ テストの正しい出力 END ``` # 簡単なテンプレート ## Pythonファイル ```py num1, num2 = map(int, input().split()) num3, num4 = map(int, input().split()) print(num1 + num2) print(num3 + num4) ``` ## テストケースのテンプレート ```txt -TEST1- 1 2 2 5 _TEST1_ 3 7 END -TEST2- 2 3 3 3 _TEST2_ 5 6 END ``` ### `-テスト名-` ハイフンの間にテスト名を決めて書いてください 一文字以上であればなんでも大丈夫です。 ### `_テスト名_` `-テスト名-`で決めたものと同じ物を書いてください。 `-テスト名-`と`_テスト名_`間は標準入力が入る場所です ### テストの標準入力 実行するときに必要な入力を書いてください。 ### テストの正しい出力 正解の出力を書いてください。 ### ENDについて テスト名とENDの間にはコメントを書くことも可能です
ApTester
/ApTester-0.1.4.tar.gz/ApTester-0.1.4/README.md
README.md
import subprocess
import pathlib
import os
import re
import sys


class Tester:
    """Runs the Python file under test as a subprocess and captures its output."""

    def __init__(self, python_path, input_path, executable):
        self.python_path = python_path    # path of the Python file under test
        self.input_path = input_path      # path of the test-case file
        self.executable = executable      # interpreter used to run the file

    @staticmethod
    def check_file(file) -> bool:
        """Return True if *file* resolves to an existing regular file."""
        file = pathlib.Path(file).resolve()
        return os.path.isfile(file)

    def gets_stdout(self, input_text) -> list:
        """Run the Python file, feed *input_text* to its stdin, and return
        the captured stdout as a list of byte strings (one per line).

        If the child process exits with a non-zero status, its stderr is
        printed and the whole tester exits.

        Fixed: the return annotation said ``-> str`` but the method has
        always returned ``bytes.splitlines()`` — a list of byte strings.
        """
        try:
            stdout = subprocess.check_output(
                [self.executable, self.python_path],
                input=input_text.encode('utf-8'),
                stderr=subprocess.PIPE
            )
            return stdout.splitlines()
        except subprocess.CalledProcessError as error:
            # The file under test crashed: show its stderr and stop.
            print("実行ファイルにエラーがあります")
            for i in error.stderr.splitlines():
                print(i.decode('utf-8'))
            sys.exit()


class ReadFiles:
    """Reads a test-case file and splits it into individual test cases."""

    @staticmethod
    def read(file_path) -> str:
        """Return the whole content of *file_path*.

        Fixed: use a context manager so the handle is closed even when
        read() raises (the original leaked the handle on error).
        """
        with open(file_path, 'r') as file:
            return file.read()

    @staticmethod
    def analysis(data):
        """Yield ``[name, input_lines, answer_lines]`` for each test case.

        Expected layout of *data*::

            -NAME-
            <stdin lines>
            _NAME_
            <expected stdout lines>
            END

        Fixed: marked as ``@staticmethod`` (it takes no ``self``); the
        existing class-level call ``ReadFiles.analysis(data)`` keeps
        working and instance calls now work too.  Also removed the unused
        local ``count``.

        NOTE(review): a trailing test case without a final END line is
        silently dropped, as in the original — confirm that is intended.
        """
        testcases = None
        test_inputs = []
        answers = []
        inputs = False
        data = data.splitlines()
        for i in data:
            if i == "END":
                # Case complete: emit it and reset for the next one.
                yield [
                    testcases,
                    test_inputs,
                    answers
                ]
                testcases = None
                test_inputs = []
                answers = []
                inputs = None
                continue
            if re.match('-(.*)-', i):
                # "-NAME-" header: start collecting stdin lines.
                inputs = True
                testcases = "".join(re.findall('-(.*)-', i))
            if inputs:
                if re.match('_(.*)_', i) and testcases == "".join(re.findall('_(.*)_', i)):
                    # "_NAME_" separator: switch to collecting answers.
                    inputs = False
                elif inputs and not i == "-" + testcases + "-":
                    test_inputs.append(i)
            elif not inputs and inputs is not None:
                answers.append(i)
ApTester
/ApTester-0.1.4.tar.gz/ApTester-0.1.4/Aptester/controll.py
controll.py
from fabric.colors import red, green
import sys
import argparse
from time import sleep
from rich.console import Console
from . import controll as Ctl


def main():
    """Command-line entry point: run every test case in the test-case file
    against the given Python script and report pass/fail per case."""
    # NOTE(review): these four names are never used below — presumably
    # leftovers from an earlier revision.
    last_answer = []
    last_test = []
    answer_app = last_answer.append
    test_app = last_test.append
    description = """
Hello. I'm Aptester.
Auto Tester for Competitive programming.
©Copyright 2021 Hirose Heitor
""".strip()
    parser = argparse.ArgumentParser(
        description=description,
        formatter_class=argparse.RawTextHelpFormatter
    )
    # re.findall('a(.*)b', 'axyzb')
    parser.add_argument("path_input", help="Path to the Test case File.")
    parser.add_argument("path_python", help="Path to the python file.")
    args = parser.parse_args()
    # The script under test is executed with the same interpreter that
    # runs this tool.
    PythonFile = Ctl.Tester(
        python_path=args.path_python,
        input_path=args.path_input,
        executable=sys.executable
    )
    console = Console()
    if not PythonFile.check_file(args.path_python):
        raise FileNotFoundError(red("Can't find python file."))
    if not PythonFile.check_file(args.path_input):
        raise FileNotFoundError(red("Can't find input file."))
    input_file = Ctl.ReadFiles.read(args.path_input)
    # analysis() yields [case_name, stdin_lines, expected_lines] per case.
    cases = Ctl.ReadFiles.analysis(input_file)
    for i in cases:
        answers = []
        spell = "\n".join(i[1])
        input_text = spell
        test = None
        # Spinner while the subprocess runs; gets_stdout() never returns
        # None, so this loop body executes exactly once (after a 1 s delay).
        with console.status(f"[bold green]Testing {i[0]}...") as status:
            while test is None:
                sleep(1)
                test = PythonFile.gets_stdout(input_text)
        app = answers.append
        # gets_stdout() returns byte strings — decode each output line.
        for l in test:
            app(l.decode('utf-8'))
        # Compare expected output block with actual output block.
        test = "\n".join(i[2])
        answer = "\n".join(answers)
        if test == answer:
            # NOTE(review): "pased" is a typo for "passed" in the original
            # user-facing message; preserved unchanged here.
            print(green(f"The {i[0]} was pased."))
        else:
            print(red("The answer is incorrect."))
            print(red("Test answer :"))
            print(red(test))
            print(red("Your answer :"))
            print(red(answer))


if __name__ == "__main__":
    main()
ApTester
/ApTester-0.1.4.tar.gz/ApTester-0.1.4/Aptester/core.py
core.py
## Apache Local Domain Create Local Domain for Apache Web Service #### Description Create own Domain on Your Local System #### Installation ##### Debian Base Distros (Debian,Ubuntu,...) python3 -m pip install -U Apache-Local-Domain ##### Other Distros (Arch , Fedora , ...) $ git clone https://gitlab.com/toys-projects/Apache-Local-Domain.git $ cd Apache-Local-Domain $ python3 setup.py --help customize_configs ... Options for 'CustomizeConfigurations' command: --debug-mode (-d) Debug mode [False] --apache-modules-path (-m) Apache Modules Path [/etc/apache2/mods-enabled/] --hosts (-h) Hosts file [/etc/hosts] --virtual-hosts-available-path (-a) VirtualHosts available Path [/etc/apache2/sites-available/] --virtual-hosts-enabled-path (-v) VirtualHosts Enabled Path [/etc/apache2/sites-enabled/] --extension (-e) VirtualHosts extension [.conf] ... ##### example Customize Configuration $ python3 setup.py customize_configs \ --debug-mode False \ -m /etc/httpd/modules/ \ -a /etc/httpd/vhosts-available/ \ -v /etc/httpd/vhosts-enabled/ \ -e .dom after of Generate New file Configuration Complete , run: $ sudo python3 setup.py install #### Usage 1 $ apacheld --help Usage: apacheld [OPTIONS] COMMAND [ARGS]... Options: --help Show this message and exit. 
Commands: php Initialize PHP Template wsgi Initialize WSGI Template ##### Usage 2 $ apacheld wsgi --help Usage: apacheld wsgi [OPTIONS] Initialize WSGI Template Options: -d, --domain TEXT This Domain is Created (ServerName) , example: example.com [required] -r, --root TEXT DocumentRoot of Your website (DocumentRoot) , example: /srv/http/MyWebSite/ [required] -w, --wsgiscript TEXT WSGIScriptAlias of Your website (WSGIScriptAlias) , example: /srv/http/MyWebSite/wsgi.py [required] -v, --virtualenv-folder-name TEXT Virtualenv Folder name in Project PATH (default: .venv) [required] -e, --email TEXT Your Email (ServerAdmin) , example: [email protected] --http2 Enable HTTP2 Protocol --enable-static Enable using static files -s, --static-folder-name TEXT static folder name in Project PATH (default: static) [required with enable_static] --help Show this message and exit. ##### Usage 3 $ apacheld php --help Usage: apacheld php [OPTIONS] Initialize PHP Template Options: -d, --domain TEXT This Domain is Created (ServerName) , example: example.com [required] -r, --root TEXT DocumentRoot of Your website (DocumentRoot) , example: /srv/http/MyWebSite/ [required] -e, --email TEXT Your Email (ServerAdmin) , example: [email protected] --http2 Enable HTTP2 Protocol --help Show this message and exit. #### Note * To use This Program You should Run it with `sudo` #### TODO - [x] Check enable http2 module or not - [x] add new Validations for inputs (documentroot , wsgiscript , virtualenv ,...) - [ ] link Configure file to /etc - [ ] Builtin sudo Execute
Apache-Local-Domain
/Apache-Local-Domain-1.1.2.tar.gz/Apache-Local-Domain-1.1.2/README.md
README.md
import click

from ApacheLocalDomain.app.configs import WSGI_TEMPLATE_NAME, HOSTS
# NOTE: importing double-underscore names works here because name mangling
# only applies inside class bodies, not at module level.
from ApacheLocalDomain.app.lib.checkers import _checkWSGIEnabled, __validUrl, __validEmail, _checkHTTP2Enabled, \
    __wsgiAddressValidation
from ApacheLocalDomain.app.lib.cli_helpers import RequiredIF
from ApacheLocalDomain.app.lib.file_handlers import _createVirtualHost, _addToHosts
from ApacheLocalDomain.app.lib.log import error, info
from ApacheLocalDomain.app.lib.template_handlers import mapping, templateLoader, wsgiTemplateMaps


@click.command()
@click.option('-d','--domain','domain',
              required=True,
              prompt="Enter Domain Please",
              help="This Domain is Created (ServerName) , example: example.com")
@click.option('-r','--root','documentRoot',
              required=True,
              prompt="Enter DocumentRoot PATH Please",
              help="DocumentRoot of Your website (DocumentRoot) , example: /srv/http/MyWebSite/")
@click.option('-w','--wsgiscript','wsgiScript',
              required=True,
              prompt="Enter WSGIScriptAlias file Please",
              help="WSGIScriptAlias of Your website (WSGIScriptAlias) , example: /srv/http/MyWebSite/wsgi.py")
@click.option('-v','--virtualenv-folder-name','virtualenv',
              default=".venv",
              required=True,
              prompt="Enter virtualenv folder name",
              help="Virtualenv Folder name in Project PATH (default: .venv)"
              )
@click.option('-e','--email','email',
              required=False,
              default=None,
              help="Your Email (ServerAdmin) , example: [email protected]")
@click.option('--http2',"http2",
              is_flag=True,
              default=False,
              help="Enable HTTP2 Protocol"
              )
@click.option('--enable-static',"enable_static",
              is_flag=True,
              help="Enable using static files"
              )
@click.option('-s','--static-folder-name','StaticFolderName',
              default='static',
              cls=RequiredIF,
              required_if='enable_static',
              help="static folder name in Project PATH (default: static)"
              )
def wsgi(
        domain,
        documentRoot,
        wsgiScript,
        email,
        virtualenv,
        StaticFolderName,
        enable_static,
        http2,
):
    """ Initialize WSGI Template """
    # Flow: validate prerequisites and inputs, render the WSGI VirtualHost
    # template, write it, and register the domain in the hosts file.
    # error() is expected to report the failure (and presumably abort —
    # confirm against lib/log).
    try:
        # Check Enable HTTP2 or NOT
        if http2:
            _checkHTTP2Enabled()
        # Check Enable WSGI or NOT
        _checkWSGIEnabled()
        # validation
        DOMAIN = __validUrl(domain)
        # Default the ServerAdmin address to admin@<domain> when none given.
        email = __validEmail(email if email else "admin@{}".format(DOMAIN))
        __wsgiAddressValidation(
            documentRoot,
            wsgiScript,
            virtualenv,
            StaticFolderName,
            enable_static
        )
        # Load and Mapping
        result = mapping(templateLoader(WSGI_TEMPLATE_NAME),wsgiTemplateMaps(
            domain,
            documentRoot,
            wsgiScript,
            email,
            StaticFolderName,
            virtualenv,
            enable_static,
            http2,
        ))
        # Try to Create VirtualHost File
        if not _createVirtualHost(DOMAIN,result) :
            error('wsgi from cli file',"Cant Create VirtualHost File")
        # # Try add Domain to HOSTS file
        if not _addToHosts(DOMAIN):
            error('wsgi from cli file', "Cant Add Domain to '{}' File".format(HOSTS))
        info('Now Reload Your Apache2 Service: `sudo systemctl reload apache2.service`')
    except Exception as e:
        error('wsgi from cli file', e)
Apache-Local-Domain
/Apache-Local-Domain-1.1.2.tar.gz/Apache-Local-Domain-1.1.2/ApacheLocalDomain/app/wsgi/cli.py
cli.py
import os

from ApacheLocalDomain.app import configs
from ApacheLocalDomain.app.lib.log import info, error
from ApacheLocalDomain.app.lib.template_handlers import mapping, templateLoader, hostTemplateMaps


def _createVirtualHost(domain, content):
    """
    Create VirtualHost File

    :param domain: ServerName the VirtualHost file is named after
    :param content: Your Content That you want Write to File
    :return: True if File Created , False if Not Created
    """
    try:
        PATH = os.path.join(configs.VIRTUAL_HOSTS_AVAILABLE_PATH, domain)
        VirtualHostFileAddress = "{}{}".format(PATH, configs.EXTENSION)
        with open(VirtualHostFileAddress, 'w') as file:
            file.write(content)
        info("VirtualHost Created On '{}'".format(VirtualHostFileAddress))
        # Also expose the file in the sites-enabled directory.
        _setLinkTo(VirtualHostFileAddress)
        return True
    except Exception as e:
        error('_createVirtualHost from helper file', (e))
        # Fixed: return False explicitly — the original fell through and
        # returned None; callers only test truthiness, so behavior is
        # unchanged, but this now matches the documented contract.
        return False


def __backup(File):
    """
    Backup From Your File

    Writes a timestamped ``*.localadmin_<ts>.bkp`` copy next to *File*.
    NOTE(review): the timestamp format contains ':' characters, which is
    not a valid filename on Windows — fine for the Linux-only target here.

    :param File: Your File
    """
    from time import strftime, gmtime
    try:
        with open(File, 'r') as file:
            data = file.read()
        with open("{}.localadmin_{}.bkp".format(File, strftime("%Y-%m-%d_%H:%M:%S", gmtime())), 'w') as file:
            file.write(data)
    except Exception as e:
        error('__backup from helper file', (e))


def _addToHosts(domain):
    """
    Add Your Domain in configs.HOSTS

    Backs up the hosts file first, then appends a rendered host entry.

    :param domain: Your Domain
    :return: True on success, False on failure
    """
    try:
        __backup(configs.HOSTS)
        data = mapping(templateLoader(configs.HOST_TEMPLATE_NAME), hostTemplateMaps(
            domain=domain
        ))
        with open(configs.HOSTS, 'a') as file:
            file.write("{}\n".format(data))
        info("Added Domain On '{}'".format(configs.HOSTS))
        return True
    except Exception as e:
        error('_addToHosts from helper file', (e))
        # Fixed: explicit False instead of an implicit None (see
        # _createVirtualHost).
        return False


def _setLinkTo(thisFile):
    """Hard-link *thisFile* from sites-available into sites-enabled.

    NOTE(review): os.link raises if the link already exists, so re-running
    for an existing domain lands in the error branch — confirm intended.
    """
    # Fixed: removed a redundant local ``import os`` — the module already
    # imports os at the top.
    try:
        link = thisFile.replace(configs.VIRTUAL_HOSTS_AVAILABLE_PATH, configs.VIRTUAL_HOSTS_ENABLED_PATH)
        os.link(thisFile, link)
        info('Linked file "{}" to "{}"'.format(thisFile, link))
    except Exception as e:
        error('_setLinkTo from helper file', e)
Apache-Local-Domain
/Apache-Local-Domain-1.1.2.tar.gz/Apache-Local-Domain-1.1.2/ApacheLocalDomain/app/lib/file_handlers.py
file_handlers.py
import os

from ApacheLocalDomain.app import configs
from ApacheLocalDomain.app.lib.log import error
from validators import domain as domainValidator, email as emailValidator


def __validUrl(url):
    """
    Validate domain
    :param url: Get Url For Validate , Without "http", "https" and "www"
    :return: Valid URL , if Not , Return None
    """
    try:
        # NOTE(review): the log tag says '__validEmail' although we are in
        # __validUrl — looks like a copy/paste slip in the label.
        if not domainValidator(url):
            error('__validEmail from helper file', "Correct Domain: example.com\n without 'http', 'https' , 'www' =)")
        # Strip a leading "www." so the configuration uses the bare domain.
        return url.replace("www.", '') if url.startswith("www.") else url
    except Exception as e:
        error('__validUrl from helper file', e)


def __validEmail(email):
    """
    Validate email
    :param email: get email to Validation
    :return: Valid Email , if not show ERROR and exit !
    """
    try:
        if not emailValidator(email):
            error('__validEmail from helper file', "Correct Email: [email protected]")
        return email
    except Exception as e:
        error('__validEmail from helper file', e)


def _checkWSGIEnabled():
    # Scan the enabled-modules directory for any entry containing "wsgi".
    try:
        enable_modules = os.listdir(configs.APACHE2_MODULES_PATH)
        for emodule in enable_modules:
            if emodule.__contains__("wsgi"):
                return True
        error('_checkWSGIEnabled from helper file', "'mode_wsgi' Module of Apache not found or Disable")
    except Exception as e:
        error('_checkWSGIEnabled from helper file', e)


def _checkHTTP2Enabled():
    # Scan the enabled-modules directory for any entry containing "http2".
    try:
        enable_modules = os.listdir(configs.APACHE2_MODULES_PATH)
        for emodule in enable_modules:
            if emodule.__contains__("http2"):
                return True
        error('_checkHTTP2Enabled from helper file', "'http2' Module of Apache not found or Disable")
    except Exception as e:
        error('_checkHTTP2Enabled from helper file', e)


def __wsgiAddressValidation(
        documentRoot,
        wsgiScript,
        virtualenv,
        StaticFolderName,
        enable_static
):
    # Validate the paths a WSGI VirtualHost needs.  Raises Exception on the
    # first invalid path; callers convert that into an error() report.
    DOCROOT = os.path.abspath(documentRoot)
    # Validate DocumentRoot
    # NOTE(review): existence is only checked when the path contains no
    # "/" (a bare relative name); paths with slashes are accepted
    # unchecked.  This looks like an inverted condition — confirm intent
    # before changing.
    if ("/" not in documentRoot):
        if not os.path.exists(documentRoot):
            raise Exception("directory does not exist: {0}".format(documentRoot))
    # Validate wsgiScript
    # NOTE(review): same slash-free-only existence check as above.
    if ("/" not in wsgiScript):
        if not os.path.exists(wsgiScript):
            raise Exception("directory does not exist: {0}".format(wsgiScript))
    # Validate Virtualenv Name
    # The virtualenv option must be a bare folder name inside DOCROOT.
    if ("/" in virtualenv):
        raise Exception("directory does not valid: {0}".format(virtualenv))
    VIRTUALENV = os.path.join(DOCROOT, virtualenv)
    if not os.path.exists(VIRTUALENV):
        raise Exception("directory does not exist: {0}".format(VIRTUALENV))
    # validate Static Folder Name
    if enable_static:
        # Likewise, the static folder must be a bare name inside DOCROOT.
        if ("/" in StaticFolderName):
            raise Exception("directory does not valid: {0}".format(StaticFolderName))
        STATIC_FOLDER_NAME = os.path.join(DOCROOT, StaticFolderName)
        if not os.path.exists(STATIC_FOLDER_NAME):
            raise Exception("directory does not exist: {0}".format(STATIC_FOLDER_NAME))


def __phpAddressValidation(
        documentRoot
):
    # Validate the DocumentRoot for a PHP VirtualHost (same caveat as in
    # __wsgiAddressValidation about the slash-free-only existence check).
    DOCROOT = os.path.abspath(documentRoot)
    # Validate DocumentRoot
    if ("/" not in documentRoot):
        if not os.path.exists(documentRoot):
            raise Exception("directory does not exist: {0}".format(documentRoot))
Apache-Local-Domain
/Apache-Local-Domain-1.1.2.tar.gz/Apache-Local-Domain-1.1.2/ApacheLocalDomain/app/lib/checkers.py
checkers.py
import click

from ApacheLocalDomain.app.configs import PHP_TEMPLATE_NAME, HOSTS
from ApacheLocalDomain.app.lib.checkers import __validUrl, __validEmail, _checkHTTP2Enabled, __phpAddressValidation
from ApacheLocalDomain.app.lib.file_handlers import _createVirtualHost, _addToHosts
from ApacheLocalDomain.app.lib.log import error, info
from ApacheLocalDomain.app.lib.template_handlers import mapping, templateLoader, phpTemplateMaps


@click.command()
@click.option('-d','--domain','domain',
              required=True,
              prompt="Enter Domain Please",
              help="This Domain is Created (ServerName) , example: example.com")
@click.option('-r','--root','documentRoot',
              required=True,
              prompt="Enter DocumentRoot PATH Please",
              help="DocumentRoot of Your website (DocumentRoot) , example: /srv/http/MyWebSite/")
@click.option('-e','--email','email',
              required=False,
              default=None,
              help="Your Email (ServerAdmin) , example: [email protected]")
@click.option('--http2',"http2",
              is_flag=True,
              default=False,
              help="Enable HTTP2 Protocol"
              )
def php(domain,documentRoot,email,http2):
    """ Initialize PHP Template """
    # Flow mirrors the wsgi command: validate, render the PHP VirtualHost
    # template, write the file, add the domain to the hosts file.
    try:
        # Check Enable HTTP2 or NOT
        if http2:
            _checkHTTP2Enabled()
        # validation
        DOMAIN = __validUrl(domain)
        # Default the ServerAdmin address to admin@<domain> when none given.
        email = __validEmail(email if email else "admin@{}".format(DOMAIN))
        __phpAddressValidation(documentRoot)
        # get Result
        result = mapping(templateLoader(PHP_TEMPLATE_NAME), phpTemplateMaps(
            server_admin=email,
            document_root=documentRoot,
            server_name=DOMAIN,
            http2=http2
        ))
        # Try to Create VirtualHost File
        if not _createVirtualHost(DOMAIN,result) :
            error('php from cli file',"Cant Create VirtualHost File")
        # # Try add Domain to HOSTS file
        if not _addToHosts(DOMAIN):
            error('php from cli file', "Cant Add Domain to '{}' File".format(HOSTS))
        info('Now Reload Your Apache2 Service: `sudo systemctl reload apache2.service`')
    except Exception as e:
        error('php from cli file', e)
Apache-Local-Domain
/Apache-Local-Domain-1.1.2.tar.gz/Apache-Local-Domain-1.1.2/ApacheLocalDomain/app/php/cli.py
cli.py
import requests
from requests.exceptions import (ReadTimeout, RequestException, ConnectTimeout)
from .config import GatewayConfig, ALL_ENV
from .exception import (EnvTypeExp, SetUpUriExp, SetUpRegisterExp, SetUpGatewayExp, GetRegisterTokenErr)


class GatewayProxy(object):
    """
    gateway proxy class

    Registers this client's URI and path metadata with the Apache ShenYu
    gateway admin service; all settings come from GatewayConfig.
    """
    def __init__(self):
        self.headers = {"Content-Type": "application/json;charset=UTF-8"}
        self.env = GatewayConfig.uri.get("environment")
        if not isinstance(self.env, str) or self.env not in ALL_ENV:
            raise EnvTypeExp(env=self.env)
        self.register_token = None
        # Order matters: URL lists built here are reused by the later
        # _setup_* calls and by _get_register_token.
        self._set_up_gateway_service_url()
        self._setup_uri_params()
        self._setup_register_params()
        self._get_register_token()
        if not self.register_token:
            raise GetRegisterTokenErr(msg="can't get register token")
        else:
            # Every subsequent request authenticates with this token.
            self.headers.update({"X-Access-Token": self.register_token})

    def _set_up_gateway_service_url(self):
        """Build the register-metadata / register-uri URL lists from the
        per-environment server list and port."""
        try:
            self.gateway_base_urls = GatewayConfig.__dict__.get(self.env, {}).get("servers", "").split(",")
            self.port = GatewayConfig.__dict__.get(self.env, {}).get("port")
            url_pre = "http://{}:{}"
            self.gateway_base_urls = [url_pre.format(_url, self.port) for _url in self.gateway_base_urls]
            self.register_meta_data_suffix = "/gateway-shenyu/register-metadata"
            self.register_uri_suffix = "/gateway-shenyu/register-uri"
            self.register_meta_data_path_list = [_url + self.register_meta_data_suffix
                                                 for _url in self.gateway_base_urls]
            self.register_uri_list = [_url + self.register_uri_suffix for _url in self.gateway_base_urls]
        except SetUpGatewayExp as sue:
            # NOTE(review): the try body never raises SetUpGatewayExp itself
            # (config problems surface as AttributeError/KeyError), so this
            # handler looks unreachable — confirm intent.
            raise SetUpUriExp(app_name=GatewayConfig.uri.get("app_name"), msg=str(sue), env=self.env)

    def _setup_uri_params(self):
        """
        setup uri params
        """
        try:
            self.host = GatewayConfig.uri.get("host")
            # NOTE(review): this overwrites the gateway port assigned in
            # _set_up_gateway_service_url with the local service port; the
            # gateway URL lists were already built, so they are unaffected.
            self.port = GatewayConfig.uri.get("port")
            self.app_name = GatewayConfig.uri.get("app_name")
            self.rpc_type = GatewayConfig.uri.get("rpc_type")
            self.context_path = GatewayConfig.uri.get("context_path")
            self.register_type = GatewayConfig.register.get("register_type")
            self.register_servers = GatewayConfig.register.get("register_servers")
        except SetUpUriExp as se:
            raise SetUpUriExp(app_name=GatewayConfig.uri.get("app_name"), msg=str(se), env=self.env)

    def _setup_register_params(self):
        """
        setup register params
        """
        try:
            self.register_token_type = GatewayConfig.register.get("register_type")
            self.register_base_servers = GatewayConfig.register.get("servers").split(",")
            self.register_path = "/platform/login"
            # NOTE(review): the token URLs are built with
            # register_uri_suffix while register_path ("/platform/login")
            # is never used — presumably the login path was intended here;
            # verify against the ShenYu admin API.
            self.register_token_servers = [_url + self.register_uri_suffix for _url in self.register_base_servers]
            self.register_username = GatewayConfig.register.get("props", {}).get("username")
            self.register_password = GatewayConfig.register.get("props", {}).get("password")
        except SetUpRegisterExp as se:
            raise SetUpRegisterExp(app_name=GatewayConfig.uri.get("app_name"), msg=str(se), env=self.env)

    def _request(self, url, json_data):
        """
        base post request

        :return: True only when the gateway answers with body "success";
                 False on any transport error or any other response body.
        """
        if not url or not isinstance(url, str) or not isinstance(json_data, dict):
            print("_request url or data format error")
            return False
        try:
            res = requests.post(url, json=json_data, headers=self.headers, timeout=5)
            status_code = res.status_code
            msg = res.text
        except ConnectTimeout as ce:
            print("connect timeout, detail is:{}".format(str(ce)))
            return False
        except ReadTimeout as rte:
            print("read time out, detail is:{}".format(str(rte)))
            return False
        except RequestException as rqe:
            print("request except, detail is:{}".format(str(rqe)))
            return False
        except Exception as e:
            print("request ({}) except, detail is:{}".format(url, str(e)))
            return False
        else:
            # According to the interface return value of the gateway registry, the request is considered successful
            # only when msg==success; if the interface return value of the gateway registry changes, the judgment
            # method should also be modified
            if msg == "success":
                return True
            print("request ({}) fail, status code is:{}, msg is:{}".format(res.url, status_code, msg))
            return False

    def _get_register_token(self):
        """
        base get http request

        Tries each configured token server in order and stores the first
        non-empty token in self.register_token.
        """
        default_res = ""  # NOTE(review): unused — leftover?
        params = {
            "userName": self.register_username,
            "password": self.register_password
        }
        try:
            for url in self.register_token_servers:
                res = requests.get(url, params=params, timeout=5)
                status_code = res.status_code
                res_data = res.json()
                token = res_data.get("data", {}).get("token", "")
                if token:
                    self.register_token = token
                    break
        except ConnectTimeout as ce:
            print("connect timeout, detail is:{}".format(str(ce)))
            return False
        except ReadTimeout as rte:
            print("read time out, detail is:{}".format(str(rte)))
            return False
        except RequestException as rqe:
            print("request except, detail is:{}".format(str(rqe)))
            return False
        except Exception as e:
            print("get register token except, detail is:{}".format(str(e)))
            return False

    def register_uri(self):
        """
        register uri

        :return: True when any configured gateway accepted the
                 registration, else False.
        """
        json_data = {
            "appName": self.app_name,
            "contextPath": self.context_path,
            "rpcType": self.rpc_type,
            "host": self.host,
            "port": self.port
        }
        register_flag = False
        # First gateway that accepts wins; the rest are skipped.
        for _url in self.register_uri_list:
            res = self._request(_url, json_data)
            if not res:
                continue
            else:
                print("[SUCCESS], register uri success, register data is:{}".format(str(json_data)))
                register_flag = True
                break
        if not register_flag:
            print("[ERROR], register uri fail, app_name is:{}, host is:{}, port is:{}".format(self.app_name,
                                                                                              self.host,
                                                                                              self.port))
        return register_flag

    def register_metadata(self, **kwargs):
        """
        register path to gateway
        path: The path needs to be unique, for example, your path is: /order/findById, your request prefix is: /hello,
              the path must be /hello/order/findById
        register_all Register all paths ?
        rule_name: Can be the same as path
        enabled: Whether to open, If you want to open the gateway proxy, you must fill in True
        path_desc: Path description, optional filling
        register_meta_data: Need to register metadata, not for http request, fill in false
        """
        # Either an explicit path or register_all=True is required.
        if not kwargs.get("register_all") and not kwargs.get("path"):
            return False
        register_all = kwargs.get("register_all", False)
        path = kwargs.get("path", "")
        rule_name = kwargs.get("rule_name", "")
        enabled = kwargs.get("enabled", True)
        path_desc = kwargs.get("path_desc", "")
        register_meta_data = kwargs.get("register_meta_data", False)
        if register_all:
            # Register a wildcard rule under the context path.
            path = self.context_path + "**" if self.context_path.endswith("/") else self.context_path + "/**"
        rule_name = path if not rule_name else rule_name
        json_data = {
            "appName": self.app_name,
            "contextPath": self.context_path,
            "path": path,
            "pathDesc": path_desc,
            "rpcType": self.rpc_type,
            "ruleName": rule_name,
            "enabled": enabled,
            "registerMetaData": register_meta_data,
            "pluginNames": []
        }
        register_flag = False
        # Same first-success strategy as register_uri().
        for _url in self.register_meta_data_path_list:
            res = self._request(_url, json_data)
            if not res:
                continue
            else:
                print("[SUCCESS], register metadata success, register data is:{}".format(str(json_data)))
                register_flag = True
                break
        if not register_flag:
            print("[ERROR],register metadata fail, app_name:{}, path:{}, contextPath:{}".format(self.app_name,
                                                                                                path,
                                                                                                self.context_path))
        return register_flag
Apache-ShenYu-Client
/Apache-ShenYu-Client-0.2.tar.gz/Apache-ShenYu-Client-0.2/apache_shenyu_client/api.py
api.py
************ Introduction ************ This is the Traffic Ops Python Client for Python 3.x. Installation ============ The official installation method is to use ``pip`` to install directly from GitHub. .. code-block:: shell pip install git+https://github.com/apache/trafficcontrol.git#"egg=trafficops&subdirectory=traffic_control/clients/python" # or # pip install git+ssh://[email protected]/apache/trafficcontrol.git#"egg=trafficops&subdirectory=traffic_control/clients/python" Local Installation ------------------ The preferred method is to use ``pip`` to install locally. Starting from the repository's root directory, the following script should do the job: .. code-block:: shell cd traffic_control/clients/python pip install . # The above will install using the system's default Python interpreter - to use a specific # version it will be necessary to specify the interpreter and pass the 'pip' module to it. # e.g. for the system's default Python 3 interpreter, typically one would do: # sudo -H /usr/bin/env python3 -m pip install . # Developers may wish to use the '-e' flag. This will install the package 'edit-ably', # meaning that changes made to the package within the repository structure will be effected on # the system-wide installation. # sudo -H pip install -e . # If your system does not have 'pip', but does (somehow) have 'setuptools' as well # as the package's dependencies, you can call 'setup.py' directly to install the # package for the system's default Python 3 interpreter # sudo -H ./setup.py install The local installation method requires ``pip`` and ``setuptools``. ``setuptools`` should be installed if your system has ``pip``, but if you are missing either of them they can both be relatively easily installed with Python standard libraries. .. code-block:: shell # Here I'm using 'python' because that points to a Python 3 interpreter on my system. You may # wish to use 'python3' instead.
sudo -H python -m ensurepip sudo -H python -m pip install -U pip # If your system's 'python' already has 'pip', then you may skip to this step to # install only 'setuptools' sudo -H python -m pip install setuptools Development Dependencies ------------------------ To install the development dependencies, first ensure that your system has ``pip`` and ``setuptools`` then use ``pip`` to install the development environment. .. note:: Currently, the development environment only requires `Pylint <https://www.pylint.org/>`_, which is a simple linter for which a configuration file is provided at ``traffic_control/clients/python/pylint.rc``. .. code-block:: shell pip install -e .[dev]
Apache-TrafficControl
/Apache-TrafficControl-3.1.0.tar.gz/Apache-TrafficControl-3.1.0/README.rst
README.rst
import json
import logging
import os
import sys
from urllib.parse import urlparse

from trafficops.restapi import LoginError, OperationError, InvalidJSONError
from trafficops.tosession import TOSession
from trafficops.__version__ import __version__
from requests.exceptions import RequestException

# These scripts are meant to emit only the raw HTTP exchange, so silence all
# library logging (disable the root logger and raise the threshold above CRITICAL).
l = logging.getLogger()
l.disabled = True
logging.basicConfig(level=logging.CRITICAL+1)

def output(r, pretty, request_header, response_header, request_payload, indent = '\t'):
	"""
	Prints the passed response object in a format consistent with the other parameters.

	:param r: The :mod:`requests` response object being printed
	:param pretty: If :const:`True`, attempt to pretty-print payloads as JSON
	:param request_header: If :const:`True`, print request line and request headers
	:param response_header: If :const:`True`, print response line and response headers
	:param request_payload: If :const:`True`, print the request payload
	:param indent: An optional number of spaces for pretty-printing indentation (default is the tab character)
	"""
	if request_header:
		print(r.request.method, r.request.path_url, "HTTP/1.1")
		for h,v in r.request.headers.items():
			print("%s:" % h, v)
		print()

	if request_payload and r.request.body:
		try:
			# Round-trip through json to validate; fall back to the raw body on failure
			result = r.request.body if not pretty else json.dumps(json.loads(r.request.body))
		except ValueError:
			result = r.request.body
		print(result, end="\n\n")

	if response_header:
		print("HTTP/1.1", r.status_code, end="")
		# Reason phrase may be empty (e.g. HTTP/2 servers); avoid a trailing space
		print(" "+r.reason if r.reason else "")
		for h,v in r.headers.items():
			print("%s:" % h, v)
		print()

	try:
		result = r.text if not pretty else json.dumps(r.json(), indent=indent)
	except ValueError:
		# Response body was not valid JSON; print it verbatim
		result = r.text

	print(result)

def parse_arguments(program):
	"""
	A common-use function that parses the command line arguments.

	:param program: The name of the program being run - used for usage informational output
	:returns: The Traffic Ops HTTP session object, the requested path, any data to be sent, an
		output format specification, whether or not the path is raw, and whether or not output
		should be prettified
	:raises KeyError: when the Traffic Ops URL, user or password is not given via flag or environment
	:raises PermissionError: when authentication with the Traffic Ops server fails
	:raises ConnectionError: when the Traffic Ops host cannot be reached
	"""
	from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
	parser = ArgumentParser(prog=program,
	                        formatter_class=ArgumentDefaultsHelpFormatter,
	                        description="A helper program for interfacing with the Traffic Ops API",
	                        epilog=("Typically, one will want to connect and authenticate by defining "
	                                "the 'TO_URL', 'TO_USER' and 'TO_PASSWORD' environment variables "
	                                "rather than (respectively) the '--to-url', '--to-user' and "
	                                "'--to-password' command-line flags. Those flags are only "
	                                "required when said environment variables are not defined.\n"
	                                "%(prog)s will exit with a success provided a response was "
	                                "received and the status code of said response was less than 400. "
	                                "The exit code will be 1 if command line arguments cannot be "
	                                "parsed or authentication with the Traffic Ops server fails. "
	                                "In the event of some unknown error occurring when waiting for a "
	                                "response, the exit code will be 2. If the server responds with "
	                                "a status code indicating a client or server error, that status "
	                                "code will be used as the exit code."))

	parser.add_argument("--to-url",
	                    type=str,
	                    help=("The fully qualified domain name of the Traffic Ops server. Overrides "
	                          "'$TO_URL'. The format for both the environment variable and the flag "
	                          "is '[scheme]hostname[:port]'. That is, ports should be specified here, "
	                          "and they need not start with 'http://' or 'https://'. HTTPS is the "
	                          "assumed protocol unless the scheme _is_ provided and is 'http://'."))
	parser.add_argument("--to-user",
	                    type=str,
	                    # Fixed: closing quote after '$TO_USER' was missing
	                    help="The username to use when connecting to Traffic Ops. Overrides '$TO_USER'")
	parser.add_argument("--to-password",
	                    type=str,
	                    help="The password to use when authenticating to Traffic Ops. Overrides '$TO_PASSWORD'")
	parser.add_argument("-k", "--insecure", action="store_true", help="Do not verify SSL certificates")
	parser.add_argument("-f", "--full",
	                    action="store_true",
	                    help=("Also output HTTP request/response lines and headers, and request payload. "
	                          "This is equivalent to using '--request-headers', '--response-headers' "
	                          "and '--request-payload' at the same time."))
	parser.add_argument("--request-headers",
	                    action="store_true",
	                    help="Output request method line and headers")
	parser.add_argument("--response-headers",
	                    action="store_true",
	                    help="Output response status line and headers")
	parser.add_argument("--request-payload",
	                    action="store_true",
	                    help="Output request payload (will try to pretty-print if '--pretty' is given)")
	parser.add_argument("-r", "--raw-path",
	                    action="store_true",
	                    # Fixed: closing quote after the path example was missing
	                    help="Request exactly PATH; it won't be prefaced with '/api/{{api-version}}/'")
	parser.add_argument("-a", "--api-version",
	                    type=float,
	                    default=4.1,
	                    help="Specify the API version to request against")
	parser.add_argument("-p", "--pretty",
	                    action="store_true",
	                    help=("Pretty-print payloads as JSON. "
	                          "Note that this will make Content-Type headers \"wrong\", in general"))
	parser.add_argument("-v", "--version",
	                    action="version",
	                    help="Print version information and exit",
	                    version="%(prog)s v"+__version__)
	parser.add_argument("PATH", help="The path to the resource being requested - omit '/api/2.x'")
	parser.add_argument("DATA",
	                    help=("An optional data string to pass with the request. If this is a "
	                          "filename, the contents of the file will be sent instead."),
	                    nargs='?')

	args = parser.parse_args()

	try:
		to_host = args.to_url if args.to_url else os.environ["TO_URL"]
	except KeyError as e:
		raise KeyError("Traffic Ops hostname not set! Set the TO_URL environment variable or use "\
		               "'--to-url'.") from e

	original_to_host = to_host
	# HTTPS is the assumed scheme when none is given
	to_host = urlparse(to_host, scheme="https")
	useSSL = to_host.scheme.lower() == "https"
	to_port = to_host.port
	if to_port is None:
		if useSSL:
			to_port = 443
		else:
			to_port = 80

	to_host = to_host.hostname
	if not to_host:
		raise KeyError(f"Invalid URL/host for Traffic Ops: '{original_to_host}'")

	s = TOSession(to_host,
	              host_port=to_port,
	              ssl=useSSL,
	              api_version=f"{args.api_version:.1f}",
	              verify_cert=not args.insecure)

	data = args.DATA
	# If DATA names an existing file, send the file's contents instead of the literal string
	if data and os.path.isfile(data):
		with open(data) as f:
			data = f.read()

	if isinstance(data, str):
		data = data.encode()

	try:
		to_user = args.to_user if args.to_user else os.environ["TO_USER"]
	except KeyError as e:
		raise KeyError("Traffic Ops user not set! Set the TO_USER environment variable or use "\
		               "'--to-user'.") from e

	try:
		to_passwd = args.to_password if args.to_password else os.environ["TO_PASSWORD"]
	except KeyError as e:
		raise KeyError("Traffic Ops password not set! Set the TO_PASSWORD environment variable or "\
		               "use '--to-password'") from e

	# TOSession objects return LoginError when certs are invalid, OperationError when
	# login actually fails
	try:
		s.login(to_user, to_passwd)
	except LoginError as e:
		raise PermissionError(
			"certificate verification failed, the system may have a self-signed certificate - try using -k/--insecure"
		) from e
	except (OperationError, InvalidJSONError) as e:
		raise PermissionError(e) from e
	except RequestException as e:
		raise ConnectionError("Traffic Ops host not found: Name or service not known") from e

	return (s,
	        args.PATH,
	        data,
	        (
	            args.request_headers or args.full,
	            args.response_headers or args.full,
	            args.request_payload or args.full
	        ),
	        args.raw_path,
	        args.pretty)

def request(method):
	"""
	All of the scripts wind up calling this function to handle their common functionality.

	:param method: The name of the request method to use (case-insensitive)
	:returns: The program's exit code
	"""
	try:
		s, path, data, full, raw, pretty = parse_arguments("to%s" % method)
	except (PermissionError, KeyError, ConnectionError) as e:
		print(e, file=sys.stderr)
		return 1

	# '--raw-path' requests are made relative to the server root rather than the API base
	if raw:
		path = '/'.join((s.to_url.rstrip('/'), path.lstrip('/')))
	else:
		path = '/'.join((s.base_url.rstrip('/'), path.lstrip('/')))

	try:
		if data is not None:
			r = s._session.request(method, path, data=data)
		else:
			r = s._session.request(method, path)
	except (RequestException, ValueError) as e:
		print("Error occurred: ", e, file=sys.stderr)
		return 2

	output(r, pretty, *full)

	# NOTE(review): the epilog above says the response's status code is used as
	# the exit code, but this returns the status-code class (4 or 5) instead —
	# confirm which behavior is intended before changing either.
	return 0 if r.status_code < 400 else r.status_code // 100

def get():
	"""
	Entry point for :program:`toget`

	:returns: The program's exit code
	"""
	return request("get")

def put():
	"""
	Entry point for :program:`toput`

	:returns: The program's exit code
	"""
	return request("put")

def post():
	"""
	Entry point for :program:`topost`

	:returns: The program's exit code
	"""
	return request("post")

def delete():
	"""
	Entry point for :program:`todelete`

	:returns: The program's exit code
	"""
	return request("delete")

def options():
	"""
	Entry point for :program:`tooptions`

	:returns: The program's exit code
	"""
	return request("options")

def head():
	"""
	Entry point for :program:`tohead`

	:returns: The program's exit code
	"""
	return request("head")

def patch():
	"""
	Entry point for :program:`topatch`

	:returns: The program's exit code
	"""
	return request("patch")
Apache-TrafficControl
/Apache-TrafficControl-3.1.0.tar.gz/Apache-TrafficControl-3.1.0/to_access/__init__.py
__init__.py
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Module to help retrieve/create/update/delete data from/to any RESTful API (Base Class).

Requires Python Version >= 2.7 or >= 3.6
"""

# Core Modules
import json
import logging
import functools
from builtins import str

# Third-party Modules
import munch
import requests
import requests.adapters as ra

# Python 2 to Python 3 Compatibility
import requests.compat as compat

# Local Modules
from .utils import log_with_debug_info

try:
	from future.utils import iteritems
except ImportError:
	# 'future' is not installed; on Python 3 a plain .items() call is equivalent
	iteritems = lambda x: x.items()

__all__ = ['LoginError', 'OperationError', 'InvalidJSONError', 'api_request', 'RestApiSession',
           'DEFAULT_HEADERS']

LOGGER = logging.getLogger(__name__)

# Exception Classes
class LoginError(OSError):
	"""
	This represents an error that occurred during server login.
	"""
	def __init__(self, *args):
		OSError.__init__(self, *args)

class OperationError(IOError):
	"""
	This class represents a generic error, indicating something went wrong with the request or
	on the server.
	"""

	#: Contains the response object that generated the error
	resp = None

	def __init__(self, *args, resp=None):
		IOError.__init__(self, *args)
		self.resp = resp

class InvalidJSONError(ValueError):
	"""
	An error that occurs when an invalid JSON payload is passed to an endpoint.
	"""

	#: Contains the response object that generated the error
	resp = None

	def __init__(self, *args, resp=None):
		ValueError.__init__(self, *args)
		self.resp = resp

# Miscellaneous Constants and/or Variables
DEFAULT_HEADERS = {u'Content-Type': u'application/json; charset=UTF-8'}


# Helper Functions/Decorators
def api_request(method_name, api_path, supported_versions):
	"""
	This wrapper returns a decorator that routes the calls to the appropriate utility function
	that generates the RESTful API endpoint, performs the appropriate call to the endpoint and
	returns the data to the user.

	:param method_name: A method name defined on the Class, this decorator is decorating, that
		will be called to perform the operation. E.g. 'GET', 'POST', 'PUT', 'DELETE', etc. The
		method_name chosen must have the signature of ``<method>(self, api_path, **kwargs)``
		e.g. ``def get(self, api_path, **kwargs): ...``
	:type method_name: str
	:param api_path: The path to the API end-point that you want to call which does not include
		the base url e.g. ``user/login``, ``servers``, etc. This string can contain substitution
		parameters as denoted by a valid field_name replacement field specification as per
		:meth:`str.format` e.g. ``cachegroups/{id}`` or ``cachegroups/{id:d}``
	:type api_path: str
	:param supported_versions: A tuple of API versions that this route supports
	:type supported_versions: Tuple[str]
	:return: rtype int: A new function that replaces the original function with a boilerplate
		execution process.
	:rtype: Callable[str, Dict[str, Any]]
	"""
	def outer(func):
		@functools.wraps(func)
		def method_wrapper(self, *args, **kwargs):
			# Positional arguments, e.g. *args, are not being used. Keyword arguments are the
			# preferred way to pass the parameters needed by the helper functions
			if (self.api_version is None) or (self.api_version in supported_versions):
				msg = (u'Calling method [{0}] with keyword arguments [{1}] '
				       u'via API endpoint method [{2}]')
				log_with_debug_info(logging.DEBUG,
				                    msg.format(method_name, kwargs, func.__name__))
				return getattr(self, method_name)(api_path, **kwargs)

			# Client API version is not supported by the method being called
			msg = (u"Method [{0}] is not supported by this client's API version [{1}]; "
			       u'Supported versions: {2}')  # type: Text
			msg = msg.format(func.__name__, self.api_version, supported_versions)
			log_with_debug_info(logging.DEBUG, msg)
			raise OperationError(msg)
		return method_wrapper
	return outer

class RestApiSession(object):
	"""
	This class represents a login session with a generic REST API server. It provides base
	functionality inherited by :class:`TOSession`.
	"""

	def __init__(self, host_ip, api_version=None, api_base_path=u'api/', host_port=443,
	             ssl=True, headers=None, verify_cert=True, create_session=False, max_retries=5):
		"""
		The class initializer.

		:param host_ip: The dns name or ip address of the RESTful API host to use to talk to the API
		:type host_ip: str
		:param host_port: The port to use when contacting the RESTful API
		:type host_port: int
		:param api_version: The version of the API to make calls against. If supplied, endpoint
			version validation will be performed. If supplied as None, no version validation will
			be performed. :const:`None` is allowed so that non-versioned REST APIs can be
			implemented.
		:type api_version: Union[str, None]
		:param api_base_path: The part of the url that is the base path, from the web server root
			(which may include an API version), for all API endpoints without the server url
			portion e.g. 'api/', 'api/latest/'

			.. note:: To specify the base path with the passed ``api_version`` you can specify
				``api_base_path`` as ``api/{api_version}/`` and the API version will be
				substituted. If ``api_version`` is :const:`None` and '{api_version}' is specified
				in the ``api_base_path`` string then an exception will be thrown.

				e.g. api_version=u'latest' -> 'api/{api_version}/' -> 'api/latest/'
				     api_version=None -> 'api/{api_version}/' -> Throws Exception

		:type api_base_path: str
		:param ssl: Should SSL be used? (http vs. https)
		:type ssl: bool
		:param headers: The HTTP headers to use when contacting the RESTful API
		:type headers: Dict[str, str]
		:param verify_cert: Should the SSL certificates be verified when contacting the RESTful
			API. You may want to set this to :const:`False` for systems with self-signed
			certificates.
		:type verify_cert: bool
		:param create_session: Should a session be created automatically?
		:type create_session: bool
		:param max_retries: The maximum number of connection retries mounted on the underlying
			HTTP adapters
		:type max_retries: int
		"""
		if headers is None:
			headers = DEFAULT_HEADERS
		self._session = None
		self._host_ip = host_ip
		self._host_port = host_port
		self._api_version = api_version
		self._api_base_path = api_base_path
		self._ssl = ssl
		self._headers = headers
		self._verify_cert = verify_cert
		self._create_session = create_session
		self._max_retries = max_retries

		# Setup API End-point Version validation, if enabled
		self.__api_version_format_name = u'api_version'
		self.__api_version_format_value = u'{{{0}}}'.format(self.__api_version_format_name)

		if self._api_version:
			# if api_base_path is supplied as 'api/{api_version}/' or some string
			# containing '{api_version}' then try to substitute the api_version supplied
			# by the user.
			version_params = {
				self.__api_version_format_name: self._api_version
			}
			self._api_base_path = self._api_base_path.format(**version_params)

		if not self._api_version and self.__api_version_format_value in self._api_base_path:
			msg = (u'{0} was specified in the API Base Path [{1}] '
			       u'but the replacement did not occur because the API Version '
			       u'was not supplied.')
			msg = msg.format(self.__api_version_format_value, self._api_base_path)
			log_with_debug_info(logging.ERROR, msg)
			raise OperationError(msg)

		# Setup some common URLs
		self._server_url = u'{0}://{1}{2}/'.format(u'https' if ssl else u'http',
		                                           host_ip,
		                                           u':{0}'.format(host_port) if host_port else u'')
		self._api_base_url = compat.urljoin(self._server_url, self._api_base_path)
		self._api_base_url = self._api_base_url.rstrip(u'/') + u'/'

		if not self._verify_cert:
			# Not verifying certs so let's disable the warning
			#pylint: disable=E1101
			requests.packages.urllib3.disable_warnings(\
				requests.packages.urllib3.exceptions.InsecureRequestWarning)
			#pylint: enable=E1101
			log_with_debug_info(logging.WARNING,
			                    u'Certificate verification warnings are disabled.')

		msg = u'RestApiSession instance {0:#0x} initialized: Details: {1}'
		log_with_debug_info(logging.DEBUG, msg.format(id(self), self.__dict__))

		if self._create_session:
			self.create()

	@property
	def is_open(self):
		"""
		Is the session open to the RESTful API? (Read-only Property)

		:return: :const:`True` if yes, otherwise, :const:`False`
		:rtype: bool
		"""
		return self._session is not None

	@property
	def session(self):
		"""
		The RESTful API session (Read-only Property)

		:return: The requests session
		:rtype: :class:`requests.Session`
		"""
		return self._session

	def create(self):
		"""
		Create the requests.Session to communicate with the RESTful API.

		:return: :const:`None`
		:rtype: NoneType
		"""
		if self._session:
			self.close()

		if not self._session:
			self._session = requests.Session()
			# Mount retrying adapters for both schemes
			self._session.mount('http://', ra.HTTPAdapter(max_retries=self._max_retries))
			self._session.mount('https://', ra.HTTPAdapter(max_retries=self._max_retries))
			msg = u'Created internal requests Session instance {0:#0x}'
			log_with_debug_info(logging.DEBUG, msg.format(id(self._session)))

	def close(self):
		"""
		Close and cleanup the requests Session object.

		:return: :const:`None`
		:rtype: NoneType
		"""
		if self._session:
			sid = id(self._session)
			self._session.close()
			del self._session
			self._session = None
			msg = u'Internal requests Session instance 0x{0:x} closed and cleaned up'
			log_with_debug_info(logging.DEBUG, msg.format(sid))

	@property
	def server_url(self):
		"""
		The URL without the api portion. (read-only)

		:return: The URL should match '[\\w\\+\\-\\.]+://[\\w\\+\\-\\.]+(:\\d+)?' e.g.
			'https://to.somedomain.net' or 'https://to.somedomain.net:443'
		:rtype: str
		"""
		return self._server_url

	@property
	def api_version(self):
		"""
		Returns the api version. (read-only)

		:return: The api version from which this instance will request endpoints.
		:rtype: str
		"""
		return self._api_version

	@property
	def api_base_url(self):
		"""
		Returns the base URL. (read-only)

		:return: The base URL should match '[\\w\\+\\-\\.]+://[\\w\\+\\-\\.]+(:\\d+)?' e.g.
			'https://to.somedomain.net/api/0.1/'
		:rtype: str
		"""
		return self._api_base_url

	def _build_endpoint(self, api_path, params=None, query_params=None):
		"""
		Helper function to form API URL. The base URL is
		'<protocol>://<hostname>[:<port>]/<api base url>' e.g. 'https://to.somedomain.net/api/0.1/'

		:param api_path: The path to the API end-point that you want to call which does not
			include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param params: If :meth:`str.format` field_name replacement field specifications exists
			in the ``api_path`` use this dictionary to perform replacements of the specifications
			with the value(s) in the dictionary that match the parameter name(s) e.g.
			``{param_id}`` or ``{param_id:d}`` in ``api_string`` is replaced by value in
			``params['param_id']``.
		:type params: Union[Dict[str, Any], None]
		:param query_params: URL query params to provide to the end-point e.g.
			``{ 'sort': 'asc', 'maxresults': 200 }`` which translates to something like
			``?sort=asc&maxresults=200`` which is appended to the request URL
		:type query_params: Union[Dict[str, Any], None]
		:return: The base url plus the passed and possibly substituted ``api_path`` to form a
			complete URL to the API resource to request
		:rtype: str
		:raises: ValueError
		"""
		new_api_path = api_path

		# Replace all parameters in the new_api_path path, if required
		try:
			# Make the parameters values safe for adding to URLs.
			# 'params' defaults to None; treat that the same as "no parameters" rather
			# than letting iteritems(None) raise an uncaught AttributeError.
			url_params = {k: compat.quote(str(v)) if isinstance(v, str)\
					else v for k, v in iteritems(params if params is not None else {})}

			log_with_debug_info(logging.DEBUG, u'URL parameters are: [{0}]'.format(url_params))

			qparams = u''
			if query_params:
				# Process the URL query parameters
				qparams = u'?{0}'.format(compat.urlencode(query_params))
				log_with_debug_info(logging.DEBUG,
				                    u'URL query parameters are: [{0}]'.format(qparams))

			new_api_path = api_path.format(**url_params) + qparams
		except KeyError as e:
			msg = (u'Expecting a value for keyword argument [{0}] for format field '
			       u'specification [{1!r}]')
			msg = msg.format(e, api_path)
			log_with_debug_info(logging.ERROR, msg)
			raise ValueError(msg)
		except ValueError as e:
			msg = (u'One or more values do not match the format field specification '
			       u'[{0!r}]; Supplied values: {1!r} ')
			msg = msg.format(api_path, params)
			log_with_debug_info(logging.ERROR, msg)
			raise ValueError(msg)

		retval = compat.urljoin(self.api_base_url, new_api_path)

		log_with_debug_info(logging.DEBUG, u'Built end-point to return: {0}'.format(retval))

		return retval

	def _do_operation(self, operation, api_path, query_params=None, munchify=True,
	                  debug_response=False, expected_status_codes=range(200, 300),
	                  *unused_args, **kwargs):
		"""
		Helper method to perform HTTP operation requests - This is a boilerplate process for HTTP
		operations.

		:param operation: Name of method to call on the :attr:`self._session` object to perform
			the HTTP request
		:type operation: str
		:param api_path: The path to the API end-point that you want to call which does not
			include the URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param: query_params: URL query parameters to provide to the endpoint e.g.
			``{ 'sort': 'asc', 'maxresults': 200 }`` which translates to something like
			``?sort=asc&maxresults=200`` which is appended to the request URL
		:type query_params: Union[Dict[str, Any], None]
		:param: munchify: If :const:`True` encapsulate data to be returned in a
			:class:`munch.Munch` object which allows keys in a Python dictionary to additionally
			have attribute access e.g. ``a_dict['a_key']`` with :mod:`munch` becomes
			``a_dict['a_key']`` or ``a_dict.a_key``
		:type munchify: bool
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure e.g. a ``dict``.
			This method will convert it to JSON before sending it to the API endpoint.
		:type kwargs: Dict[str, Any]
		:param debug_response: If :const:`True`, the actual response data text will be added to
			the log if a JSON decoding exception is encountered.
		:type debug_response: bool
		:type expected_status_codes: Tuple[int]
		:param: expected_status_codes: expected success HTTP status codes. If the user needs to
			override the defaults this parameter can be passed e.g. ``(200, 204,)``
		:type munchify: bool
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[Text, Any], List[Dict[Text, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: miscellaneous.exceptions.OperationError
		"""
		if not self._session:
			msg = u'No session has been created for the API. Have you called create() yet?'
			log_with_debug_info(logging.ERROR, msg)
			raise OperationError(msg)

		response = None
		retdata = None

		# URL parameter substitution values come in through **kwargs
		endpoint = self._build_endpoint(api_path, params=kwargs, query_params=query_params)

		params = {u'headers': self._headers, u'verify': self._verify_cert}
		if u'data' in kwargs:
			params[u'data'] = json.dumps(kwargs[u'data'])

		log_with_debug_info(logging.DEBUG, u'Call parameters: {0}'.format(params))

		# Call the API endpoint
		response = getattr(self._session, operation)(endpoint, **params)

		log_with_debug_info(logging.DEBUG,
		                    u'Response status: {0} {1}'.format(response.status_code,
		                                                       response.reason))

		if response.status_code not in expected_status_codes:
			try:
				retdata = response.json()
			except Exception as e:
				# Invalid JSON payload.
				msg = (u'HTTP Status Code: [{0}]; API response data for end-point [{1}] does not '
				       u'appear to be valid JSON. Cause: {2}.')
				msg = msg.format(response.status_code, endpoint, e)
				if debug_response:
					log_with_debug_info(logging.ERROR,
					                    msg + u' Data: [' + str(response.text) + u']')
				raise InvalidJSONError(msg, resp=response)

			msg = u'{0} request to RESTful API at [{1}] expected status(s) {2}; failed: {3} {4};'\
			      u' Response: {5}'
			msg = msg.format(operation.upper(), endpoint, expected_status_codes,
			                 response.status_code, response.reason, retdata)
			log_with_debug_info(logging.ERROR, msg)
			raise OperationError(msg, resp=response)

		try:
			# Fixed: response.status_code is an int, so comparing against the string
			# tuple ('204',) never matched and an empty "204 No Content" body went
			# through response.json() and raised InvalidJSONError.
			if response.status_code in (204,):  # "204 No Content"
				retdata = {}
			else:
				# Decode the expected JSON
				retdata = response.json()
		except Exception as e:
			# Invalid JSON payload.
			msg = (u'HTTP Status Code: [{0}]; API response data for end-point [{1}] does not '
			       u'appear to be valid JSON. Cause: {2}.')
			msg = msg.format(response.status_code, endpoint, e)
			if debug_response:
				log_with_debug_info(logging.ERROR,
				                    msg + u' Data: [' + str(response.text) + u']')
			raise InvalidJSONError(msg, resp=response)

		retdata = munch.munchify(retdata) if munchify else retdata

		return (retdata[u'response'] if u'response' in retdata else retdata), response

	def get(self, api_path, *args, **kwargs):
		"""
		Perform http get requests

		:param api_path: The path to the API end-point that you want to call which does not
			include the base url e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure. This method will
			convert it to JSON before sending it to the API endpoint. Use ``query_params`` to
			pass a dictionary of query parameters
		:type kwargs: Dict[str, Any]
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: Union[LoginError, OperationError]
		"""
		return self._do_operation(u'get', api_path, *args, **kwargs)

	def post(self, api_path, *args, **kwargs):
		"""
		Perform http post requests

		:param api_path: The path to the API end-point that you want to call which does not
			include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure. This method will
			convert it to JSON before sending it to the API endpoint. Use ``query_params`` to
			pass a dictionary of query parameters
		:type kwargs: Dict[str, Any]
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: Union[LoginError, OperationError]
		"""
		return self._do_operation(u'post', api_path, *args, **kwargs)

	def put(self, api_path, *args, **kwargs):
		"""
		Perform http put requests

		:param api_path: The path to the API end-point that you want to call which does not
			include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure. This method will
			convert it to JSON before sending it to the API endpoint. Use ``query_params`` to
			pass a dictionary of query parameters
		:type kwargs: Dict[str, Any]
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: Union[LoginError, OperationError]
		"""
		return self._do_operation(u'put', api_path, *args, **kwargs)

	def delete(self, api_path, *args, **kwargs):
		"""
		Perform HTTP DELETE requests

		:param api_path: The path to the API end-point that you want to call which does not
			include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure. This method will
			convert it to JSON before sending it to the API endpoint. Use ``query_params`` to
			pass a dictionary of query parameters
		:type kwargs: Dict[str, Any]
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: Union[LoginError, OperationError]
		"""
		return self._do_operation(u'delete', api_path, *args, **kwargs)

	def head(self, api_path, *args, **kwargs):
		"""
		Perform HTTP HEAD requests

		:param api_path: The path to the API end-point that you want to call which does not
			include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure. This method will
			convert it to JSON before sending it to the API endpoint. Use ``query_params`` to
			pass a dictionary of query parameters
		:type kwargs: Dict[str, Any]
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: Union[LoginError, OperationError]
		"""
		return self._do_operation(u'head', api_path, *args, **kwargs)

	def options(self, api_path, *args, **kwargs):
		"""
		Perform HTTP OPTIONS requests

		:param api_path: The path to the API end-point that you want to call which does not
			include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure. This method will
			convert it to JSON before sending it to the API endpoint. Use ``query_params`` to
			pass a dictionary of query parameters
		:type kwargs: Dict[str, Any]
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: Union[LoginError, OperationError]
		"""
		return self._do_operation(u'options', api_path, *args, **kwargs)

	def patch(self, api_path, *args, **kwargs):
		"""
		Perform HTTP PATCH requests

		:param api_path: The path to the API end-point that you want to call which does not
			include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
			substitution parameters as denoted by a valid field_name replacement field
			specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
			``cachegroups/{id:d}``
		:type api_path: str
		:param kwargs: Passed Keyword Parameters. If you need to send JSON data to the endpoint
			pass the keyword parameter ``data`` with the Python data structure. This method will
			convert it to JSON before sending it to the API endpoint. Use ``query_params`` to
			pass a dictionary of query parameters
		:type kwargs: Dict[str, Any]
		:return: Python data structure distilled from JSON from the API request.
		:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]], munch.Munch,\
			List[munch.Munch]], requests.Response]
		:raises: Union[LoginError, OperationError]
		"""
		return self._do_operation(u'patch', api_path, *args, **kwargs)
Apache-TrafficControl
/Apache-TrafficControl-3.1.0.tar.gz/Apache-TrafficControl-3.1.0/trafficops/restapi.py
restapi.py
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
utils
=====

Useful utility methods
"""

# Core Modules
import os
import inspect
import logging

# Python 2 to Python 3 Compatibility
from builtins import str

LOGGER = logging.getLogger(__name__)


def log_with_debug_info(logging_level=logging.INFO, msg=u'', parent=False, separator=u':'):
	"""
	Log ``msg`` prefixed with debugging information about the caller — source file
	name, function name, and line number — gathered via the :mod:`inspect` module
	(reflection).

	:param logging_level: The logging level from the logging module constants
		E.g. logging.INFO, logging.DEBUG, etc.
	:type logging_level: int
	:param msg: The message to log.
	:type msg: Text
	:param parent: If True, use the caller's parent information instead of the
		caller's information in the message.
	:type parent: bool
	:param separator: The string to use for the component separator
	:type separator: Text
	:return: None.  The emitted log record has the form
		'<file name>:<function name>:<line number>: <msg>'
		e.g. 'tosession.py:_build_endpoint:199: This is a message to log.'
	:rtype: None
	"""

	# Stack index 1 is the direct caller; index 2 is the caller's caller.
	frame,\
	file_path,\
	line_number,\
	function_name,\
	_,\
	_ = inspect.stack()[2 if parent else 1]

	file_name = os.path.split(file_path)[-1]

	# inspect.getmodule() can return None (e.g. for frames created by exec'd or
	# interactively defined code); fall back to the root logger in that case
	# instead of raising AttributeError on `.__name__`.
	calling_module_obj = inspect.getmodule(frame)
	calling_module = calling_module_obj.__name__ if calling_module_obj is not None else u'__main__'

	debug_msg = separator.join(map(str, (file_name, function_name, line_number, u' '))) + str(msg)

	# Log to the calling module logger. If calling_module is '__main__', use the root logger.
	logger = logging.getLogger(calling_module if calling_module != u'__main__' else '')
	logger.log(logging_level, debug_msg)
Apache-TrafficControl
/Apache-TrafficControl-3.1.0.tar.gz/Apache-TrafficControl-3.1.0/trafficops/utils.py
utils.py
# Core Modules import logging import sys from requests import Response from typing import Any, Dict, List, Tuple, Union # Third-party Modules import munch import requests.exceptions as rex # Local Modules from .restapi import LoginError, OperationError, api_request, RestApiSession from .utils import log_with_debug_info __all__ = ['TOSession'] LOGGER = logging.getLogger(__name__) class TOSession(RestApiSession): """ Traffic Ops Session Class Once you login to the Traffic Ops API via :meth:`login`, you can call one or more of the methods to retrieve, POST, PUT, DELETE, etc. data to the API. If you are not logged in, an exception will be thrown if you try to call any of the endpoint methods. This API client is simplistic and lightly structured on purpose but adding support for new endpoints routinely takes seconds. Another nice bit of convenience that result data is, by default, wrapped in :class:`munch.Munch` objects, which provide attribute access to the returned dictionaries/hashes - e.g. ``a_dict['a_key']`` with :mod:`munch` becomes ``a_dict.a_key`` or ``a_dict['a_key']``. Also, the lack of rigid structure (loose coupling) means many changes to the Traffic Ops API, as it evolves, will probably go un-noticed (usually additions), which means fewer future problems to potentially fix in user applications. An area of improvement for later is defining classes to represent request data instead of loading up dictionaries for request data. Please see the :ref:`API documentation <to-api>` for the details of the API endpoints. Adding end-point methods .. code-block:: python3 :caption: Endpoint with no URL parameters and no query parameters @api_request('get', 'cdns', ('3.0',)) def get_cdns(self): pass .. code-block:: python3 :caption: End-point with URL parameters and no query parameters @api_request('get', 'cdns/{cdn_id:d}', ('3.0',)) def get_cdn_by_id(self, cdn_id=None): pass .. 
code-block:: python3 :caption: End-point with no URL parameters but with query parameters @api_request('get', 'deliveryservices', ('3.0',)) def get_deliveryservices(self, query_params=None): pass .. code-block:: python3 :caption: End-point with URL parameters and query parameters @api_request('get', 'deliveryservices/xmlId/{xml_id}/sslkeys', ('3.0',)) def get_deliveryservice_ssl_keys_by_xml_id(self, xml_id=None, query_params=None): pass .. code-block:: python3 :caption: End-point with request data @api_request('post', 'cdns', ('3.0',)) def create_cdn(self, data=None): pass .. code-block:: python3 :caption: End-point with URL parameters and request data @api_request('put', 'cdns', ('3.0',)) def update_cdn_by_id(self, cdn_id=None, data=None): pass Calling end-point methods :meth:`get_cdns` calls endpoint :ref:`to-api-cdns` e.g. ``t.get_cdns()`` :meth:`get_types` calls endpoint :ref:`to-api-types`, optionally with query parameters e.g. ``get_foo_data(id=45, query_params={'sort': 'asc'})`` calls endpoint ``GET api/2.x/foo/45?sort=asc`` (presumably) :meth:`cdns_queue_update` calls endpoint :ref:`to-api-cdns-id-queue_update`, with an ID path parameter and a JSON payload e.g. ``cdns_queue_update(id=1, data={'action': 'queue'})`` .. note:: Only a small subset of the API endpoints are implemented. More can be implemented as needed. """ def __init__(self, host_ip, host_port=443, api_version='4.1', ssl=True, headers=None, verify_cert=True): """ The class initializer. :param host_ip: The dns name or ip address of the Traffic Ops host to use to talk to the API :type host_ip: str :param host_port: The port to use when contacting the Traffic Ops API :type host_port: int :param api_version: The version of the API to use when calling end-points on the Traffic Ops API :type api_version: str :param ssl: Should ssl be used? (http vs. 
https) :type ssl: bool :param headers: The http headers to use when contacting the Traffic Ops API :type headers: Dict[str, str] :type verify_cert: bool """ super(TOSession, self).__init__(host_ip=host_ip, api_version=api_version, api_base_path='api/{api_version}/', host_port=host_port, ssl=ssl, headers=headers, verify_cert=verify_cert) self._logged_in = False msg = 'TOSession instance {0:#0x} initialized: Details: {1}' log_with_debug_info(logging.DEBUG, msg.format(id(self), self.__dict__)) def login(self, username, password): """ Login to the Traffic Ops API. :param username: Traffic Ops User Name :type username: str :param password: Traffic Ops User Password :type password: str :return: None :rtype: None :raises: LoginError """ logging.info("Connecting to Traffic Ops at %s...", self.to_url) if not self.is_open: self.create() logging.info("Connected. Authenticating...") self._logged_in = False try: # Try to login to Traffic Ops self.post('user/login', data={'u': username, 'p': password}) self._logged_in = True except rex.SSLError as e: logging.debug("%s", e, stack_info=True, exc_info=True) self.close() msg = ('{0}. This system may have a self-signed certificate. Try creating this' ' TOSession object passing verify_cert=False. e.g. TOSession(..., ' 'verify_cert=False).') msg = msg.format(e) logging.error(msg) logging.warning("disabling certificate verification is not recommended.") raise LoginError(msg) from e except OperationError as e: logging.debug("%s", e, exc_info=True, stack_info=True) msg = 'Logging in to Traffic Ops has failed. Reason: {0}'.format(e) self.close() logging.error(msg) raise OperationError(msg) from e logging.info("Authenticated.") @property def to_url(self): """ The URL without the api portion. (read-only) :return: The URL should match '[\\w\\+\\-\\.]+://[\\w\\+\\-\\.]+(:\\d+)?' e.g https://to.somedomain.net or https://to.somedomain.net:443 :rtype: str """ return self.server_url @property def base_url(self): """ Returns the base url. 
(read-only) :return: The base url should match '[\\w\\+\\-\\.]+://[\\w\\+\\-\\.]+(:\\d+)?' e.g https://to.somedomain.net/api/4.0/ :rtype: str """ return self._api_base_url @property def logged_in(self): """ Read-only property of to determine if user is logged in to Traffic Ops. :return: :const:`True` if connected and logged in, :const:`False` otherwise :rtype: bool """ return self.is_open and self._logged_in # Programmatic Endpoint Methods - These can be created when you need to employ "creative # methods" to form a correlated composite data set from one or more Traffic Ops API call(s) or # employ composite operations against the API. # Also, if the API requires you to retrieve the data via paging, these types of methods can be # useful to perform that type of work too. # These methods need to support similar method signatures as employed by the restapi.api_request # decorator method_name argument. def get_all_deliveryservice_servers(self, *args, **kwargs): """ Get all servers attached to all delivery services via the Traffic Ops API. 
:rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ result_set = [] resp = None limit = 10000 page = 1 munchify = True # Default to True if 'munchify' in kwargs: munchify = kwargs['munchify'] while True: data, resp = self.get_deliveryserviceserver(query_params={'limit':limit, 'page': page}, *args, **kwargs) if not data: break result_set.extend(munch.munchify(data) if munchify else data) page += 1 return result_set, resp # Note: Return last response object received # # PUT ALL API DEFINITIONS BELOW AND UNDER ITS RESPECTIVE PAGE (whether it is 2.0 or 2.1, etc, if its # a CDN put it under CDN header and corresponding calls) # # # API Capabilities # @api_request('get', 'api_capabilities', ('3.0',)) def get_api_capabilities(self, query_params=None): """ Get all API-capability mappings :ref:`to-api-v3-api_capabilities` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # ASN # @api_request('get', 'asns', ('3.0', '4.0', '4.1', '5.0')) def get_asns(self, query_params=None): """ Get ASNs. :ref:`to-api-asns` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'asns', ('3.0', '4.0', '4.1', '5.0')) def create_asn(self, data=None): """ Create ASN :ref:`to-api-asns` :param data: The ASN data to use for ASN creation. 
:type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'asns', ('3.0', '4.0', '4.1', '5.0')) def update_asn(self, query_params=None): """ Update ASN :ref:`to-api-asns-id` :param asn_id: The ID of the ASN to update :type asn_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'asns', ('3.0', '4.0', '4.1', '5.0')) def delete_asn(self, query_params=None): """ Delete ASN :to-api-asns-id: :param asn_id: The ID of the ASN to delete :type asn_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Cache Statistics # @api_request('get', 'cache_stats', ('3.0', '4.0', '4.1', '5.0')) def get_cache_stats(self, query_params=None): """ Retrieves statistics about the CDN. :ref:`to-api-cache_stats` :param query_params: See API page for more information on accepted params :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'caches/stats', ('3.0', '4.0', '4.1', '5.0')) def get_traffic_monitor_cache_stats(self): """ Retrieves cache stats from Traffic Monitor. Also includes rows for aggregates :ref:`to-api-caches-stats` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Cache Groups # @api_request('get', 'cachegroups', ('3.0', '4.0', '4.1', '5.0')) def get_cachegroups(self, query_params=None): """ Get Cache Groups. 
:ref:`to-api-cachegroups` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'cachegroups/{cache_group_id:d}/parameters', ('3.0',)) def get_cachegroup_parameters(self, cache_group_id=None): """ Get a cache groups parameters. This endpoint has been deprecated and will no longer be available as of TO API v4. :ref:`to-api-v3-cachegroups-id-parameters` :param cache_group_id: The cache group Id :type cache_group_id: int :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'cachegroupparameters', ('3.0',)) def get_all_cachegroup_parameters(self): """ A collection of all cache group parameters. This endpoint has been deprecated and will no longer be available as of TO API v4. :ref:`to-api-v3-cachegroupparameters` :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'cachegroups', ('4.0', '4.1', '5.0',)) def create_cachegroups(self, data=None): """ Create a Cache Group :ref:`to-api-cachegroups` :param data: The parameter data to use for cachegroup creation. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'cachegroups/{cache_group_id:d}', ('4.0', '4.1', '5.0',)) def update_cachegroups(self, cache_group_id=None, data=None): """ Update a cache group :ref:`to-api-cachegroups-id` :param cache_group_id: The cache group id to update :type cache_group_id: Integer :param data: The parameter data to use for cachegroup creation. 
:type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'cachegroups/{cache_group_id:d}', ('4.0', '4.1', '5.0',)) def delete_cachegroups(self, cache_group_id=None): """ Delete a cache group :ref:`to-api-cachegroups-id` :param cache_group_id: The cache group id to update :type cache_group_id: Integer :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'cachegroups/{cache_group_id:d}/queue_update', ('4.0', '4.1', '5.0',)) def cachegroups_queue_update(self, cache_group_id=None, data=None): """ Queue Updates by Cache Group ID :ref:`to-api-cachegroups-id-queue_update` :param cache_group_id: The Cache Group Id :type cache_group_id: int :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'cachegroupparameters/{cache_group_id:d}/{parameter_id:d}', ('3.0',)) def delete_cache_group_parameters(self, cache_group_id=None, parameter_id=None): """ Delete a cache group parameter association. This endpoint has been deprecated and will no longer be available as of TO API v4. 
:ref:`to-api-v3-cachegroupparameters-id-parameterID` :param cache_group_id: The cache group id in which the parameter will be deleted :type cache_group_id: int :param parameter_id: The parameter id which will be disassociated :type parameter_id: int :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # Capabilities # @api_request('get', 'capabilities', ('3.0',)) def get_capabilities(self, query_params=None): """ Retrieves capabilities :ref:`to-api-v3-capabilities` :param query_params: See API page for more information on accepted parameters :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # CDN # @api_request('get', 'cdns', ('3.0', '4.0', '4.1', '5.0')) def get_cdns(self, query_params=None): """ Get all CDNs. :ref:`to-api-cdns` :param query_params: See API page for more information on accepted parameters :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'cdns', ('3.0', '4.0', '4.1', '5.0')) def create_cdn(self, data=None): """ Create a new CDN. :ref:`to-api-cdns` :param data: The parameter data to use for cdn creation. :type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'cdns/{cdn_id:d}', ('3.0', '4.0', '4.1', '5.0')) def update_cdn_by_id(self, cdn_id=None, data=None): """ Update a CDN by Id. :ref:`to-api-cdns-id` :param cdn_id: The CDN id :type cdn_id: int :param data: The parameter data to use for cdn update. :type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'cdns/{cdn_id:d}', ('3.0', '4.0', '4.1', '5.0')) def delete_cdn_by_id(self, cdn_id=None): """ Delete a CDN by Id. 
:ref:`to-api-cdns-id` :param cdn_id: The CDN id :type cdn_id: int :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'cdns/{cdn_id:d}/queue_update', ('3.0', '4.0', '4.1', '5.0')) def cdns_queue_update(self, cdn_id=None, data=None): """ Queue Updates by CDN Id. :ref:`to-api-cdns-id-queue_update` :param cdn_id: The CDN Id :type cdn_id: int :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # CDN Health/Usage # @api_request('get', 'cdns/health', ('3.0', '4.0', '4.1', '5.0')) def get_cdns_health(self): """ Retrieves the health of all locations (cache groups) for all CDNs :ref:`to-api-cdns-health` :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'cdns/{cdn_name:s}/health', ('3.0', '4.0', '4.1', '5.0')) def get_cdn_health_by_name(self, cdn_name=None): """ Retrieves the health of all locations (cache groups) for a given CDN :ref:`to-api-cdns-name-health` :param cdn_name: The CDN name to find health for :type cdn_name: String :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'cdns/capacity', ('3.0', '4.0', '4.1', '5.0')) def get_cdns_capacity(self): """ Retrieves the aggregate capacity percentages of all locations (cache groups) for a given CDN. :ref:`to-api-cdns-capacity` :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # CDN Routing # @api_request('get', 'cdns/routing', ('3.0', '4.0', '4.1', '5.0')) def get_cdns_routing(self): """ Retrieves the aggregate routing percentages of all locations (cache groups) for a given CDN. 
:ref:`to-api-cdns-routing` :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # CDN Domains # @api_request('get', 'cdns/domains', ('3.0', '4.0', '4.1', '5.0')) def get_cdns_domains(self): """ Retrieves the different CDN domains :ref:`to-api-cdns-domains` :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # CDN Topology # @api_request('get', 'cdns/{cdn_name:s}/configs/monitoring', ('3.0', '4.0', '4.1', '5.0')) def get_cdn_monitoring_info(self, cdn_name=None): """ Retrieves CDN monitoring information :ref:`to-api-cdns-name-configs-monitoring` :param cdn_name: The CDN name to find configs for :type cdn_name: String :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # DNSSEC Keys # @api_request('get', 'cdns/name/{cdn_name:s}/dnsseckeys', ('3.0', '4.0', '4.1', '5.0')) def get_cdn_dns_sec_keys(self, cdn_name=None): """ Gets a list of dnsseckeys for a CDN and all associated Delivery Services :ref:`to-api-cdns-name-name-dnsseckeys` :param cdn_name: The CDN name to find dnsseckeys info for :type cdn_name: String :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'cdns/name/{cdn_name:s}/dnsseckeys', ('3.0', '4.0', '4.1', '5.0')) def delete_cdn_dns_sec_keys(self, cdn_name=None): """ Delete dnssec keys for a cdn and all associated delivery services :ref:`to-api-cdns-name-name-dnsseckeys` :param cdn_name: The CDN name to delete dnsseckeys info for :type cdn_name: String :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'cnds/dnsseckeys/generate', ('3.0', '4.0', '4.1', '5.0')) def create_cdn_dns_sec_keys(self, data=None): """ Generates ZSK and KSK keypairs for a CDN and all associated Delivery Services :ref:`to-api-cdns-dnsseckeys-generate` :param data: The parameter data to use for cachegroup creation. 
:type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # CDN SSL Keys # @api_request('get', 'cdns/name/{cdn_name:s}/sslkeys', ('3.0', '4.0', '4.1', '5.0')) def get_cdn_ssl_keys(self, cdn_name=None): """ Returns ssl certificates for all Delivery Services that are a part of the CDN. :ref:`to-api-cdns-name-name-sslkeys` :param cdn_name: The CDN name to find ssl keys for :type cdn_name: String :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # Change Logs # @api_request('get', 'logs', ('3.0', '4.0', '4.1', '5.0')) def get_change_logs(self, query_params=None): """ Retrieve all change logs from traffic ops :ref:`to-api-logs` :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'logs/newcount', ('3.0', '4.0', '4.1', '5.0')) def get_change_logs_newcount(self): """ Get amount of new logs from traffic ops :ref:`to-api-logs-newcount` :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # Delivery Service # @api_request('get', 'deliveryservices', ('3.0', '4.0', '4.1', '5.0')) def get_deliveryservices(self, query_params=None): """ Retrieves all delivery services (if admin or ops) or all delivery services assigned to user. :ref:`to-api-deliveryservices` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'deliveryservices', ('3.0', '4.0', '4.1', '5.0')) def create_deliveryservice(self, data=None): """ Allows user to create a delivery service. 
:ref:`to-api-deliveryservices` :param data: The request data structure for the API request :type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'deliveryservices/{delivery_service_id:d}', ('3.0', '4.0', '4.1', '5.0')) def update_deliveryservice_by_id(self, delivery_service_id=None, data=None): """ Update a Delivery Service by Id. :ref:`to-api-deliveryservices-id` :param delivery_service_id: The delivery service Id :type delivery_service_id: int :param data: The request data structure for the API request :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'deliveryservices/{delivery_service_id:d}/safe', ('3.0', '4.0', '4.1', '5.0')) def update_deliveryservice_safe(self, delivery_service_id=None, data=None): """ Allows a user to edit limited fields of a Delivery Service. :ref:`to-api-deliveryservices-id-safe` :param delivery_service_id: The Delivery Service Id :type delivery_service_id: int :param data: The request data structure for the API request :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'deliveryservices/{delivery_service_id:d}', ('3.0', '4.0', '4.1', '5.0')) def delete_deliveryservice_by_id(self, delivery_service_id=None): """ Allows user to delete a delivery service. 
:ref:`to-api-deliveryservices-id` :param delivery_service_id: The delivery service Id :type delivery_service_id: int :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # Delivery Service Health # @api_request('get', 'deliveryservices/{delivery_service_id:d}/health', ('3.0', '4.0', '4.1', '5.0')) def get_delivery_service_health(self, delivery_service_id=None): """ Retrieves the health of all locations (cache groups) for a delivery service. Delivery service must be assigned to user if user is not admin or operations. :ref:`to-api-deliveryservices-id-health` :param delivery_service_id: The delivery service Id :type delivery_service_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'deliveryservices/{delivery_service_id:d}/capacity', ('3.0', '4.0', '4.1', '5.0')) def get_delivery_service_capacity(self, delivery_service_id=None): """ Retrieves the capacity percentages of a delivery service. Delivery service must be assigned to user if user is not admin or operations. :ref:`to-api-deliveryservices-id-capacity` :param delivery_service_id: The delivery service Id :type delivery_service_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Delivery Service Server # @api_request('get', 'deliveryserviceserver', ('3.0', '4.0', '4.1', '5.0')) def get_deliveryserviceserver(self, query_params=None): """ Retrieves delivery service / server assignments. 
(Allows pagination and limits) :ref:`to-api-deliveryserviceserver` :param query_params: The required url query parameters for the call :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'deliveryserviceserver', ('3.0', '4.0', '4.1', '5.0')) def assign_deliveryservice_servers_by_ids(self, data=None): """ Assign servers by id to a Delivery Service. (New Method) :ref:`to-api-deliveryserviceserver` :param data: The required data to create server associations to a delivery service :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'deliveryservices/{xml_id}/servers', ('3.0', '4.0', '4.1', '5.0')) def assign_deliveryservice_servers_by_names(self, xml_id=None, data=None): """ Assign servers by name to a Delivery Service by xmlId. :ref:`to-api-deliveryservices-xmlid-servers` :param xml_id: The XML Id of the delivery service :type xml_id: str :param data: The required data to assign servers to a delivery service :type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'deliveryserviceserver/{delivery_service_id:d}/{server_id:d}',('3.0', '4.0', '4.1', '5.0')) def delete_deliveryservice_servers_by_id(self, delivery_service_id=None, server_id=None): """ Removes a server (cache) from a delivery service. 
:ref:`to-api-deliveryserviceserver-dsid-serverid` :param delivery_service_id: The delivery service id :type delivery_service_id: int :param server_id: The server id to remove from delivery service :type server_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'deliveryservices/{delivery_service_id:d}/servers', ('3.0', '4.0', '4.1', '5.0')) def get_deliveryservice_servers(self, delivery_service_id=None): """ Retrieves properties of CDN EDGE or ORG servers assigned to a delivery service. :ref:`to-api-deliveryservices-id-servers` :param delivery_service_id: The delivery service Id :type delivery_service_id: int :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'deliveryservices/{delivery_service_id:d}/servers/eligible', ('3.0', '4.0', '4.1', '5.0')) def get_deliveryservice_ineligible_servers(self, delivery_service_id=None): """ Retrieves properties of CDN EDGE or ORG servers not eligible for assignment to a delivery service. :ref:`to-api-deliveryservices-id-servers-eligible` :param delivery_service_id: The delivery service Id :type delivery_service_id: int :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # Delivery Service SSL Keys # @api_request('get', 'deliveryservices/xmlId/{xml_id}/sslkeys', ('3.0', '4.0', '4.1', '5.0')) def get_deliveryservice_ssl_keys_by_xml_id(self, xml_id=None, query_params=None): """ Get SSL keys for a Delivery Service by xmlId. 
        :ref:`to-api-deliveryservices-xmlid-xmlid-sslkeys`
        :param xml_id: The Delivery Service XML id
        :type xml_id: str
        :param query_params: The url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'deliveryservices/xmlId/{xml_id}/sslkeys', ('3.0', '4.0', '4.1', '5.0'))
    def delete_deliveryservice_ssl_keys_by_xml_id(self, xml_id=None, query_params=None):
        """
        Delete SSL keys for a Delivery Service by xmlId.

        :ref:`to-api-deliveryservices-xmlid-xmlid-sslkeys`
        :param xml_id: The Delivery Service xmlId
        :type xml_id: str
        :param query_params: The url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'deliveryservices/sslkeys/generate', ('3.0', '4.0', '4.1', '5.0'))
    def generate_deliveryservice_ssl_keys(self, data=None):
        """
        Generate an SSL certificate. (self-signed)

        :ref:`to-api-deliveryservices-sslkeys-generate`
        :param data: The parameter data to use for Delivery Service SSL key generation.
        :type data: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'deliveryservices/sslkeys/add', ('3.0', '4.0', '4.1', '5.0'))
    def add_ssl_keys_to_deliveryservice(self, data=None):
        """
        Add SSL keys to a Delivery Service.

        :ref:`to-api-deliveryservices-sslkeys-add`
        :param data: The parameter data to use for adding SSL keys to a Delivery Service.
        :type data: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Delivery Service URL Sig Keys
    #
    @api_request('post', 'deliveryservices/xmlId/{xml_id}/urlkeys/generate', ('3.0', '4.0', '4.1', '5.0'))
    def generate_deliveryservice_url_signature_keys(self, xml_id=None):
        """
        Generate URL Signature Keys for a Delivery Service by xmlId.

        :ref:`to-api-deliveryservices-xmlid-xmlid-urlkeys-generate`
        :param xml_id: The Delivery Service xmlId
        :type xml_id: str
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Delivery Service Regexes
    #
    @api_request('get', 'deliveryservices_regexes', ('3.0', '4.0', '4.1', '5.0'))
    def get_deliveryservices_regexes(self):
        """
        Get RegExes for all Delivery Services.

        :ref:`to-api-deliveryservices_regexes`
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('get', 'deliveryservices/{delivery_service_id:d}/regexes', ('3.0', '4.0', '4.1', '5.0'))
    def get_deliveryservice_regexes_by_id(self, delivery_service_id=None, query_params=None):
        """
        Get RegExes for a Delivery Service by Id.

        :ref:`to-api-deliveryservices-id-regexes`
        :param delivery_service_id: The delivery service Id
        :type delivery_service_id: int
        :param query_params: The url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'deliveryservices/{delivery_service_id:d}/regexes', ('3.0', '4.0', '4.1', '5.0'))
    def create_deliveryservice_regexes(self, delivery_service_id=None, data=None):
        """
        Create a regex for a delivery service

        :ref:`to-api-deliveryservices-id-regexes`
        :param delivery_service_id: The delivery service Id
        :type delivery_service_id: int
        :param data: The required data to create delivery service regexes
        :type data: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'deliveryservices/{delivery_service_id:d}/regexes/{regex_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_deliveryservice_regexes(self, delivery_service_id=None, regex_id=None, query_params=None):
        """
        Update a regex for a delivery service

        :ref:`to-api-deliveryservices-id-regexes-rid`
        :param delivery_service_id: The delivery service Id
        :type delivery_service_id: int
        :param regex_id: The delivery service regex id
        :type regex_id: int
        :param query_params: The required data to update delivery service regexes
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'deliveryservices/{delivery_service_id:d}/regexes/'
                           '{delivery_service_regex_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_deliveryservice_regex_by_regex_id(self, delivery_service_id=None,
                                                 delivery_service_regex_id=None):
        """
        Delete a RegEx by Id for a Delivery Service by Id.

        :ref:`to-api-deliveryservices-id-regexes-rid`
        :param delivery_service_id: The delivery service Id
        :type delivery_service_id: int
        :param delivery_service_regex_id: The delivery service regex Id
        :type delivery_service_regex_id: int
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Delivery Service Statistics
    #
    @api_request('get', 'deliveryservice_stats', ('3.0', '4.0', '4.1', '5.0'))
    def get_delivery_service_stats(self, query_params=None):
        """
        Retrieves statistics on the delivery services.

        :ref:`to-api-deliveryservice_stats`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Divisions
    #
    @api_request('get', 'divisions', ('3.0', '4.0', '4.1', '5.0'))
    def get_divisions(self, query_params=None):
        """
        Get all divisions.
        :ref:`to-api-divisions`
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'divisions/{division_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_division(self, division_id=None, query_params=None):
        """
        Update a division by division id

        :ref:`to-api-divisions-id`
        :param division_id: The division id to update
        :type division_id: int
        :param query_params: The required data to update the division
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'divisions', ('3.0', '4.0', '4.1', '5.0'))
    def create_division(self, data=None):
        """
        Create a division

        :ref:`to-api-divisions`
        :param data: The data for the division to be created
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'divisions/{division_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_division(self, division_id=None, query_params=None):
        """
        Delete a division by division id

        :ref:`to-api-divisions-id`
        :param division_id: The division id to delete
        :type division_id: int
        :param query_params: The url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Federation
    #
    @api_request('get', 'federations', ('3.0', '4.0', '4.1', '5.0'))
    def get_federations(self):
        """
        Retrieves a list of federation mappings (aka federation resolvers) for the current user.

        :ref:`to-api-federations`
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'federations', ('3.0', '4.0', '4.1', '5.0'))
    def create_federation(self, data=None):
        """
        Allows a user to add federations for their delivery service(s).

        :ref:`to-api-federations`
        :param data: The federation data to create
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('get', 'cdns/{cdn_name:s}/federations', ('3.0', '4.0', '4.1', '5.0'))
    def get_federations_for_cdn(self, cdn_name=None, query_params=None):
        """
        Retrieves a list of federations for a cdn.

        :ref:`to-api-cdns-name-federations`
        :param cdn_name: The CDN name to find federation
        :type cdn_name: str
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'cdns/{cdn_name:s}/federations', ('3.0', '4.0', '4.1', '5.0'))
    def create_federation_in_cdn(self, cdn_name=None, data=None):
        """
        Create a federation.

        :ref:`to-api-cdns-name-federations`
        :param cdn_name: The CDN name in which to create the federation
        :type cdn_name: str
        :param data: The federation data to create
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'cdns/{cdn_name:s}/federations/{federation_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_federation_in_cdn(self, cdn_name=None, federation_id=None, query_params=None):
        """
        Update a federation.

        :ref:`to-api-cdns-name-federations-id`
        :param cdn_name: The CDN name to find federation
        :type cdn_name: str
        :param federation_id: The federation id
        :type federation_id: int
        :param query_params: The federation data to use for the update
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'cdns/{cdn_name:s}/federations/{federation_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_federation_in_cdn(self, cdn_name=None, federation_id=None):
        """
        Delete a federation.

        :ref:`to-api-cdns-name-federations-id`
        :param cdn_name: The CDN name to find federation
        :type cdn_name: str
        :param federation_id: The federation id
        :type federation_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Federation Delivery Service
    #
    @api_request('get', 'federations/{federation_id:d}/deliveryservices', ('3.0', '4.0', '4.1', '5.0'))
    def get_federation_delivery_services(self, federation_id=None):
        """
        Retrieves delivery services assigned to a federation

        :ref:`to-api-federations-id-deliveryservices`
        :param federation_id: The federation id
        :type federation_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'federations/{federation_id:d}/deliveryservices', ('3.0', '4.0', '4.1', '5.0'))
    def assign_delivery_services_to_federations(self, federation_id=None, data=None):
        """
        Create one or more federation / delivery service assignments.

        :ref:`to-api-federations-id-deliveryservices`
        :param federation_id: The federation id
        :type federation_id: int
        :param data: The assignment(s) to create
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Federation Federation Resolver
    #
    @api_request('get', 'federations/{federation_id:d}/federation_resolvers', ('3.0', '4.0', '4.1', '5.0'))
    def get_federation_resolvers_by_id(self, federation_id=None):
        """
        Retrieves federation resolvers assigned to a federation

        :ref:`to-api-federations-id-federation_resolvers`
        :param federation_id: The federation id
        :type federation_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'federations/{federation_id:d}/federation_resolvers', ('3.0', '4.0', '4.1', '5.0'))
    def assign_federation_resolver_to_federations(self, federation_id=None, data=None):
        """
        Create one or more federation / federation resolver assignments.

        :ref:`to-api-federations-id-federation_resolvers`
        :param federation_id: The federation id
        :type federation_id: int
        :param data: The assignment(s) to create
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Federation Resolver
    #
    @api_request('get', 'federation_resolvers', ('3.0', '4.0', '4.1', '5.0'))
    def get_federation_resolvers(self, query_params=None):
        """
        Get federation resolvers.

        :ref:`to-api-federation_resolvers`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'federation_resolvers', ('3.0', '4.0', '4.1', '5.0'))
    def create_federation_resolver(self, data=None):
        """
        Create a federation resolver.

        :param data: The update action. QueueUpdateRequest() can be used for this argument also.
:type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'federation_resolvers/{federation_resolver_id:d}', ('3.0', '4.0', '4.1', '5.0')) def delete_federation_resolver(self, federation_resolver_id=None): """ Delete a federation resolver. :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Federation User # @api_request('get', 'federations/{federation_id:d}/users', ('3.0', '4.0', '4.1', '5.0')) def get_federation_users(self, federation_id=None): """ Retrieves users assigned to a federation. :ref:`to-api-federations-id-users` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'federations/{federation_id:d}/users', ('3.0', '4.0', '4.1', '5.0')) def create_federation_user(self, federation_id=None, data=None): """ Create one or more federation / user assignments. :ref:`to-api-federations-id-users` :param federation_id: Federation ID :type federation_id: int :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'federations/{federation_id:d}/users/{user_id:d}', ('3.0', '4.0', '4.1', '5.0')) def delete_federation_user(self, federation_id=None, user_id=None): """ Delete one or more federation / user assignments. 
:ref:`to-api-federations-id-users-id` :param federation_id: Federation ID :type federation_id: int :param user_id: Federation User ID :type user_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # ISO # @api_request('get', 'osversions', ('3.0', '4.0', '4.1', '5.0')) def get_osversions(self): """ Get all OS versions for ISO generation and the directory where the kickstarter files are found. The values are retrieved from osversions.json found in either ``/var/www/files`` or in the location defined by the kickstart.files.location parameter (if defined). :ref:`to-api-osversions` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ #TODO: this currently doesn't work, as /isos wasn't rewritten yet @api_request('post', 'isos', ('3.0', '4.0', '4.1', '5.0')) def generate_iso(self, data=None): """ Generate an ISO :ref:`to-api-isos` :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Jobs # @api_request('get', 'jobs', ('3.0', '4.0', '4.1', '5.0')) def get_jobs(self, query_params=None): """ Get all content-invalidation jobs (tenancy permitting). :ref:`to-api-jobs` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'jobs', ('3.0', '4.0', '4.1', '5.0')) def create_job(self, data=None): """ Creates a new content-invalidation job sorted by start time. :ref:`to-api-jobs` :param data: The content-invalidation job object that will be created. 
:type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'jobs', ('3.0', '4.0', '4.1', '5.0')) def update_job(self, data=None, query_params=None): """ Replaces a content-invalidation job with the one passed. :param data: The content-invalidation job with which the identified job will be replaced. :type data: Dict[str, Any] :param query_params: 'id' is a required parameter, identifying the job being updated. :ref:`to-api-jobs` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'jobs', ('3.0', '4.0', '4.1', '5.0')) def delete_job(self, query_params=None): """ Deletes a content-invalidation job. :ref:`to-api-jobs` :param query_params: 'id' is a required parameter, identifying the job being deleted. :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Parameter # @api_request('get', 'parameters', ('3.0', '4.0', '4.1', '5.0')) def get_parameters(self, query_params=None): """ Get all Parameters. :ref:`to-api-parameters` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'profiles/{profile_id:d}/parameters', ('3.0', '4.0', '4.1', '5.0')) def get_parameters_by_profile_id(self, profile_id=None): """ Get all Parameters associated with a Profile by Id. :ref:`to-api-profiles-id-parameters` :param profile_id: The profile Id :type profile_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'profiles/name/{profile_name}/parameters', ('3.0', '4.0', '4.1', '5.0')) def get_parameters_by_profile_name(self, profile_name=None): """ Get all Parameters associated with a Profile by Name. 
        :ref:`to-api-profiles-name-name-parameters`
        :param profile_name: The profile name
        :type profile_name: str
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'parameters', ('3.0', '4.0', '4.1', '5.0'))
    def create_parameter(self, data=None):
        """
        Create Parameter

        :ref:`to-api-parameters`
        :param data: The parameter(s) data to use for parameter creation.
        :type data: Union[Dict[str, Any], List[Dict[str, Any]]]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'parameters/{parameter_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_parameter(self, parameter_id=None, query_params=None):
        """
        Update Parameter

        :ref:`to-api-parameters-id`
        :param parameter_id: The parameter id to update
        :type parameter_id: int
        :param query_params: The parameter data to use for the update
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'parameters/{parameter_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_parameter(self, parameter_id=None):
        """
        Delete Parameter

        :ref:`to-api-parameters-id`
        :param parameter_id: The parameter id to delete
        :type parameter_id: int
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Physical Location
    #
    @api_request('get', 'phys_locations', ('3.0', '4.0', '4.1', '5.0'))
    def get_physical_locations(self, query_params=None):
        """
        Get Physical Locations.

        :ref:`to-api-phys_locations`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'phys_locations/{physical_location_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_physical_location(self, physical_location_id=None, query_params=None):
        """
        Update Physical Location by id

        :ref:`to-api-phys_locations-id`
        :param physical_location_id: The id to update
        :type physical_location_id: int
        :param query_params: The physical location data to use for the update
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'phys_locations/{physical_location_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_physical_location(self, physical_location_id=None, query_params=None):
        """
        Delete Physical Location by id

        :ref:`to-api-phys_locations-id`
        :param physical_location_id: The id to delete
        :type physical_location_id: int
        :param query_params: The url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Plugins
    #
    @api_request('get', 'plugins', ('3.0', '4.0', '4.1', '5.0'))
    def get_plugins(self):
        """
        Retrieves the list of plugins.

        :ref:`to-api-plugins`
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Profiles
    #
    @api_request('get', 'profiles', ('3.0', '4.0', '4.1', '5.0'))
    def get_profiles(self, query_params=None):
        """
        Get Profiles.

        :ref:`to-api-profiles`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'profiles', ('3.0', '4.0', '4.1', '5.0'))
    def create_profile(self, data=None):
        """
        Create a profile

        :ref:`to-api-profiles`
        :param data: The data for the profile to be created
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'profiles/name/{new_profile_name:s}/copy/{copy_profile_name:s}',
                 ('3.0', '4.0', '4.1', '5.0'))
    def copy_profile(self, new_profile_name=None, copy_profile_name=None, data=None):
        """
        Copy profile to a new profile. The new profile name must not exist.

        :ref:`to-api-profiles-name-name-copy-copy`
        :param new_profile_name: The name of profile to copy to
        :type new_profile_name: str
        :param copy_profile_name: The name of profile copy from
        :type copy_profile_name: str
        :param data: The request body, if any
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'profiles/{profile_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_profile_by_id(self, profile_id=None, data=None):
        """
        Update Profile by Id.

        :ref:`to-api-profiles-id`
        :param profile_id: The profile Id
        :type profile_id: int
        :param data: The parameter data to edit
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'profiles/{profile_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_profile_by_id(self, profile_id=None):
        """
        Delete Profile by Id.

        :ref:`to-api-profiles-id`
        :param profile_id: The profile Id
        :type profile_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Profile Parameters
    #
    # NOTE: 'paramater' is a long-standing misspelling in this public method
    # name; it cannot be renamed without breaking existing callers.
    @api_request('post', 'profileparameters', ('3.0', '4.0', '4.1', '5.0'))
    def associate_paramater_to_profile(self, data=None):
        """
        Associate parameter to profile.
        :ref:`to-api-profileparameters`
        :param data: The profile / parameter association(s) to create
        :type data: Union[Dict[str, Any], List[Dict[str, Any]]]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'profiles/{profile_id:d}/parameters', ('3.0', '4.0', '4.1', '5.0'))
    def associate_parameters_by_profile_id(self, profile_id=None, data=None):
        """
        Associate Parameters to a Profile by Id.

        :ref:`to-api-profiles-id-parameters`
        :param profile_id: The profile id
        :type profile_id: int
        :param data: The parameter data to associate
        :type data: Union[Dict[str, Any], List[Dict[str, Any]]]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'profileparameter', ('3.0', '4.0', '4.1', '5.0'))
    def assign_profile_to_parameter_ids(self, data=None):
        """
        Create one or more profile / parameter assignments.

        :ref:`to-api-profileparameter`
        :param data: The data to assign
        :type data: Union[Dict[str, Any], List[Dict[str, Any]]]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'parameterprofile', ('3.0', '4.0', '4.1', '5.0'))
    def assign_parameter_to_profile_ids(self, data=None):
        """
        Create one or more parameter / profile assignments.

        :ref:`to-api-profileparameter`
        :param data: The data to assign
        :type data: Union[Dict[str, Any], List[Dict[str, Any]]]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'profiles/name/{profile_name}/parameters', ('3.0', '4.0', '4.1', '5.0'))
    def associate_parameters_by_profile_name(self, profile_name=None, data=None):
        """
        Associate Parameters to a Profile by Name.

        :ref:`to-api-profiles-name-name-parameters`
        :param profile_name: The profile name
        :type profile_name: str
        :param data: The parameter data to associate
        :type data: Union[Dict[str, Any], List[Dict[str, Any]]]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'profileparameters/{profile_id:d}/{parameter_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_profile_parameter_association_by_id(self, profile_id=None, parameter_id=None):
        """
        Delete Parameter association by Id for a Profile by Id.

        :ref:`to-api-profileparameters-profileID-parameterID`
        :param profile_id: The profile id
        :type profile_id: int
        :param parameter_id: The parameter id
        :type parameter_id: int
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Regions
    #
    @api_request('get', 'regions', ('3.0', '4.0', '4.1', '5.0'))
    def get_regions(self, query_params=None):
        """
        Get Regions.

        :ref:`to-api-regions`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'regions', ('3.0', '4.0', '4.1', '5.0'))
    def create_region(self, query_params=None, data=None):
        """
        Create a region

        :ref:`to-api-regions`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :param data: The data for the region to be created
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'regions', ('3.0', '4.0', '4.1', '5.0'))
    def delete_region(self, query_params=None):
        """
        Delete a region by name or ID as a query parameter

        :ref:`to-api-regions-id`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'regions/{region_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_region(self, region_id=None):
        """
        Update a region

        :ref:`to-api-regions-id`
        :param region_id: The region to update
        :type region_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Roles
    #
    @api_request('get', 'roles', ('3.0', '4.0', '4.1', '5.0'))
    def get_roles(self):
        """
        Get Roles.

        :ref:`to-api-roles`
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'roles', ('3.0', '4.0', '4.1', '5.0'))
    def create_role(self, data=None):
        """
        Create a new Role.

        :ref:`to-api-roles`
        :param data: A new Role object to be created.
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'roles', ('3.0', '4.0', '4.1', '5.0'))
    def update_role(self, data=None, query_params=None):
        """
        Update a Role.

        :ref:`to-api-roles`
        :param data: A new Role object which will replace the one identified.
        :type data: Dict[str, Any]
        :param query_params: 'id' is a required parameter, defining the Role to be replaced.
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'roles', ('3.0', '4.0', '4.1', '5.0'))
    def delete_role(self, query_params=None):
        """
        Delete a Role.

        :ref:`to-api-roles`
        :param query_params: The url query parameters identifying the Role to delete
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Server
    #
    @api_request('get', 'servers', ('3.0', '4.0', '4.1', '5.0'))
    def get_servers(self, query_params=None):
        """
        Get Servers.

        :ref:`to-api-servers`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('get', 'servers/{server_id:d}/deliveryservices', ('3.0', '4.0', '4.1', '5.0'))
    def get_server_delivery_services(self, server_id=None):
        """
        Retrieves all delivery services assigned to the server

        :ref:`to-api-servers-id-deliveryservices`
        :param server_id: The server id to retrieve
        :type server_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('get', 'servers/details?hostName={name}', ('3.0',))
    def get_server_details(self, name=None):
        """
        Get servers/details

        :ref:`to-api-v3-servers-details`
        :param name: The (short) hostname of the server to fetch
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]

        .. deprecated:: 3.0
            The endpoint this represents has been removed from APIv4 and clients
            should use get_servers instead.
        """

    @api_request('post', 'servercheck', ('3.0', '4.0', '4.1', '5.0'))
    def create_servercheck(self, data=None):
        """
        Post a server check result to the serverchecks table.

        :ref:`to-api-servercheck`
        :param data: The server check result data
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'servers', ('3.0', '4.0', '4.1', '5.0'))
    def create_server(self, data=None):
        """
        Create a new Server.

        :ref:`to-api-servers`
        :param data: The parameter data to use for server creation
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'servers/{server_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_server_by_id(self, server_id=None, data=None):
        """
        Update a Server by Id.

        :ref:`to-api-servers-id`
        :param server_id: The server Id
        :type server_id: int
        :param data: The parameter data to edit
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'servers/{server_id:d}/status', ('3.0', '4.0', '4.1', '5.0'))
    def update_server_status_by_id(self, server_id=None, data=None):
        """
        Update server_status by Id.

        :ref:`to-api-servers-id-status`
        :param server_id: The server Id
        :type server_id: int
        :param data: The status update payload (see :ref:`to-api-servers-id-status`)
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'servers/{server_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_server_by_id(self, server_id=None):
        """
        Delete a Server by Id.

        :ref:`to-api-servers-id`
        :param server_id: The server Id
        :type server_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'servers/{server_id:d}/queue_update', ('3.0', '4.0', '4.1', '5.0'))
    def servers_queue_update(self, server_id=None, data=None):
        """
        Queue Updates by Server Id.

        :ref:`to-api-servers-id-queue_update`
        :param server_id: The server Id
        :type server_id: int
        :param data: The update action. QueueUpdateRequest() can be used for this argument also.
        :type data: Dict[str, Any]
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('get', 'servers/{server_name}/update_status', ('3.0', '4.0', '4.1', '5.0'))
    def get_server_update_status(self, server_name=None):
        """
        Gets the current update status of a server named ``server_name``.

        :ref:`to-api-servers-hostname-update_status`
        :param server_name: The (short) hostname of the server for which the update status will be fetched
        :rtype: Tuple[Dict[str, Any], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Static DNS Entries
    #
    @api_request('get', 'staticdnsentries', ('3.0', '4.0', '4.1', '5.0'))
    def get_staticdnsentries(self, query_params=None):
        """
        Get static DNS entries associated with the delivery service

        :ref:`to-api-staticdnsentries`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'staticdnsentries', ('3.0', '4.0', '4.1', '5.0'))
    def create_staticdnsentries(self, data=None):
        """
        Create static DNS entries associated with the delivery service

        :ref:`to-api-staticdnsentries`
        :param data: The update action. QueueUpdateRequest() can be used for this argument also.
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'staticdnsentries', ('3.0', '4.0', '4.1', '5.0'))
    def update_staticdnsentries(self, data=None, query_params=None):
        """
        Update static DNS entries associated with the delivery service

        :ref:`to-api-staticdnsentries`
        :param data: The update action. QueueUpdateRequest() can be used for this argument also.
        :type data: Dict[str, Any]
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'staticdnsentries', ('3.0', '4.0', '4.1', '5.0'))
    def delete_staticdnsentries(self, query_params=None):
        """
        Delete static DNS entries associated with the delivery service

        :ref:`to-api-staticdnsentries`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Status
    #
    @api_request('get', 'statuses', ('3.0', '4.0', '4.1', '5.0'))
    def get_statuses(self, query_params=None):
        """
        Retrieves a list of the server status codes available.

        :ref:`to-api-statuses`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # System
    #
    @api_request('get', 'system/info', ('3.0', '4.0', '4.1', '5.0'))
    def get_system_info(self):
        """
        Get information on the traffic ops system.

        :ref:`to-api-system-info`
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Tenants
    #
    @api_request('get', 'tenants', ('3.0', '4.0', '4.1', '5.0'))
    def get_tenants(self, query_params=None):
        """
        Get all tenants.

        :ref:`to-api-tenants`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'tenants/{tenant_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def update_tenant(self, tenant_id=None):
        """
        Update a tenant

        :ref:`to-api-tenants-id`
        :param tenant_id: The tenant to update
        :type tenant_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'tenants', ('3.0', '4.0', '4.1', '5.0'))
    def create_tenant(self, data=None):
        """
        Create a tenant

        :ref:`to-api-tenants`
        :param data: The data for the tenant to be created
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # TO Extensions
    #
    @api_request('get', 'servercheck/extensions', ('3.0', '4.0', '4.1', '5.0'))
    def get_servercheck_extensions(self):
        """
        Retrieves the list of extensions.

        :ref:`to-api-servercheck_extensions`
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('post', 'servercheck/extensions', ('3.0', '4.0', '4.1', '5.0'))
    def create_to_extension(self, data=None):
        """
        Creates a Traffic Ops extension.

        :ref:`to-api-servercheck_extensions`
        :param data: The data for the extension to be created
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'servercheck/extensions/{extension_id:d}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_to_extension(self, extension_id=None):
        """
        Deletes a Traffic Ops extension.

        :ref:`to-api-servercheck_extensions-id`
        :param extension_id: The extension id to delete
        :type extension_id: int
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response]
        :raises: Union[LoginError, OperationError]
        """

    #
    # Topologies
    #
    @api_request('post', 'topologies', ('3.0', '4.0', '4.1', '5.0'))
    def create_topology(self, data: Dict[str, Any]=None) -> Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], Response]:
        """
        Create a topology

        :ref:`to-api-topologies`
        :param data: The Topology data to use for Topology creation.
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('get', 'topologies', ('3.0', '4.0', '4.1', '5.0'))
    def get_topologies(self, query_params: Dict[str, Any]=None) -> Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], Response]:
        """
        Get Topologies.

        :ref:`to-api-topologies`
        :param query_params: The optional url query parameters for the call
        :type query_params: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('put', 'topologies?name={name:s}', ('3.0', '4.0', '4.1', '5.0'))
    def update_topology(self, name: str=None, data: Dict[str, Any]=None) -> Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], Response]:
        """
        Update a Topology

        :ref:`to-api-topologies`
        :param name: The name of the Topology
        :type name: str
        :param data: The new values for the Topology
        :type data: Dict[str, Any]
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], Response]
        :raises: Union[LoginError, OperationError]
        """

    @api_request('delete', 'topologies?name={name:s}', ('3.0', '4.0', '4.1', '5.0'))
    def delete_topology(self, name: str=None) -> Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], Response]:
        """
        Delete a Topology

        :ref:`to-api-topologies`
        :param name: The name of the Topology to delete
        :type name: str
        :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]],
Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'topologies/{name:s}/queue_update', ('3.0', '4.0', '4.1', '5.0')) def topologies_queue_update(self, name=None, data=None): """ Queue Updates by Topology name. :ref:`to-api-topologies-name-queue_update` :param name: The Topology name :param data: The update action. :type data: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # Types # @api_request('get', 'types', ('3.0', '4.0', '4.1', '5.0')) def get_types(self, query_params=None): """ Get Data Types. :ref:`to-api-types` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Users # @api_request('get', 'users', ('3.0', '4.0', '4.1', '5.0')) def get_users(self): """ Retrieves all users. :ref:`to-api-users` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'users/{user_id:d}', ('3.0', '4.0', '4.1', '5.0')) def get_user_by_id(self, user_id=None): """ Retrieves user by ID. :ref:`to-api-users-id` :param user_id: The user to retrieve :type user_id: int :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'users', ('3.0', '4.0', '4.1', '5.0')) def create_user(self, data=None): """ Create a user. :ref:`to-api-users` :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'users/{user_id:d}', ('3.0', '4.0', '4.1', '5.0')) def update_user_by_id(self, user_id=None, data=None): """ Update a user. :ref:`to-api-users` :param data: The user update data payload. 
:type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'users/register', ('3.0', '4.0', '4.1', '5.0')) def create_user_with_registration(self, data=None): """ Register a user and send registration email :ref:`to-api-users-register` :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'user/current', ('3.0', '4.0', '4.1', '5.0')) def get_authenticated_user(self): """ Retrieves the profile for the authenticated user. :ref:`to-api-user-current` :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'user/current', ('3.0', '4.0', '4.1', '5.0')) def replace_authenticated_user(self, data=None): """ Updates the currently authenticated user. :ref:`to-api-user-current` :param data: The new user information which will replace the current user's user information. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Snapshot CRConfig # @api_request('get', 'cdns/{cdn_name}/snapshot', ('3.0', '4.0', '4.1', '5.0')) def get_current_snapshot_crconfig(self, cdn_name=None): """ Retrieves the CURRENT snapshot for a CDN which doesn't necessarily represent the current state of the CDN. The contents of this snapshot are currently used by Traffic Monitor and Traffic Router. 
:ref:`to-api-cdns-name-snapshot` :param cdn_name: The CDN name :type cdn_name: str :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('get', 'cdns/{cdn_name}/snapshot/new', ('3.0', '4.0', '4.1', '5.0')) def get_pending_snapshot_crconfig(self, cdn_name=None): """ Retrieves a PENDING snapshot for a CDN which represents the current state of the CDN. The contents of this snapshot are NOT currently used by Traffic Monitor and Traffic Router. Once a snapshot is performed, this snapshot will become the CURRENT snapshot and will be used by Traffic Monitor and Traffic Router. :ref:`to-api-cdns-name-snapshot-new` :param cdn_name: The CDN name :type cdn_name: str :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'snapshot', ('3.0', '4.0', '4.1', '5.0')) def snapshot_crconfig(self, query_params=None): """ Snapshot CRConfig by CDN Name or ID. :ref:`to-api-snapshot` :param query_params: The optional url query parameters for the call :type query_params: Dict[str, Any] :rtype: Tuple[Dict[str, Any], requests.Response] :raises: Union[LoginError, OperationError] """ # # Coordinate # @api_request('get', 'coordinates', ('3.0', '4.0', '4.1', '5.0')) def get_coordinates(self, query_params=None): """ Get all coordinates associated with the cdn :ref:`to-api-coordinates` :param query_params: The optional url query parameters for the call :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'coordinates', ('3.0', '4.0', '4.1', '5.0')) def create_coordinates(self, data=None): """ Create coordinates :ref:`to-api-coordinates` :param data: The update action. QueueUpdateRequest() can be used for this argument also. 
:type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'coordinates', ('3.0', '4.0', '4.1', '5.0')) def update_coordinates(self, query_params=None, data=None): """ Update coordinates :ref:`to-api-coordinates` :param query_params: The optional url query parameters for the call :type query_params: Dict[str, Any] :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'coordinates', ('3.0', '4.0', '4.1', '5.0')) def delete_coordinates(self, query_params=None): """ Delete coordinates :ref:`to-api-coordinates` :param query_params: The optional url query parameters for the call :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ # # Origin # @api_request('get', 'origins', ('3.0', '4.0', '4.1', '5.0')) def get_origins(self, query_params=None): """ Get origins associated with the delivery service :ref:`to-api-origins` :param query_params: The optional url query parameters for the call :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('post', 'origins', ('3.0', '4.0', '4.1', '5.0')) def create_origins(self, data=None): """ Creates origins associated with a delivery service :ref:`to-api-origins` :param data: The update action. QueueUpdateRequest() can be used for this argument also. 
:type data: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('put', 'origins', ('3.0', '4.0', '4.1', '5.0')) def update_origins(self, query_params=None): """ Updates origins associated with a delivery service :ref:`to-api-origins` :param data: The update action. QueueUpdateRequest() can be used for this argument also. :type data: Dict[str, Any] :param query_params: The optional url query parameters for the call :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ @api_request('delete', 'origins', ('3.0', '4.0', '4.1', '5.0')) def delete_origins(self, query_params=None): """ Updates origins associated with a delivery service :ref:`to-api-origins` :param query_params: The optional url query parameters for the call :type query_params: Dict[str, Any] :rtype: Tuple[Union[Dict[str, Any], List[Dict[str, Any]]], requests.Response] :raises: Union[LoginError, OperationError] """ #################################################################################### #### #### #### Data Model Overrides #### #### #### #################################################################################### def __enter__(self): """ Implements context-management for ToSessions. This will open the session by sending a connection request immediately, rather than waiting for login. :returns: The constructed object (:meth:`__init__` is called implicitly prior to this method) """ self.create() return self def __exit__(self, exc_type, exc_value, traceback): """ Implements context-management for TOSessions. This will close the underlying socket. 
""" self.close() if exc_type: logging.error("%s", exc_value) logging.debug("%s", exc_type, stack_info=traceback) if __name__ == '__main__': # Sample usages import operator DEBUG = False logging.basicConfig(stream=sys.stderr, level=logging.INFO if not DEBUG else logging.DEBUG) # TOSession Class Examples # TOSession is a class that allows you to create a session to a Traffic Ops instance # and interact with the Traffic Ops API. # Traffic Ops System - for self-signed cert -> turn off cert verification TOS = TOSession(host_ip='to.somedomain.net', verify_cert=True) TOS.login('someuser', 'someuser123') # Objects get returned munch-ified by default which means you can access dictionary keys as # attributes names but you can still access the entries with keys as well e.g. # ``cdn.name`` is equivalent to ``cdn['name']`` CDNS = TOS.get_cdns()[0] print(CDNS) for cdn in CDNS: print('CDN [{0}] has id [{1}]'.format(cdn.name, cdn.id)) ALL_TYPES = TOS.get_types()[0] print('All Types are (sorted by useInTable, name):') print(ALL_TYPES) for atype in sorted(ALL_TYPES, key=operator.itemgetter('useInTable', 'name')): print('Type [{0}] for table [{1}]'.format(atype.name, atype.useInTable)) print('Getting all cache groups (bulk)...') CACHE_GROUPS = TOS.get_cachegroups()[0] for cache_group in CACHE_GROUPS: print('Bulk cache group [{0}] has id [{1}]'.format(cache_group.name, cache_group.id)) # Example with URL replacement parameters # e.g. TOSession.get_cachegroups_by_id() is the '/cachegroups/{id}' API endpoint # See TOSession object for details. 
print(' Getting cachegroup by id [{0}]'.format(cache_group.id), ' to demonstrate getting by id...') cg_id_list = TOS.get_cachegroup_by_id(cache_group_id=cache_group.id)[0] print(' Cache group [{0}] by id [{1}]'.format(cg_id_list[0].name, cg_id_list[0].id)) # Example with URL query parameters SERVER_TYPES = TOS.get_types(query_params={'useInTable': 'server'})[0] print('Server Types are:') print(SERVER_TYPES) for stype in SERVER_TYPES: print('Type [{0}] for table [{1}]'.format(stype.name, stype.useInTable)) TOS.close() print('Done!')
Apache-TrafficControl
/Apache-TrafficControl-3.1.0.tar.gz/Apache-TrafficControl-3.1.0/trafficops/tosession.py
tosession.py
------------ Introduction ------------ This package provides a form of layer management for Python. It transparently substitutes an existing module with ones patch thereof. This `normalizes` or `standardizes` the original modules across ones code base. ApeMan intercepts ones ``import`` calls to substitute the desired module with a patched variant provided in an overlay. An overlay\ [#gentoo]_ is simply a python package containing ones patches for other packages. Additionally the overlays :file:`__init__.py` file must invoke ApeMan. Overlays make ones patches available across multiple projects; consistently exposing the additional API features provided by them. Similarly a set of patches may quickly be substituted for another by simply importing a different overlay. This formalizes monkey patching where the Ape in question has an affection for books, dislikes their readers and discourages, quite aggressively one might say, the use of the m... word\ [#librarian]_. .. rubric:: Footnotes .. [#gentoo] The term overlay is taken from Portage the package manager for Gentoo Linux. .. [#librarian] Someone out there was about to find out their worst nightmare was a maddened librarian. With a Badge. .. Suppose... .. ---------- .. .. One is writing a script that imports a :class:`CLASS` from a :mod:`MODULE` in ones Python installation, .. :: .. .. from MODULE import CLASS .. .. print(CLASS()) .. .. and one desperately wished that the class had a certain feature, say a nicer string representation. .. One implements the following .. :: .. from MODULE import CLASS .. .. class CLASS(CLASS) : .. def __str__(self): .. return "Nicer {} representation ".format(str(self)) .. .. while in their original script they would now import the patch .. :: .. .. from .MODULE import CLASS .. .. print(CLASS()) .. .. This proves so useful that one wishes to make their patched implementation of :class:`CLASS` available to all of their projects. .. 
ApeMan allows one to package their patch into an overlay so that .. :: .. .. import OVERLAY .. from MODULE import CLASS .. .. print(CLASS()) ------- Problem ------- Occasionally one wants to patch the functions and/or classes provided by some other package; when it lacks features or to normalize the provided |API|. A naive implementation relying upon the following structure; :: PROJECT/ # The root folder for ones project PACKAGE/ # The root folder of ones package. PATCH.py # The module containing ones patches. ... # The other modules in the package. __main__.py # The main script importing and using the patched module. would patch the features from the `SOURCE` module by importing and overloading it's `FUNCTION`\ s and `CLASS`\ es. :: from SOURCE import * _FUNCTION_ = FUNCTION def FUNCTION(*args, **kvps): ... _FUNCTION_(*args, **kvps) ... class CLASS(CLASS): ... Ones modules would then the `PATCH` in favour of the original package; pulling in the modifications. :: from .PATCH import * ... This works well for once off patches in standalone projects. Now, should a particular patch be especially useful, one might wish to use it across multiple projects. At this point, one might copy the patch across to the other project(s) creating copies; copies that diverge from one another over time as features are added. Should the original patch grow over time may become necessary to duplicate more of the structure of the original package; creating more files and exacerbating the problem. Eventually one ends up with multiple `PATCH` files, spread across various projects, whose contents deviate further from one another to increasingly varied degrees. -------- Solution -------- ApeMan offers, a hopefully better strategy, that consistently manages these patches. It resolves this by placing ones patches into an overlay; a package dedicated to ones patches. 
This may be done locally, within a sub-package, for one shot usage; or globally, within a separate package, for repeated usage by multiple packages. .. . If the following represents ones package structure .. . :: .. . .. . PACKAGE/ # The root folder of ones package. .. . overlay/ # The overlay containing the patches. .. . SOURCE # The target package one is wrapping or patching. .. . __init__.py # The __init__.py script importing ApeMan. .. . __main__.py # The packages main script, executed when invoked as a module. .. The packages main file makes it's usual calls to import the `SOURCE` modules but by importing the overlay first ApeMan redirects later imports to use ones patched modules instead. .. :: .. .. import overlay .. from SOURCE import * .. .. ... Overlay Structure ================= Whether it is made available globally or locally ones structures their overlay(s) as follows:: OVERLAY/ # The root folder of the ApeMan overlay _PACKAGE_.py # The module containing ones patches, renamed after the source module or package ... # Further patches provided by the overlay __init__.py # The file invoking ApeMan; identifying it as an overlay The overlays' :file:`__init__.py` file then imports and registers the :class:`ApeMan` instance; :: from apeman import ApeMan; apeman = ApeMan() which intercepts later imports, substituting ones patches for the original modules. Local Overlay(s) ================ Locally one may create an overlay at any level within their package by including a sibling package along side it's modules. :: PROJECT/ # The root folder for ones project PACKAGE/ # The root folder of ones package. OVERLAY/ # The ApeMan overlay ... # The contents of the overlay ... # The other modules in the package. __main__.py # The main script importing and using the patched module. Other modules within ones package may then invoke the overlay via relative import. :: import .OVERLAY from SOURCE import * ... 
Global Overlay(s) ================= Globally, an overlay, is provided as a separate, standalone package. :: PROJECT/ # The root folder for ones project OVERLAY/ # The root folder of the ApeMan Overlay ... # The contents of the overlay PACKAGE/ # The root folder of ones package. ... # The other modules in the package. __main__.py # The main script importing and using the patched module. In this case the modules in ones package must invoke the overlay using an absolute import. :: import OVERLAY from SOURCE import * ... .. One must explicitly import the features they need as the `OverlayImporter` actually blocks further imports. .. Note that an overlay package is meant to reside alongside its sibling module to afford the most flexibility. .. Whether or not this is possible at every level within a package depends upon how python enforces scoping. ------- Example ------- Consider patching the :class:`Decimal` class from the :mod:`decimal` module. Monkey Patching =============== The following structure is the simplest to implement. :: PACKAGE/ # The root folder of ones package. _decimal_.py # The module containing ones patches to the decimal module. __main__.py # The packages main script, executed when invoked as a module. Within :file:`_decimal_.py` import everything from the :mod:`decimal` module then subclass and monkey patch the `Decimal` class; modifying it's string representation. :: from decimal import * class Decimal(Decimal): def __str__(self) : return super().__str__().split("'")[1] Then within the :file:`__main__.py` file one would import and use the patch as follows:: import ._decimal_ as decimal from decimal import Decimal print(Decimal(42)) This should output `42` instead of `Decimal('42')` when we invoke the package using :code:`python -m PACKAGE`. Ape Patching ============ Using ApeMan we would move the `_decimal_.py` file into a sub-folder called `overlay`, with the resulting structure; :: PACKAGE/ # The root folder of ones package. 
overlay/ # The overlay containing the patches. _decimal_.py # The module containing ones patches to the decimal module. __init__.py # The __init__.py script importing ApeMan. __main__.py # The packages main script, executed when invoked as a module. accordingly the :file:`__init__.py` file should contain :: from apeman import ApeMan apeman = ApeMan The main file is then adapted to reflect the following. :: import .overlay from decimal import Decimal print(Decimal(42)) Without ... =========== One might argue that a cleaner structure still, is as follows :: PACKAGE/ # The root folder of ones package. decimal.py # The module containing ones patches to the decimal module. __main__.py # The packages main script, executed when invoked as a module. but this results in a whole series of clashes and the following error :: AttributeError: 'module' object has no attribute 'Decimal' .. Other related errors include : .. SystemError: Parent module '' not loaded, cannot perform relative import .. Essentially the :file:`decimal.py` module gets installed within the decimal name space preventing the import of the original library. Essentially the :file:`PACKAGE/decimal.py` file gets loaded as the :mod:`decimal` module and is assigned under :attr:`sys.modules` reserving the `decimal` key; preventing the subsequent import of the actual :mod:`decimal` module. .. note :: This method actually works if one tells python it's executing a module using the `-m` switch, :code:`python -m PACKAGE`, but only I found this out after creating the package. ------------- Compatability ------------- The machinery underlying :meth:`import` has undergone some radical changes over the lastest releases of Python (Particularly versions 3.3-3.5 and next in 3.7). In light of this ApeMan aims to support a minimal set of features; namely explicit and implicit overlays providing patches whose structure matches their intended source packages. 
Any functionality offered beyond these base features is considered sugar e.g. repeated imports, stacked overlays, restructured patches and substructured patches; .. tested by the :mod:`*Explcit` :mod:`*Implcit` and :mod:`*Init` tests (See :ref:`Testing`). .. table :: Set of features supported by the Python import system in the different Python implementations :align: center :widths: auto ====================== === === === === Python 2.7 3.3 3.4 3.7 ====================== === === === === explicit packages X X X X implicit packages X X X lazy loading X C implementation X X Python implementation X X ====================== === === === === It should be noted that Apeman has only been developed and tested in Python 2.7, 3.4 and 3.6. The Python 3.4 implementation was last tested when the author still had it installed, before switching to 3.6. The author also maintains a flaky build of Python 3.5 but this is not a good testing envvironment as a result ApeMan in 3.5 is flaky. Generally speaking if you're using ApeMan in anything other then Python 2.7 or 3.6 you're on your own. .. table :: Set of features supported ApeMan under the different Python implementations :align: center :widths: 4,1,1,1,1 ======================= === === === === Python 2.7 3.4 3.5 3.6 ======================= === === === === explicit packages X X ? X implicit packages N/A X ? X repeated imports ? ? ? X Substructured overlays Restructured overlays Stacking overlays ======================= === === === === .. There are tworules for success ... .. 1) Never reveal everything that you know -a command line option -b path command line option Having said that the 2.7 implementation ought to work in Python 2.7-3.3. Python 3.4 saw a big overhaul from 3.3 but one did develop ApeMan against this version and, unless one is grossly mistaken, the implementation should still work. Python 3.5 included a few leftovers that were forgotten for Python 3.4. 
The 3.4 implementation ought to work in 3.5 but the current 3.5 implementation diverges from the one for 3.4 and appears broken upon the authors machine. The Python 3.6 implementation is presently the most tested and stable while Python 3.7 has not been attempted just yet. ------------------ Live and Let Die ! ------------------ This is largely inspired by Portage, the package manager for Gentoo Linux and the tutorial by David Beasley. However it is only possible through the contributions of Brett Cannon, who ported the Python import machinery from C/C++ to Python. In general a big thank you is also made to the developers of Python and all the other third party packages that come withit. .. .. tikz:: Title .. :libs: calc .. .. \draw (0,0) circle (3em) circle (4em) circle (5em); --------- Licencing --------- This software is licenced under a GPL v3 licence. One requests that anyone hosting a fork of this code inform the author accordingly so that any useful modifications one has made may periodically be merged into the code base.
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/readme.rst
readme.rst
import os
import sys
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as zip_longest
# Imports - Why is this being done here ???
from importlib import util, abc ,machinery
# Debugging
import logging

# Information
__version__ = "0.0.0"

# Separator between dotted module-path components (e.g. "package.module").
modsep = '.'

if __name__ == "__main__" :
    # Executed as a script: discover and run the test suite found in the
    # parent directory.
    # NOTE(review): ``Path`` is imported but never used in this branch.
    from pathlib import Path
    import unittest
    suite = unittest.TestLoader().discover('..')
    unittest.TextTestRunner(verbosity=1).run(suite)
    # (Commented-out experimental/debugging code removed for readability:
    #  it configured a per-version logger and exercised general, targeted,
    #  nested and staggered imports against an ``overlay`` package, dumping
    #  the matching ``sys.modules`` keys after each import.)
else :
    # Imported as a module: pick the OverlayImporter implementation that
    # matches the running interpreter version.
    # NOTE(review): there is no branch for Python 3.6+ here, so on those
    # versions OverlayImporter is silently never imported -- confirm whether
    # that dispatch happens elsewhere (e.g. the package __init__).
    # Note : This code is only compatible with Python 3
    if __package__ :
        # Relative imports for normal usage
        if sys.version_info[:2] == (3,5) :
            print("35A")
            from .__35__ import OverlayImporter
        if sys.version_info[:2] == (3,4) :
            print("34A")
            from .__34__ import OverlayImporter
    else :
        # Absolute imports prevent "SystemError : Parent module '' not loaded,..."
        if sys.version_info[:2] == (3,5) :
            print("35B")
            from __35__ import OverlayImporter
        if sys.version_info[:2] == (3,4) :
            print("34B")
            from __34__ import OverlayImporter
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/_apeman.py
_apeman.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Flags
DEBUG = True

# System
import os
import sys
import builtins

# Debugging
if DEBUG:
    from pdb import set_trace as db

# Inspection
import inspect

# Iteration
from itertools import zip_longest as izip, tee

# Imports
from importlib import util, abc, machinery, _bootstrap as bootstrap, import_module
import imp

# Local libraries: the absolute fallback keeps the module usable as a script.
try:
    from .descriptors import PathName
    from .utilities import Indentation
except (SystemError):
    from descriptors import PathName
    from utilities import Indentation

# Logging
if DEBUG:
    import logging

# Constants
modsep = '.'
version = (0, 0, 0)


class Import(Indentation):
    """Replaces the ``builtins.__import__`` function with itself, bypassing
    the ModuleSpec and Finder/Loader (Importer) mechanisms.

    .. note :: This class fails to work properly.
    .. note :: This is derived from the Python 3.4 implementation.
    """
    # Directory containing the overlay modules (validated by the descriptor).
    root = PathName()
    # "List of modules" currently redirected; class-level, shared by instances.
    lom = []
    _import_ = builtins.__import__

    def __init__(self, *args, name=None, path=None, root=None, _import_=__import__, debug=DEBUG, **kvps):
        """Install this instance as ``builtins.__import__``.

        name     : package invoking ApeMan (defaults to the caller's module name).
        root     : folder of the invoking package (defaults to the caller's folder).
        path     : unused; kept for interface compatibility.
        _import_ : import implementation to delegate non-overlaid imports to.
        debug    : enables logging.
        """
        super().__init__(*args, **kvps)
        # Properties
        self.mask = "_{}_"
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__)
        self.mods = self.modules()
        # Logging and debugging
        self.debug = debug
        if self.debug:
            self.log = logging.getLogger(__name__)
        if self.debug:
            self.log.debug("Initialized : Import")
        if self.debug:
            self.log.debug("Modules : {}".format(sorted(self.mods.keys())))
        # Import functionality: hook the builtin import statement.
        self._import_ = _import_
        builtins.__import__ = self

    def __call__(self, name, *args, **kvps):
        """Hooks the import statement.

        Imports of a module that has an overlay are redirected into the
        overlay package; an overlay re-importing its wrapped target falls
        through to the original import implementation.
        """
        if self.mapToTarget(name) in self.mods.keys():
            self.log.debug(self.lom)
            if name in self.lom:
                # The overlay itself is importing the wrapped module.
                if self.debug:
                    self.log.debug("Reverting : " + name)
                return self._import_(name, *args, **kvps)
            self.lom.append(name)
            if self.debug:
                self.log.debug("Redirecting : {}".format(self.name + modsep + self.mapToTarget(name)))
            # A little black magic: the remaining import args are ignored.
            return import_module(self.name + modsep + self.mapToTarget(name))
        if self.debug:
            self.log.debug("Importing : {}".format(name))
        return self._import_(name, *args, **kvps)

    def mapToTarget(self, name):
        """Maps request to the overlay module.

        Converts ``tiers.package.module`` to ``_tiers_._package_._module_``.
        """
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def modules(self):
        """Lists the overlays implemented within a directory.

        Recurses through the folder structure under ``self.root`` to find
        python modules; an ``__init__`` file is keyed by its package name.
        """
        ext = '.py'
        mod = lambda parts, ext: [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*' + ext)]
        return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]): item[1] for item in lst}


class OverlayLoader(machinery.SourceFileLoader):
    """Loader used for overlay modules.

    .. note :: This was deliberately named OverlayLoader, verify that this
       is still true.
    """
    # Class-level guard: only the first create_module call loads the module;
    # subsequent calls return None to defer to the default machinery.
    flag = True

    def __init__(self, *args, logger=logging.getLogger(__name__), debug=DEBUG, **kvps):
        super().__init__(*args, **kvps)
        # BUG FIX: the debug argument was accepted but never assigned, so the
        # statement below (and create_module) raised AttributeError on every
        # instantiation — SourceFileLoader defines no 'debug' attribute.
        self.debug = debug
        if self.debug:
            self.log = logging.getLogger(__name__)

    def create_module(self, spec, *args, **kvps):
        if self.flag:
            # Note: assigning the instance attribute shadows the class flag.
            self.flag = False
            if self.debug:
                self.log.debug("\n".join(dir(util)))
            # Python 3.4 style: let the spec's own loader import the module
            # (util.module_from_spec would be the Python 3.5 equivalent).
            temp = spec.loader.load_module()
            if self.debug:
                self.log.debug(temp)
            return temp
        return None


class OverlayImporter(abc.MetaPathFinder):
    """Meta path finder that redirects imports of overlaid modules.

    This is not a true importer according to the python docs, because it
    does not formally find and load a module itself: the machinery.*Loader
    classes expect a name and a path when instantiated, while this class
    has to accommodate its own state.  The spec search is therefore
    deferred to the import machinery and the result modified somewhat.
    """

    # Logging indentation state
    dent = 0

    def indent(self, label=None, char=" ", length=10):
        self.dent += 1
        return "{0}{1:{2}}".format(char * self.dent, label[:length - self.dent], length - self.dent)

    def undent(self, label=None, char=" ", length=10):
        self.dent -= 1
        return "{0}{1:{2}}".format(char * self.dent, label[:length - self.dent], length - self.dent)

    def ondent(self, label=None, char=".", length=10):
        return "{0}{1:{2}}".format(char * self.dent, label[:length - self.dent], length - self.dent)

    # Mapping
    def mapToSource(self, name):
        """Maps request to the corresponding overlay module."""
        # Consider the use of util.resolve_name(name, path)
        return modsep.join([part if test else self.mask.format(part) for test, part in izip(self.name.split(modsep), name.split(modsep))])

    def mapToTarget(self, name):
        # Strips this package's prefix from the requested module name.
        return modsep.join([part for test, part in izip(self.name.split(modsep), name.split(modsep)) if not test])

    def mapToHidden(self, name):
        """Maps request to a corresponding hidden module."""
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def mapToSystem(self, name):
        """Maps a module to the corresponding overlay system path.

        Finds the first folder or file matching the module name; ``name``
        must be processed beforehand using ``self.mapToSource``.
        """
        stem = [part for test, part in izip(self.name.split(modsep), name.split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in self.path)
        path = None
        while next(test, None):
            path = next(item, None)
        if not os.path.isdir(path):
            path += '.py'
        return path

    def mapToFile(self, name, path):
        """Similar to mapToSystem but for packages it tries to map to
        ``__init__.py`` files.

        Note one must premap the path FQMN using ``mapToSource``.
        """
        stem = [part for test, part in izip(self.name.split(modsep), name.split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in path)
        path = None
        while next(test, None):
            path = next(item, None)
        if os.path.isdir(path):
            path = os.path.join(path, '__init__.py')
        else:
            path += '.py'
        return path

    # Importer code
    # Module names already trapped by this finder (class-level, shared).
    trap = []

    def __init__(self, *args, path=None, name=None, logger=logging.getLogger(__name__), debug=DEBUG, **kvps):
        super().__init__(*args, **kvps)
        self.mask = "_{}_"
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.path = path or inspect.getmodule(inspect.stack()[1][0]).__path__
        self.debug = debug
        if self.debug:
            self.log = logging.getLogger(__name__)
        if self.debug:
            self.log.debug("Initialized : {:74} {}".format(self.name, self.path))
        # Register this finder ahead of the default import machinery.
        sys.meta_path.insert(0, self)

    def find_spec(self, name, path, target=None):
        """Return a ModuleSpec for overlaid modules, None for all others.

        ``path`` is the absolute import path while ``name`` may be relative;
        ``target`` is only specified if this is a re-import.
        """
        if self.debug:
            self.log.debug("find_spec : {1:24} {0:24} {3:24} {2}".format("({})".format(self.name), name, path, str(target)))
        if name.startswith(self.name) and name not in self.trap:
            _trap_ = self.mapToTarget(name)
            _name_ = self.mapToSource(name)
            _path_ = self.mapToFile(_name_, path)
            if self.debug:
                self.log.debug("Overlay : {1:24} {0:24} {3:24} {2}".format("({})".format(self.name), _name_, _path_, _trap_))
            if _trap_:
                # The following should always execute; the error should never
                # be raised.
                self.trap.append(_trap_)
            else:
                raise ValueError("The name {} could not be mapped to {}. ".format(name, _trap_))
            # spec_from_file_location accepts: name, location (a file path or
            # "NameSpace"), loader (picked automatically if not provided) and
            # submodule_search_locations (likewise).
            spec = util.spec_from_file_location(name, _path_, loader=OverlayLoader(name, _path_))
            return spec
        if self.debug:
            self.log.debug("Should Trap : {} in {} as {}".format(name, self.trap, self.mapToHidden(name)))
        return None


if __name__ == "__main__":
    # Call test suites
    import unittest
    tests = {
        "all": 'test*.py',
        "overlay": '*Overlay.py',
        "assumptions": '*Assumptions.py',
        "machinery": '*Machinery.py',
        "scaffold": '*Scaffoled.py',
        "structure": '*Structure.py',
    }
    test = 'structure'
    suite = unittest.TestLoader().discover('..', tests[test])
    unittest.TextTestRunner(verbosity=1).run(suite)
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__35__.py
__35__.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Constants
DEBUG = True

# System
import os
import sys
import builtins

# Types
import types

# Debugging
if DEBUG:
    from pdb import set_trace as db

# Inspection
import inspect

# Iteration
from itertools import zip_longest as izip, tee

# Imports
from importlib import util, abc, machinery, _bootstrap as bootstrap, import_module
import imp

# Debugging
import logging

# Local libraries: fall back to absolute imports when run as a script.
try:
    from . import descriptors
    from . import utilities
except SystemError:
    import descriptors
    import utilities

# Constants
modsep = '.'
version = (0, 0, 0)


class OverlayImporter(abc.MetaPathFinder, utilities.Indentation):
    """This class combines a Finder and a Loader into an Importer.

    .. inheritance-diagram:: apeman.__34__
       :parts: 2

    The strategy used overwrites the imported module with the overlay
    import under a different name.  Since Python imports are atomic one
    needs to trap modules being loaded and wrapped: ``overlay.tiers`` is
    mapped to ``overlay._tiers_.py`` which is imported as ``tiers``, while
    ``tiers``, the original module, is imported as ``_tiers_``.

    .. note :: This is not an especially good implementation, it is not
       thread safe as it does not invoke module locks when loaded.
    """
    # See section 5.5 in https://docs.python.org/3/reference/import.html to
    # determine if the Path Based Finder is a better fit for this class.
    root = descriptors.PathName()
    lom = []

    def __init__(self, *args, name=None, path=None, root=None, _import_=__import__, **kvps):
        super().__init__(*args, **kvps)
        # Importer functionality
        self.mask = "_{}_"
        self.trap = {}
        self.wrap = {}
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__)
        self.mods = self.modules()
        if DEBUG:
            self.log = logging.getLogger(__name__)
        if DEBUG:
            self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key]))
        # Import functionality: hook the builtin import statement.
        builtins.__import__ = self
        self.imp = _import_

    def __call__(self, name, *args, **kvps):
        """Hooks the import statement: logs the request and delegates to
        the original import implementation."""
        self.log.debug("import : {}".format(name))
        return self.imp(name, *args, **kvps)

    def mapToTarget(self, name):
        """Maps request to the overlay module.

        Converts ``tiers.package.module`` to ``_tiers_._package_._module_``.
        """
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def modules(self):
        """Lists the overlays implemented within a directory.

        Recurses through the folder structure under ``self.root`` to find
        python modules; an ``__init__`` file is keyed by its package name.
        """
        ext = '.py'
        mod = lambda parts, ext: [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*' + ext)]
        return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]): item[1] for item in lst}


if __name__ == "__main__":
    # Setup logging
    import logging
    logging.basicConfig(format='%(message)s')
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # Make the test packages importable.
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..\\tests')
    sys.path.append(__root__)
    # Atomic imports
    import uppercase
    logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key])
    logger.debug("Secondary")
    from tiers import package_a
    logger.debug([key for key in sys.modules.keys() if 'tiers' in key])
    logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key])
    logger.debug(package_a.__version__)
    from tiers import module_a
    logger.debug([key for key in sys.modules.keys() if 'tiers' in key])
    logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key])
    logger.debug(module_a.ClassA())
    logger.debug("\nTertiary")
    from tiers.package_a import module_b
    logger.debug([key for key in sys.modules.keys() if 'tiers' in key])
    logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key])
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__34__.py
__34__.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function # Flags DEBUG = True # Weak references import weakref # System import os import sys import builtins # Types import types # Debugging if DEBUG : from pdb import set_trace as db # Inspection import inspect # Iteration # from itertools import zip_longest as izip, tee # Deprecated # Imports from importlib import util, abc ,machinery, _bootstrap as bootstrap, import_module import imp # Local Libraries try : from .descriptors import PathName from .utilities import Indentation except (ModuleNotFoundError, ImportError) : # SystemError from descriptors import PathName from utilities import Indentation # Logging if DEBUG : import logging if DEBUG : log = logging.getLogger(__name__) # Constants modsep = '.' version = (0,0,0) class Import(Indentation): """ This class replaces the `builtins.import` function with itself. Bypassing the ModuleSpec and Finder/Loader or Importer mechanisms. .. note :: This is derived from the Python 3.4 implementation. """ root = PathName() lom = [] # _import_ = builtins.__import__ mask = "_{}_" def __init__(self, *args, name = None, path = None, root = None, _import_ = __import__, debug = DEBUG, **kvps): """ root : The folder from which ApeMan is invoked, this defaults to the folder containig the init file invoking ApeMan name : The package from which ApeMan is invoked, this defaults to the package invoking ApeMan, that is it maps to the folder containig the init file. path : Deprecated, this is no longer in use. debug : Deprecated, a flag to enable debugging, this will be done by configuration file in the future. 
""" super().__init__(*args, **kvps) # Logging and Debugging self.debug = debug if self.debug : self.log = logging.getLogger(__name__) if self.debug : self.log.debug("Initialized : Import") # if self.debug : self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key])) # Attributes self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__ self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__) self.mods = self.modules() # print(self.mods) # Builtins self._import_, builtins.__import__ = _import_, self # weakref.ref(self, self.__del__) def __call__(self, name, *args, **kvps) : # (self, *args, *kvps): """Substitutes builtins.__import__ when one invokes an import statement Arguments: self: The ApeMan instance name: The modules to be imported locals: The local scope invoking the import statement .e.g. module/function/class scope globals: The global scope invoking the import statement e.g. the module or the package scope (The __init__ file for packages) level: The relative level of the import. """ # Hooks the import statement # self.log.debug([{arg['__name__']:arg.keys()} if isinstance(arg, dict) else arg for arg in args]) if self.mapToTarget(name) in self.mods.keys() : # if self.debug : self.log.debug("Overloading : {}".format(name)) if name in self.lom : if self.debug : self.log.debug("Reverting : " + name) return self._import_(name, *args, **kvps) # self.log.debug("remap : " + name + " -> "+ self.name + "." 
+self.mapToTarget(name)) # self.log.debug(name) # self.log.debug(self.name + modsep + self.mapToTarget(name)) # self.log.debug("Wrapped Import") self.lom.append(name) if self.debug : self.log.debug("Redirecting : {}".format(self.name + modsep + self.mapToTarget(name))) return import_module(self.name + modsep + self.mapToTarget(name)) # This is a little black magic as we ignore the args if self.debug : self.log.debug("Importing : {}".format(name)) return self._import_(name, *args, **kvps) def mapToTarget(self, name) : """Maps request to the overlay module""" # Converts tiers.package.module to _tiers_._package_._module_ return modsep.join([self.mask.format(part) for part in name.split(modsep)]) def modules(self) : """Lists the overlays implemented within a directory """ # {'_docopt_' : WindowsPath('e:/python/apeman-overlays/overlays/_docopt_.py'), # '_pathlib_': WindowsPath('e:/python/apeman-overlays/overlays/_pathlib_.py')} # This differs from overlays in that it recurses through the # folder structure to find python modules ext = '.py' mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)] lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)] return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]) : item[1] for item in lst} def __del__(self): """ The recommended method for removing an instance of ApeMan is to call __del__ on a reference ones retains for this purpose. :: apeman = ApeMan() apeman.__del__() If one has not retained a reference for this purpose then the following call may be used instead. :: ApeMan() builtins.__import__.__del__() """ # print("__del__") # self._substitute_() builtins.__import__ = self._import_ class OverlayImporter(abc.MetaPathFinder, Indentation): # , abc.SourceLoader """ This class combines a Finder and a Loader into an Importer. .. 
inheritance-diagram:: apeman.__34__ :parts: 2 The strategy used maps overwrites the imported module with the overlay import under a different name Since Python imports are atomic one needs to trap modules being loaded and wrapped overlay.tiers is to be mapped to overlay._tiers_.py which is imported as tiers, while tiers, the original module is imported as _tiers_ .. note :: This is not an especially good implementation, it is not thread safe as it does not invoke module locks when loaded. """ # See section 5.5 in [1] to determine if the Path Based Finder # is a better fit for this class # # https://docs.python.org/3/reference/import.html root = PathName() lom = [] def __init__(self, *args, name = None, path = None, root = None, _import_ = __import__, **kvps): if DEBUG : log.debug("OverlayImporter Initialized") super().__init__(*args, **kvps) # Importer Functionality self.mask = "_{}_" self.trap = {} self.wrap = {} self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__ self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__) self.mods = self.modules() if DEBUG : self.log = logging.getLogger(__name__) if DEBUG : self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key])) # Import Functionality # builtins.__import__ = self # self._import_ = _import_ def __call__(self, name, *args) : # (self, *args, *kvps): # Hooks the import statement # self.log.debug("importing : {}".format(name)) # self.log.debug([{arg['__name__']:arg.keys()} if isinstance(arg, dict) else arg for arg in args]) # if self.mapToTarget(name) in self.mods.keys() : # self.log.debug("Overloaded") # if name in self.lom : # self.log.debug("unmap : " + name) # return self._import_(name, *args) # self.log.debug("remap : " + name + " -> "+ self.name + "." 
+self.mapToTarget(name)) # self.log.debug(name) # self.log.debug(self.name + modsep + self.mapToTarget(name)) # self.log.debug("Wrapped Import") # self.lom.append(name) # return import_module(self.name + modsep + self.mapToTarget(name)) # This is a little black magic as we ignore the args if DEBUG : log.debug("import called : {}".format(name)) return self._import_(name, *args) def mapToTarget(self, name) : """Maps request to the overlay module""" # Converts tiers.package.module to _tiers_._package_._module_ return modsep.join([self.mask.format(part) for part in name.split(modsep)]) def modules(self) : """ Lists the overlays implemented within a directory """ # This differs from overlays in that it recurses through the # folder structure to find python modules ext = '.py' mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)] lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)] return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]) : item[1] for item in lst} # def get_data() : # pass # def get_filename() : # pass # The 3.5 module should implement this. 
# # def find_spec(self, name, path, target = None) : # self.log.debug("{}> {:<40} {:<80}".format(self.indent("FS:" + self.name),name, str(path))) # spec = util.spec_from_file_location(self.mapToTarget(name), str(self.modules()[self.mapToTarget(name)])) # self.log.debug(spec) # self.trap[name] = spec.loader # spec.loader = self # self.log.debug(spec) # return spec # # def exec_module(self, *args, **kvps) : # self.log.debug("Exec_Module") # self.log.debug(args) # self.log.debug(kvps) # # def create_module(self, *args, **kvps) : # self.log.debug("Create_Module") # self.log.debug(args) # self.log.debug(kvps) # def find_module(self, name, path=None): # # self.log.debug("Find_module") # self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name),name, str(path))) # # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) # if self.mapToTarget(name) in self.mods : # User imports _PACKAGE_ # # self.log.debug(self.undent("F:Trap")) # self.trap[name] = self.mods.pop(self.mapToTarget(name)) # return self # if self.trap.pop(name) : # overlay imports PACKAGE # # self.log.debug(self.undent("F:Wrap")) # for meta in [meta for meta in sys.meta_path if meta is not self]: # self.wrap[name] = self.wrap.get(name) or meta.find_module(name, path) # return self # # if name in self.wrap : # overlay imports PACKAGE # # return self # return None # def load_module(self, name): # # self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name),name)) # load = sys.modules.get(name) # if name in self.trap : # # One should strictly use SourceFileLoader here instead. 
# # self.log.debug(self.ondent("L:Trap")) # file = self.trap.get(name) # load = types.ModuleType(self.mapToTarget(name)) # with file.open('r') as data : code = data.read() # # self.log.debug([key for key in sys.modules.keys() if name in key]) # load.__file__ = str(file) # code = compile(code, str(file), 'exec') # sys.modules[name] = load # must occur before exec # exec(code, load.__dict__) # # self.log.debug([key for key in sys.modules.keys() if name in key]) # # self.log.debug(load.__version__) # if name in self.wrap : # # Note : importing PACKAGE as _PACKAGE_ fails. # # This is due to the to the `builtin` importers preventing # # name changes. To be explicit they can't find a funny # # named module and one can't cross assign the module. One # # can reassign it however # # self.log.debug(self.ondent("L:Wrap")) # spec = self.wrap.pop(name) # load = spec.load_module() # # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) # # self.log.debug(self.undent("L:Done")) # return load # temp = self.modules() # file = str(temp[self.mapToTarget(name)]) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(self.mapToTarget(name), file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(name, file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # self.log.debug(self.mapToTarget(name)) # self.log.debug(self.modules().keys()) # file = self.modules()[self.mapToTarget(name)] # # self.log.debug(file) # temp = machinery.SourceFileLoader(name, [str(self.root)]) # temp.load_module() # temp = machinery.SourceFileLoader(name, self.modules()[self.mapToTarget(name)]).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# sys.modules[name] = temp # Using sys.modules[module] = temp fails # self.log.debug([key for key in sys.modules.keys() if key in name]) # self.trap[name].load_module() # temp = OverlayLoader(name, str(self.trap[name])).load_module(modsep.join([self.name,name])) # temp = machinery.SourceFileLoader(name, str(self.trap[name])).load_module() # return temp # self.log.debug([key for key in sys.modules.keys() if key in name]) # # be weary here, re-assigning names is a bit finnicky and has a rollover impact. # sys.modules[name] = temp # Using sys.modules[module] = temp fails # parent, _, module = name.partition(modsep) # Was rpartition # if name in self.trap : # This might break # # Handle Source Import # self.trap.pop(name) # self.log.debug(self.ondent("Pass Trapped")) # temp = self.temp.load_module() # sys.modules[self.mapTarget(name)] = temp # self.log.debug("{}< {}".format(self.undent("Imported"),self.mapTarget(name))) # return temp # else : # # Handle Overlay Import # if module in sys.modules: # Already Imported # return sys.modules[module] # Modules' absolute path # # Import the module # self.trap.append(module) # file = self.mapToRoot(name) # _name_ = self.mapToSource(name) # root,stem = self.pathParts(self.mapToSource(name)) # self.log.debug("{}: {:18} -> {:18} {:80}".format(self.ondent("FileLoader"),root, stem, file)) # temp = machinery.SourceFileLoader(name, file).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# sys.modules[name] = temp # Using sys.modules[module] = temp fails # self.log.debug("{}< {}".format(self.undent("Imported"),temp)) # return temp if __name__ == "__main__" : # Logging logging.basicConfig(level=logging.DEBUG) # format = '%(message)s') # Testing import unittest # System # from pathlib import Path # Python Path(s) # mockup = str(Path(os.path.dirname(os.path.abspath(__file__))).joinpath('../mockups').resolve()) # sys.path.append(mockup) # Call Test Suites tests = { "all" : 'test*.py', "overlay" : '*Overlay.py', "assumptions": '*Assumptions.py', "machinery" : '*Machinery.py', "scaffold" : '*Scaffoled.py', "structure" : '*Structure.py'} # Single tes # test = 'structure' # suite = unittest.TestLoader().discover('..',tests[test]) # unittest.TextTestRunner(verbosity=1).run(suite) # Multiple tests test = ['structure'] suite = lambda test : unittest.TestLoader().discover('..',tests[test]) suites = unittest.TestSuite([suite(tst) for tst in test]) unittest.TextTestRunner(verbosity=1).run(suites) # External call # from pathlib import Path # path = str(Path(os.path.dirname(os.path.abspath(__file__))).joinpath('../mockups').resolve()) # sys.path.append(path) # from subprocess import Popen, PIPE # check_output as cmd, CalledProcessError # # code = \ # # """ # # import logging; logging.basicConfig(level=logging.INFO); logging.info(__name__) # # import uppercaseWithInit # # """ # # code = \ # # """ # # import logging; logging.basicConfig(level=logging.INFO); logging.info(__name__) # # from withInit_a.module_a import ClassA # # """ # code = \ # """ # import overlay # # import logging as log; log.basicConfig(level=log.DEBUG); log.info(__name__) # from module import ClassA # print(ClassA()) # """ # command = Popen([sys.executable,"-c",code], stdout=PIPE, stderr=PIPE, cwd = path) # output, error = command.communicate() # print("Process Messages") # print(output.decode()) # print("Error Messages") # print(error.decode()) # Atomic Imports # import uppercaseWithInit # 
logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key]) # # logger.debug("Primary") # # import tiers # # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key]) # # logger.debug(tiers.__file__) # # logger.debug(tiers.__version__) # logger.debug("Secondary") # from tiers import package_a # logger.debug([key for key in sys.modules.keys() if 'tiers' in key]) # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key]) # logger.debug(package_a.__version__) # from tiers import module_a # logger.debug([key for key in sys.modules.keys() if 'tiers' in key]) # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key]) # logger.debug(module_a.ClassA()) # logger.debug("\nTertiary") # from tiers.package_a import module_b # logger.debug([key for key in sys.modules.keys() if 'tiers' in key]) # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key]) # # logger.debug(module_b.ClassB()) # db() # Implicit Root Import # from overlay import * # Test with/out __all__ defined # Explicit Root Import # from uppercase import tiers # Explicit Nested import # from overlay.tiers import module_a # Explicit Nested import # from overlay.tiers.module_a import Alpha # print(Alpha()) # Explicit Staged import # from overlay import tiers # logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')])) # from tiers import module_a # logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if 'overlay' in key or 'tiers' in key])) # logger.debug("\n".join(["{} : {}".format(key, sys.modules[key]) for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')])) # else : # import builtins # def _import_(*args, importer = __import__) : # # Hooks the import statement # logger.debug("import : {}".format(args[0])) # temp = importer(*args) # # logger.debug(dir(temp)) # logger.debug([temp.__name__, 
temp.__file__, temp.__package__, temp.__loader__]) # return temp # builtins.__import__ = _import_ # Pass pass
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__36__.py
__36__.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function # System import os import sys # Types import types # Debugging from pdb import set_trace as db # Inspection import inspect # Iteration from itertools import zip_longest as izip, tee # Imports from importlib import util, abc ,machinery, _bootstrap as bootstrap import imp # Debugging import logging # Local Libraries try : from . import descriptors from . import utilities except SystemError: import descriptors import utilities # Constants modsep = '.' class OverlayImporter(abc.MetaPathFinder, abc.SourceLoader, utilities.Indentation): """ This class combines a Finder and a Loader into an Importer. .. inheritance-diagram:: apeman.__34__ :parts: 2 The strategy used maps overwrites the imported module with the overlay import under a different name Since Python imports are atomic one needs to trap modules being loaded and wrapped overlay.tiers is to be mapped to overlay._tiers_.py which is imported as tiers, while tiers, the original module is imported as _tiers_ .. note :: This is not an especially good implementation, it is not thread safe as it does not invoke module locks when loaded. 
""" # See section 5.5 in [1] to determine if the Path Based Finder # is a better fit for this class # # https://docs.python.org/3/reference/import.html root = descriptors.PathName() def __init__(self, *args, name = None, path = None, root = None, **kvps): super().__init__(*args, **kvps) self.mask = "_{}_" self.trap = {} self.wrap = {} self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__ self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__) self.mods = self.modules() self.log = logging.getLogger(__name__) self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key])) def mapToTarget(self, name) : """Maps request to the overlay module""" return modsep.join([self.mask.format(part) for part in name.split(modsep)]) def modules(self) : # This differs from overlays in that it recurses through the # folder structure to find python modules ext = '.py' mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)] lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)] return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]) : item[1] for item in lst} # The 3.5 module should implement this. 
# # def find_spec(self, name, path, target = None) : # self.log.debug("{}> {:<40} {:<80}".format(self.indent("FS:" + self.name),name, str(path))) # spec = util.spec_from_file_location(self.mapToTarget(name), str(self.modules()[self.mapToTarget(name)])) # self.log.debug(spec) # self.trap[name] = spec.loader # spec.loader = self # self.log.debug(spec) # return spec # # def exec_module(self, *args, **kvps) : # self.log.debug("Exec_Module") # self.log.debug(args) # self.log.debug(kvps) # # def create_module(self, *args, **kvps) : # self.log.debug("Create_Module") # self.log.debug(args) # self.log.debug(kvps) def find_module(self, name, path=None): # self.log.debug("Find_module") self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name),name, str(path))) # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) if self.mapToTarget(name) in self.mods : # User imports _PACKAGE_ # self.log.debug(self.undent("F:Trap")) self.trap[name] = self.mods.pop(self.mapToTarget(name)) return self if self.trap.pop(name) : # overlay imports PACKAGE # self.log.debug(self.undent("F:Wrap")) for meta in [meta for meta in sys.meta_path if meta is not self]: self.wrap[name] = self.wrap.get(name) or meta.find_module(name, path) return self # if name in self.wrap : # overlay imports PACKAGE # return self return None def load_module(self, name): # self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name),name)) load = sys.modules.get(name) if name in self.trap : # One should strictly use SourceFileLoader here instead. 
# self.log.debug(self.ondent("L:Trap")) file = self.trap.get(name) load = types.ModuleType(self.mapToTarget(name)) with file.open('r') as data : code = data.read() # self.log.debug([key for key in sys.modules.keys() if name in key]) load.__file__ = str(file) code = compile(code, str(file), 'exec') sys.modules[name] = load # must occur before exec exec(code, load.__dict__) # self.log.debug([key for key in sys.modules.keys() if name in key]) # self.log.debug(load.__version__) if name in self.wrap : # Note : importing PACKAGE as _PACKAGE_ fails. # This is due to the to the `builtin` importers preventing # name changes. To be explicit they can't find a funny # named module and one can't cross assign the module. One # can reassign it however # self.log.debug(self.ondent("L:Wrap")) spec = self.wrap.pop(name) load = spec.load_module() # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) # self.log.debug(self.undent("L:Done")) return load # temp = self.modules() # file = str(temp[self.mapToTarget(name)]) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(self.mapToTarget(name), file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(name, file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # self.log.debug(self.mapToTarget(name)) # self.log.debug(self.modules().keys()) # file = self.modules()[self.mapToTarget(name)] # # self.log.debug(file) # temp = machinery.SourceFileLoader(name, [str(self.root)]) # temp.load_module() # temp = machinery.SourceFileLoader(name, self.modules()[self.mapToTarget(name)]).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# sys.modules[name] = temp # Using sys.modules[module] = temp fails # self.log.debug([key for key in sys.modules.keys() if key in name]) # self.trap[name].load_module() # temp = OverlayLoader(name, str(self.trap[name])).load_module(modsep.join([self.name,name])) # temp = machinery.SourceFileLoader(name, str(self.trap[name])).load_module() # return temp # self.log.debug([key for key in sys.modules.keys() if key in name]) # # be weary here, re-assigning names is a bit finnicky and has a rollover impact. # sys.modules[name] = temp # Using sys.modules[module] = temp fails # parent, _, module = name.partition(modsep) # Was rpartition # if name in self.trap : # This might break # # Handle Source Import # self.trap.pop(name) # self.log.debug(self.ondent("Pass Trapped")) # temp = self.temp.load_module() # sys.modules[self.mapTarget(name)] = temp # self.log.debug("{}< {}".format(self.undent("Imported"),self.mapTarget(name))) # return temp # else : # # Handle Overlay Import # if module in sys.modules: # Already Imported # return sys.modules[module] # Modules' absolute path # # Import the module # self.trap.append(module) # file = self.mapToRoot(name) # _name_ = self.mapToSource(name) # root,stem = self.pathParts(self.mapToSource(name)) # self.log.debug("{}: {:18} -> {:18} {:80}".format(self.ondent("FileLoader"),root, stem, file)) # temp = machinery.SourceFileLoader(name, file).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# sys.modules[name] = temp # Using sys.modules[module] = temp fails # self.log.debug("{}< {}".format(self.undent("Imported"),temp)) # return temp if __name__ == "__main__" : # Setup Logging import logging logging.basicConfig(format = '%(message)s') logger = logging.getLogger() # "__34__" logger.setLevel(logging.DEBUG) # Call Test Suites # import unittest # tests = { # "all" : 'test*.py', # "overlay" : '*Overlay.py', # "uppercase": '*UpperCase.py', # "tiers" : '*Tiers.py', # } # test = 'all' # suite = unittest.TestLoader().discover('..',tests[test]) # unittest.TextTestRunner(verbosity=1).run(suite) __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)),'..\\tests') sys.path.append(__root__) import builtins def _import_(*args, importer = __import__) : # Hooks the import statement logger.debug("import : {}".format(args[0])) temp = importer(*args) # logger.debug(dir(temp)) logger.debug([temp.__name__, temp.__file__, temp.__package__, temp.__loader__]) return temp # Atomic Imports import uppercase builtins.__import__ = _import_ # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key]) # logger.debug("Primary") # import tiers # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key]) # logger.debug(tiers.__version__) logger.debug("Secondary") from tiers import module_a # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key]) # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key]) # logger.debug(module_a.__version__) # from tiers import package_a # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'tiers' in key]) # logger.debug([sys.modules[key] for key in sys.modules.keys() if 'os' in key]) logger.debug(package_a.__version__) # Implicit Root Import # from overlay import * # Test with/out __all__ defined # Explicit Root Import # from uppercase import tiers # Explicit Nested import # from overlay.tiers import module_a # Explicit Nested import # from 
overlay.tiers.module_a import Alpha # print(Alpha()) # Explicit Staged import # from overlay import tiers # logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')])) # from tiers import module_a # logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if 'overlay' in key or 'tiers' in key])) # logger.debug("\n".join(["{} : {}".format(key, sys.modules[key]) for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__34__.replacement.py
__34__.replacement.py
# ApeMan __34__.replacement.py -- Python 3.3/3.4-era import-overlay machinery.
# NOTE(review): original newlines/indentation were lost in extraction; the
# layout below is reconstructed, all code tokens are preserved verbatim.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# System
import os
import sys
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as izip, tee
# Imports
from importlib import util, abc ,machinery
import imp
# Debugging
import logging

# Constants
modsep = '.'  # module-path separator used throughout the mapping helpers

# (removed: commented-out helper `outer(name)` that walked caller frames via
#  inspect.stack to find a name in an enclosing scope)


class OverlayImporter(abc.MetaPathFinder, abc.Loader):
    # PEP302 prescribes the use of two different classes, a Finder and a
    # Loader, providing find_module and load_module respectively; here the
    # two are combined into a unified Importer.
    #
    # Naming scheme (FQ = fully qualified):
    #   FQMN  overlay.tiers                what the user imports (the wrapper)
    #   FQON  tiers                        what is installed (the overlay)
    #   FQAN  _tiers_                      the hidden/abstracted original
    #   FQSN  overlay._tiers_              what the user really imports (probably redundant)
    #   FQPN  overlay\tiers                the relative path name
    #   FQFN  overlay\tiers\__init__.py    the relative file name
    #
    # Strategy: map an import under a different name. overlay.tiers is mapped
    # to overlay._tiers_.py which is imported as tiers, while tiers, the
    # original module, is imported as _tiers_.

    # Debug-label indentation bookkeeping (candidate for a standalone mixin).
    __indent__ = 0   # current nesting depth of the debug trace
    __taglen__ = 18  # fixed width of the trace label column

    def indent(self, label = None, char = " ", length = __taglen__):
        """Format *label* at the current depth, then increase the depth."""
        if label :
            message = "{0}{1:{2}}".format(char*self.__indent__, label[:length-self.__indent__], max(length-self.__indent__,1))
        else :
            message = ""
        self.__indent__ += 1
        return message

    def undent(self, label = None, char = " ", length = __taglen__):
        """Format *label* at the current depth, then decrease the depth."""
        if label :
            message = "{0}{1:{2}}".format(char*self.__indent__, label[:length-self.__indent__],length-self.__indent__)
        else :
            message = ""
        self.__indent__ -= 1
        return message

    def ondent(self, label = None, char = " ", length = __taglen__):
        """Format *label* at the current depth without changing the depth."""
        return "{0}{1:{2}}".format(char*self.__indent__, label[:length-self.__indent__],length-self.__indent__)

    def __init__(self, *args, name = None, path = None, logger = logging.getLogger(__name__), **kvps):
        # When this code executes, the module importing the OverlayImporter is
        # already in sys.modules but the importer is not yet registered in
        # sys.meta_path; registration happens after __init__, e.g.
        #
        #   import sys
        #   from overlay import OverlayImporter
        #   sys.meta_path.insert(0, OverlayImporter())
        #
        # Registering from inside __init__ would be a convenient one-liner but
        # might create undesired side effects. Later versions of importlib
        # seem to discourage instantiating importers like this.
        super().__init__(*args, **kvps)
        self.mask = "_{}_"  # template mapping MODULE -> _MODULE_
        self.trap = []      # modules currently being redirected
        self.path = {}      # FQMN -> path entries captured in find_module
        # (removed: commented-out inspect/traceback experiments for deriving
        #  the caller's module)
        # name/root default to the module that instantiated this importer.
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = path or inspect.getmodule(inspect.stack()[1][0]).__path__ # Used to reference __path__ somehow
        self.log = logger
        self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key]))

    # Mapping
    def pathParts(self,name) :
        """Splits the FQMN into the part for the importr and the overlay"""
        root, stem = [], []
        # izip pairs our package parts with the request's parts; unmatched
        # tail parts (test is None) belong to the overlay stem.
        [root.append(part) if test else stem.append(part) for test, part in izip(self.name.split(modsep),name.split(modsep))]
        return modsep.join(root), modsep.join(stem)

    def mapToSource(self,name) :
        """Maps request to the corresponding overlay module"""
        # Consider the use of util.resolve_name(name, path)
        # Given overlay.tiers return overlay._tiers_
        return modsep.join([part if test else self.mask.format(part) for test, part in izip(self.name.split(modsep),name.split(modsep))])

    def mapTarget(self, name) :
        """Maps request to the overlay module"""
        # Deprecated use self.pathParts(self.mapToSource(FQMN))
        # Given overlay.tiers return tiers
        # Older Code
        return self.mask.format(name)
        # Newer Code
        # NOTE(review): unreachable -- the "Older Code" return above always
        # fires first; the newer implementation below is dead code.
        return modsep.join([part for test, part in izip(self.name.split(modsep),name.split(modsep)) if not test])
        # return modsep.join(name.split(modsep)[1:])

    def mapToHidden(self,name) :
        """Maps request to a corresponding hidden module"""
        # This must be run upon the output of mapToTarget
        # Given overlay.tiers or tiers return _tiers_
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def mapSource(self, name) :
        """Deprecated : Mapped the Overlay back to the module"""
        # Strips the mask, e.g. _tiers_ -> tiers.
        mask = self.mask.split("{}")
        return name[len(mask[0]):-len(mask[-1])]

    def mapToSystem(self, name):
        """Maps a module to the corresponding overlay system path"""
        # Finds the first folder or file matching the module name; *name*
        # must be processed beforehand using self,mapToSource.
        stem = [part for test, part in izip(self.name.split(modsep),name.split(modsep)) if not test]
        # tee lets us probe for exhaustion on one copy while keeping the value
        # from the other; the loop leaves `path` at the last candidate.
        test, item = tee(os.path.join(path, *stem) for path in self.root)
        path = None
        while next(test, None) :
            path = next(item, None)
        if not os.path.isdir(path) : # [ref:2]
            path += '.py'
        return path

    def mapToFile(self, name):
        """ Similar to mapToSystem but for packages it tries to map to __init__.py files"""
        # Note one used to pre-map the path FQMN using mapToSource,
        # this is done internally now
        stem = [part for test, part in izip(self.name.split(modsep),self.mapToSource(name).split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in self.path[name])
        path = None
        while next(test, None) :
            path = next(item, None)
        if os.path.isdir(path) : # [ref:3]
            path = os.path.join(path, '__init__.py')
        else :
            path += '.py'
        return path

    def mapToRoot(self, name):
        """ Similar to mapToFile but from the Importers' Root Path"""
        # Note one used to pre-map the path FQMN using mapToSource,
        # this is done internally now. This is somewhat experimental.
        stem = [part for test, part in izip(self.name.split(modsep),self.mapToSource(name).split(modsep)) if not test]
        self.log.debug("{}: {} {} {} ".format(self.ondent("MapToRoot"), stem, self.root, self.path[name]))
        test, item = tee(os.path.join(path, *stem) for path in self.root) # self.path[name]
        path = None
        while next(test, None) :
            path = next(item, None)
        if os.path.isdir(path) : # [ref:3]
            path = os.path.join(path, '__init__.py')
        else :
            path += '.py'
        return path

    def rename(self, name) : # - Deprecated
        # Currently this assumes the module is one level deep within the
        # package, that is the following structure is expected:
        #   package\   the folder containing the __init__.py you are reading
        #   _module_   the module you are patching renamed with underscores
        return modsep.join([item if enum!=1 else "_{}_".format(item) for enum, item in enumerate(name.split(modsep))])

    def overlays(self) : # - Deprecated
        # The list of modules that are patched under this overlay.
        # pkg.util.walkpackages is apparently useful here
        modules = [os.path.splitext(item)[0] for item in os.listdir(self.root[0]) if os.path.splitext(item)[0] not in ["__init__","__pycache__"]]
        if self.trap :
            return [self.mapSource(os.path.splitext(item)[0]) for item in modules]
        else :
            return modules

    # (removed: commented-out draft of find_spec -- the modern API would wrap
    #  find_module via utils.spec_from_loader; the draft recursed infinitely)
    # (removed: commented-out `loader` helper returning self)

    def find_module(self, name, path=None):
        # Deprecated use :
        #   Python > 3.3 use IMPORTLIB.UTIL.FIND_SPEC
        #   Python = 3.3 use IMPORTLIB.FIND_LOADER
        #
        # path - List of File System Path
        # name - The FQMN
        bits = name.split(modsep)
        self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name),name, str(path)))
        # Overlay Layer
        if name.startswith(self.name) : # len(bits) > 1 and self.mapToHidden(bits[1]) in self.overlays():
            # Note : the clamp on bit length is to ensure the importer rolls back to root to import patched modules.
            self.path[name] = path
            self.log.debug("{0:12}: {1:<40} {2:<80}".format(self.undent("Finder"), self.mapToSource(name),__file__))
            return self
        # Standard Layer
        # NOTE(review): `bits[-1:]` is a one-element *list*; a list is never an
        # element of self.trap (a list of strings), so this branch appears to
        # be dead -- `bits[-1]` was probably intended.
        if bits[-1:] in self.trap :
            for meta in sys.meta_path :
                if meta is not self :
                    self.temp = meta.find_module(name, path)
                    if self.temp :
                        # NOTE(review): mixes automatic "{}" and manual "{1}"
                        # format fields; str.format raises ValueError if this
                        # line is ever reached.
                        self.log.debug("{}: {1:<40}".format(self.ondent("Trapper"), name))
                        return self
        self.undent()
        return None

    def load_module(self, name):
        # Deprecated replace with the classes in IMPORTLIB.MACHINERY
        #
        # If IMP.LOAD_MODULE was used with IMP.FIND_MODULE previously then
        # IMPORTLIB.IMPORT_MODULE is a better substitute. If not then use the
        # loader that pairs with the prior finder, one of :
        #   IMPORTLIB.UTIL.FIND_SPEC <->
        #   IMPORTLIB.FIND_LOADER    <->
        self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name),name))
        parent, _, module = name.partition(modsep) # Was rpartition
        if name in self.trap : # This might break
            # Handle Source Import
            # NOTE(review): self.trap is a list, so pop() expects an integer
            # index; list.pop(name) with a string raises TypeError.
            self.trap.pop(name)
            self.log.debug(self.ondent("Pass Trapped"))
            temp = self.temp.load_module()
            sys.modules[self.mapTarget(name)] = temp
            self.log.debug("{}< {}".format(self.undent("Imported"),self.mapTarget(name)))
            return temp
        else :
            # Handle Overlay Import
            # (removed: commented-out guard raising ImportError for modules
            #  outside self.overlays())
            if module in sys.modules: # Already Imported
                return sys.modules[module]
            # Modules' absolute path
            # Import the module
            self.trap.append(module)
            # (removed: commented-out Python 3.2 imp.find_module/imp.load_module API)
            # Python 3.3 and 3.4 API - It's a bit messy right now
            file = self.mapToRoot(name)
            _name_ = self.mapToSource(name)
            root,stem = self.pathParts(self.mapToSource(name))
            self.log.debug("{}: {:18} -> {:18} {:80}".format(self.ondent("FileLoader"),root, stem, file))
            temp = machinery.SourceFileLoader(name, file).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact.
            sys.modules[name] = temp # Using sys.modules[module] = temp fails
            self.log.debug("{}< {}".format(self.undent("Imported"),temp))
            return temp


if __name__ == "__main__" :
    # This section is primarily intended for developers
    # Setup Logging
    import logging
    logging.basicConfig(format = '%(message)s')
    logger = logging.getLogger("__34__")
    logger.setLevel(logging.DEBUG)
    # Call Test Suites
    import unittest
    tests = {
        "all" : 'test*.py',
        "overlay" : '*Overlay.py',
        "uppercase": '*UpperCase.py',
        "tiers" : '*Tiers.py',
    }
    # test = 'all'
    # suite = unittest.TestLoader().discover('..',tests[test])
    # unittest.TextTestRunner(verbosity=1).run(suite)
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)),'..\\help')
    sys.path.append(__root__)
    # Explicit Root Import
    from uppercase import tiers
    # (removed: large block of commented-out alternative import experiments --
    #  implicit root import, explicit nested imports, staged imports and the
    #  associated sys.modules diagnostics)
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__34__.substitution.py
__34__.substitution.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function # Constants DEBUG = True # Weak references import weakref # System import os import sys import builtins # Types import types # Debugging if DEBUG : from pdb import set_trace as db # Inspection import inspect # Iteration # from itertools import zip_longest as izip, tee # Deprecated # Imports from importlib import util, abc ,machinery, _bootstrap as bootstrap, import_module import imp from glob import glob # Local Libraries try : from .descriptors import PathName # Remove this dependency in favour of glob from .utilities import Indentation except (ModuleNotFoundError, ImportError) : # SystemError from apeman.descriptors import PathName # Remove this dependency in favour of glob from apeman.utilities import Indentation # Logging if DEBUG : import logging if DEBUG : log = logging.getLogger(__name__) # Constants modsep = '.' version = (0,0,0) class Import(Indentation): """ This class replaces the `builtins.import` function with itself. Bypassing the ModuleSpec and Finder/Loader or Importer mechanisms. .. note :: This is derived from the Python 3.4 implementation. .. note :: This class is setup as a singleton and replaces builtins.__import__ with an instance of itself """ _modules_ = [] modules = [] @property def sources(self): return {modsep.join(mod) : (name, root, src) for name, root, modules in self._modules_ for mod, src in modules} @property def targets(self): return {modsep.join(mod) : src for name, root, modules in self.overlays for mod, src in modules} # root = PathName() # lom = [] # The following converts ApeMan into a singleton, this is undesirable in most cases. 
# _self_ = None # def __new__(cls, *args, **kvps) : # if not isinstance(cls._self_, cls) : # cls._self_ = super().__new__(cls) # *args, **kvps) # This is about to cause trouble in the future # return cls._self_ def __init__(self, *args, name = None, path = None, root = None, _import_ = __import__, debug = DEBUG, **kvps): """ root : The folder from which ApeMan is invoked, this defaults to the folder containig the init file invoking ApeMan name : The package from which ApeMan is invoked, this defaults to the package invoking ApeMan, that is it maps to the folder containig the init file. path : Deprecated, this is no longer in use. debug : Deprecated, a flag to enable debugging, this will be done by configuration file in the future. """ super().__init__(*args, **kvps) # Properties self.mask = "_{}_" # Packages name = name or inspect.getmodule(inspect.stack()[1][0]).__name__ root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__) mods = self.overlays(name, root) if mods : logging.debug(mods) self._modules_.append(mods) # print(self.sources) # Logging and Debugging self.debug = debug if self.debug : self.log = logging.getLogger(__name__) if self.debug : self.log.debug("Initialized : Import") # if self.debug : self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key])) # Import Functionality # Ideally the code should call self._substitute_() self._import_ = _import_ builtins.__import__ = weakref.ref(self, self.__del__) # Originally weakref.proxy : https://eli.thegreenplace.net/2009/06/12/safely-using-destructors-in-python/ # Secondary implementation def __call__(self, name, globals=None, locals=None, fromlist=(), level=0, **kvps) : # (self, *args, *kvps): # name - The package to be imported e.g. 
A.B # globals - The global variables for the module making the import # locals - The local variables for the module making the import # from - Indicates if root module, A, or submodule, B, must be imported for `import A` or `from A import B` respectively # level - Relative level above module that imports must search for modules e.g. ..C -> level = 2 """The ApeMan implementation of/subsituting :meth:`builtins.__import__` This traps the call to import MODULE and redirects to import _MODULE_. _MODULE_ of course imports the original MODULE and the trap is redirected. """ # print("Import : {}".format(name)) # print(globals) # print(self.sources) # print(self.mapToTarget(name)) _module_ = next(((key, *val) for key, val in self.sources.items() if self.mapToTarget(name) == key), None) # module = next(((key, *val) for key, val in self.targets.items() if self.mapToTarget(name) == key), None) if _module_ and not globals["__name__"] == modsep.join([_module_[1], _module_[0]]): # Include : hasattr(globals, "__name__") _name_, overlay, root, path = _module_ if self.debug : self.log.debug("Substituting: {} -> {}".format(name, _name_)) # if self.debug : self.log.debug("Gloabls : {}".format(globals["__name__"])) # return self._import_(modsep.join([overlay, _name_]), globals=globals, locals=locals, fromlist=fromlist, level=level, **kvps) return import_module(modsep.join([overlay, self.mapToTarget(name)])) # This is a little black magic as we ignore the args if self.debug : self.log.debug("Importing : {}".format(name)) return self._import_(name, globals=globals, locals=locals, fromlist=fromlist, level=level, **kvps) # Original Implementation # def __call__(self, name, *args, **kvps) : # (self, *args, *kvps): # # name - The package to be imported e.g. 
A.B # # globals - The global variables for the module making the import # # locals - The local variables for the module making the import # # from - Indicates if root module, A, or submodule, B, must be imported for `import A` or `from A import B` respectively # # level - Relative level above module that imports must search for modules e.g. ..C -> level = 2 # """The ApeMan implementation of/subsituting :meth:`builtins.__import__` # # This traps the call to import MODULE and redirects to import _MODULE_. # _MODULE_ of course imports the original MODULE and the trap is redirected. # """ # # This works as follows : # # if MODULE in _MODULES_ : # # if MODULE in ListOfModules : # # return _MODULE_ as MODULE and register it # # else : # # return module # # else : # # return MODULE # # self.log.debug([{arg.get('__name__',None) or ".".join([arg["__module__"],arg["__qualname__"]]):arg.keys()} if isinstance(arg, dict) else arg for arg in args]) # Globals has __name__ and Locals has __qualname__ and __module__ # if self.mapToTarget(name) in self.mods.keys() : # self.log.debug("Mapping : {} -> {}".format(name, self.mapToTarget(name))) # self.log.debug([{arg['__name__']:[]} if isinstance(arg, dict) else arg for arg in args]) # if self.debug : self.log.debug("Overloading : {}".format(name)) # if name in self.lom : # if self.debug : self.log.debug("Importing : " + name) # return self._import_(name, *args, *kvps) # # self.log.debug("remap : " + name + " -> "+ self.name + "." 
+self.mapToTarget(name)) # # self.log.debug(name) # # self.log.debug(self.name + modsep + self.mapToTarget(name)) # # self.log.debug("Wrapped Import") # self.lom.append(name) # if self.debug : self.log.debug("Redirecting : {}".format(self.name + modsep + self.mapToTarget(name))) # return import_module(self.name + modsep + self.mapToTarget(name)) # This is a little black magic as we ignore the args # if self.debug : self.log.debug("Importing : {}".format(name)) # return self._import_(name, *args, **kvps) def mapToTarget(self, name): """Maps request to the overlay module Given the name for a modules, `MODULE`, map this to an overlay, `_MODULE_`. Longer paths are handled somewhat simply at the moment e.g. `PACKAGE.MODULE` is simply ampped to `_PACKAGE_._MODULE_` versus say `PACKAGE._MODULE_`. """ # Converts tiers.package.module to _tiers_._package_._module_ return modsep.join([self.mask.format(part) for part in name.split(modsep)]) def overlays(self, name, root) : """Lists the overlays implemented within a directory """ # This differs from overlays in that it recurses through the # folder structure to find python modules # # Note : It is more useful for this fucntion to return the overlay tuple then it is for it to append it directly to self._modules_ ext = '.py' # This version quashed the __init__ file somehow # mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)] # This should really use os.path.splitext # lst = [(mod(os.path.relpath(file,root).split(os.path.sep), ext), file) for file in glob(os.path.join(root, '*' + ext), recursive = True)] mod = lambda root, path : tuple((os.path.splitext(os.path.relpath(path, root))[0]).split(os.path.sep)) lst = [(mod(root, file), file) for file in glob(os.path.join(root, '*' + ext), recursive = True)] # print([(os.path.relpath(file,root)) for file in glob(os.path.join(root, '*' + ext), recursive = True)]) # new = {modsep.join(item[0][:-1]) if item[0][-1] == 
"__init__" else modsep.join(item[0]) : item[1] for item in lst} # print(self._modules_) self._modules_.append((name, root, lst)) # Root is a bit redundant # print((name, root, lst)) # print(self._modules_) # The code here stores the modules in a bit of a convoluted manner # self.mods = new # self.modules[(name,root)] # self.modules.update((name,root,modules,{})) # Be weary not to # def __repr__(self) : # return str(self.__class__, self.name, self.path) # def __str__(self) : # return str(self.__class__, self.name, self.path) def __del__(self): """ The recommended method for removing an instance of ApeMan is to call __del__ on a reference ones retains for this purpose. :: apeman = ApeMan() apeman.__del__() If one has not retained a reference for this purpose then the following call may be used instead. :: ApeMan() builtins.__import__.__del__() """ # print("__del__") # self._substitute_() builtins.__import__ = self._import_ def _substitute_(self, apply = None): """DEPRECATED : Substitute the current builtins.__import__ for self and vice versa. It is assumed that this is called twice, once by __init__ and once by __del__. It does not track staate properly and calling the function outside of these contexts should not really be done. There might be a better implementation, such as splitting it into two functions to either apply or revert the substitution. """ # [1] describes how a weak reference might be used to prevent a concrete reference to the class. # This allows for the deletion of the class from a concrete reference. # This must be considered if we make ApeMan into a singleton. 
# [1]https://stackoverflow.com/a/3014477 if apply is None : if builtins.__import__ == self : # Previously, When Import was a Singleton : builtins.__import__ == _import_ builtins.__import__ = self._import_ else : print("revert") builtins.__import__ = weakref.ref(self) # This was weakref.proxy : https://eli.thegreenplace.net/2009/06/12/safely-using-destructors-in-python/ # builtins.__import__ = self class OverlayImporter(abc.MetaPathFinder, Indentation): # , abc.SourceLoader """ This class combines a Finder and a Loader into an Importer. .. inheritance-diagram:: apeman.__34__ :parts: 2 The strategy used maps overwrites the imported module with the overlay import under a different name Since Python imports are atomic one needs to trap modules being loaded and wrapped overlay.tiers is to be mapped to overlay._tiers_.py which is imported as tiers, while tiers, the original module is imported as _tiers_ .. note :: This is not an especially good implementation, it is not thread safe as it does not invoke module locks when loaded. """ # See section 5.5 in [1] to determine if the Path Based Finder # is a better fit for this class # # https://docs.python.org/3/reference/import.html root = PathName() lom = [] def __init__(self, *args, name = None, path = None, root = None, _import_ = __import__, **kvps): """ name : The name of the module that ApeMan is invoked from. That is the __init__ file in the overlays' folder. root : The folder where ApeMan is invoked from That is the __init__ file in the overlays' folder. path : DEPRECATED, not sure of the original purpose, presumably the older code may have this documented otherwise one might have intended it be used to project that the root of the overlay was relative to the root of the invoking space. 
_import_ : This should not be set unless you are really trying to break things, it defaults to the builtin.__import__ """ if DEBUG : log.debug("OverlayImporter Initialized") super().__init__(*args, **kvps) # Importer Functionality self.mask = "_{}_" self.trap = {} self.wrap = {} self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__ self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__) self.mods = self.modules() if DEBUG : self.log = logging.getLogger(__name__) if DEBUG : self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key])) # Import Functionality # builtins.__import__ = self # self._import_ = _import_ def __call__(self, name, *args) : # (self, *args, *kvps): # Hooks the import statement # self.log.debug("importing : {}".format(name)) # self.log.debug([{arg['__name__']:arg.keys()} if isinstance(arg, dict) else arg for arg in args]) # if self.mapToTarget(name) in self.mods.keys() : # self.log.debug("Overloaded") # if name in self.lom : # self.log.debug("unmap : " + name) # return self._import_(name, *args) # self.log.debug("remap : " + name + " -> "+ self.name + "." 
+self.mapToTarget(name)) # self.log.debug(name) # self.log.debug(self.name + modsep + self.mapToTarget(name)) # self.log.debug("Wrapped Import") # self.lom.append(name) # return import_module(self.name + modsep + self.mapToTarget(name)) # This is a little black magic as we ignore the args if DEBUG : log.debug("import called : {}".format(name)) return self._import_(name, *args) def mapToTarget(self, name) : """Maps request to the overlay module""" # Converts tiers.package.module to _tiers_._package_._module_ return modsep.join([self.mask.format(part) for part in name.split(modsep)]) def modules(self) : """ Lists the overlays implemented within a directory """ # This differs from overlays in that it recurses through the # folder structure to find python modules ext = '.py' mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)] lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)] return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]) : item[1] for item in lst} # def get_data() : # pass # def get_filename() : # pass # The 3.5 module should implement this. 
# # def find_spec(self, name, path, target = None) : # self.log.debug("{}> {:<40} {:<80}".format(self.indent("FS:" + self.name),name, str(path))) # spec = util.spec_from_file_location(self.mapToTarget(name), str(self.modules()[self.mapToTarget(name)])) # self.log.debug(spec) # self.trap[name] = spec.loader # spec.loader = self # self.log.debug(spec) # return spec # # def exec_module(self, *args, **kvps) : # self.log.debug("Exec_Module") # self.log.debug(args) # self.log.debug(kvps) # # def create_module(self, *args, **kvps) : # self.log.debug("Create_Module") # self.log.debug(args) # self.log.debug(kvps) # def find_module(self, name, path=None): # # self.log.debug("Find_module") # self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name),name, str(path))) # # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) # if self.mapToTarget(name) in self.mods : # User imports _PACKAGE_ # # self.log.debug(self.undent("F:Trap")) # self.trap[name] = self.mods.pop(self.mapToTarget(name)) # return self # if self.trap.pop(name) : # overlay imports PACKAGE # # self.log.debug(self.undent("F:Wrap")) # for meta in [meta for meta in sys.meta_path if meta is not self]: # self.wrap[name] = self.wrap.get(name) or meta.find_module(name, path) # return self # # if name in self.wrap : # overlay imports PACKAGE # # return self # return None # def load_module(self, name): # # self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name),name)) # load = sys.modules.get(name) # if name in self.trap : # # One should strictly use SourceFileLoader here instead. 
# # self.log.debug(self.ondent("L:Trap")) # file = self.trap.get(name) # load = types.ModuleType(self.mapToTarget(name)) # with file.open('r') as data : code = data.read() # # self.log.debug([key for key in sys.modules.keys() if name in key]) # load.__file__ = str(file) # code = compile(code, str(file), 'exec') # sys.modules[name] = load # must occur before exec # exec(code, load.__dict__) # # self.log.debug([key for key in sys.modules.keys() if name in key]) # # self.log.debug(load.__version__) # if name in self.wrap : # # Note : importing PACKAGE as _PACKAGE_ fails. # # This is due to the to the `builtin` importers preventing # # name changes. To be explicit they can't find a funny # # named module and one can't cross assign the module. One # # can reassign it however # # self.log.debug(self.ondent("L:Wrap")) # spec = self.wrap.pop(name) # load = spec.load_module() # # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) # # self.log.debug(self.undent("L:Done")) # return load # temp = self.modules() # file = str(temp[self.mapToTarget(name)]) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(self.mapToTarget(name), file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(name, file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # self.log.debug(self.mapToTarget(name)) # self.log.debug(self.modules().keys()) # file = self.modules()[self.mapToTarget(name)] # # self.log.debug(file) # temp = machinery.SourceFileLoader(name, [str(self.root)]) # temp.load_module() # temp = machinery.SourceFileLoader(name, self.modules()[self.mapToTarget(name)]).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# sys.modules[name] = temp # Using sys.modules[module] = temp fails # self.log.debug([key for key in sys.modules.keys() if key in name]) # self.trap[name].load_module() # temp = OverlayLoader(name, str(self.trap[name])).load_module(modsep.join([self.name,name])) # temp = machinery.SourceFileLoader(name, str(self.trap[name])).load_module() # return temp # self.log.debug([key for key in sys.modules.keys() if key in name]) # # be weary here, re-assigning names is a bit finnicky and has a rollover impact. # sys.modules[name] = temp # Using sys.modules[module] = temp fails # parent, _, module = name.partition(modsep) # Was rpartition # if name in self.trap : # This might break # # Handle Source Import # self.trap.pop(name) # self.log.debug(self.ondent("Pass Trapped")) # temp = self.temp.load_module() # sys.modules[self.mapTarget(name)] = temp # self.log.debug("{}< {}".format(self.undent("Imported"),self.mapTarget(name))) # return temp # else : # # Handle Overlay Import # if module in sys.modules: # Already Imported # return sys.modules[module] # Modules' absolute path # # Import the module # self.trap.append(module) # file = self.mapToRoot(name) # _name_ = self.mapToSource(name) # root,stem = self.pathParts(self.mapToSource(name)) # self.log.debug("{}: {:18} -> {:18} {:80}".format(self.ondent("FileLoader"),root, stem, file)) # temp = machinery.SourceFileLoader(name, file).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# Script entry point for the __36__ importer experiments: run the selected
# unittest suite against the mockup tree that ships beside this package.
# (Dead commented-out subprocess/import experiments from the original were
# removed; behavior of the active code is unchanged.)
if __name__ == "__main__" :
    # Verbose logging while the importer is under development.
    logging.basicConfig(level=logging.DEBUG)
    # System
    from pathlib import Path
    # Make ../mockups importable so the discovered suites can exercise it.
    __root__ = str(Path(os.path.dirname(os.path.abspath(__file__))).joinpath('../mockups').resolve())
    sys.path.append(__root__)
    # Named test selections mapped onto unittest discovery glob patterns.
    import unittest
    tests = {
        "all"        : 'test*.py',
        "overlay"    : '*Overlay.py',
        "assumptions": '*Assumptions.py',
        "machinery"  : '*Machinery.py',
        "scaffold"   : '*Scaffoled.py',   # NOTE(review): 'Scaffoled' looks like a typo but may match a real file -- confirm
        "structure"  : '*Structure.py',
    }
    test = 'structure'
    suite = unittest.TestLoader().discover('..', tests[test])
    unittest.TextTestRunner(verbosity=1).run(suite)
    pass
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__36__.repeats.py
__36__.repeats.py
# ---------------------------------------------------------------------------
# __34__.original.py -- imp-based ApeMan importer for Python 3.4.
# NOTE(review): this file was recovered from a whitespace-collapsed dump;
# the statement nesting below was reconstructed from the token stream.
# Verify control flow (especially find_module/load_module) against the
# original sources before relying on it.
# ---------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# System
import os
import sys
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as izip, tee
# Imports
from importlib import util, abc, machinery
import imp
# Debugging
import logging
# Constants
modsep = '.'  # separator used in fully qualified module names


class OverlayImporter(abc.MetaPathFinder, abc.Loader):
    # Combined finder/loader: maps a request for MODULE onto an overlay
    # file named _MODULE_ so a patch can shadow the original module.

    # Depth counter shared by the indent/undent/ondent logging helpers.
    dent = 0

    def indent(self, label=None, char=" ", length=12):
        # Format `label` at the current depth, then step one level deeper.
        message = "{0}{1:{2}}".format(char*self.dent, label[:length-self.dent], length-self.dent)
        self.dent += 1
        return message

    def undent(self, label=None, char=" ", length=12):
        # Format `label` at the current depth, then step one level out.
        message = "{0}{1:{2}}".format(char*self.dent, label[:length-self.dent], length-self.dent)
        self.dent -= 1
        return message

    def ondent(self, label=None, char=" ", length=12):
        # Format `label` at the current depth without changing it.
        return "{0}{1:{2}}".format(char*self.dent, label[:length-self.dent], length-self.dent)

    def __init__(self, *args, name=None, path=None, logger=logging.getLogger(__name__), **kvps):
        # NOTE(review): the `logger` default is evaluated once at function
        # definition time; harmless for a logger, but worth knowing.
        super().__init__(*args, **kvps)
        self.mask = "_{}_"   # template mapping MODULE -> _MODULE_
        self.trap = None     # module name currently being re-imported, if any
        # Default to the package/module that instantiated this importer.
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.path = path or inspect.getmodule(inspect.stack()[1][0]).__path__  # Used to reference __path__ somehow
        self.log = logger
        self.log.debug("{:12}: {}".format(self.ondent("Instance"), self.__class__))

    def mapTarget(self, name):
        """Maps request to the overlay module (e.g. ``tiers`` -> ``_tiers_``)."""
        # Older code path -- returns immediately:
        return self.mask.format(name)
        # NOTE(review): unreachable "newer code" retained from the original.
        return modsep.join([part for test, part in izip(self.name.split(modsep), name.split(modsep)) if not test])
        # return modsep.join(name.split(modsep)[1:])

    def mapToHidden(self, name):
        """Maps request to a corresponding hidden module.

        Must be run on the output of :meth:`mapTarget`; given
        ``overlay.tiers`` or ``tiers`` it returns the masked form.
        """
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def mapSource(self, name):
        """Deprecated: maps an overlay name back to the module name."""
        mask = self.mask.split("{}")
        # Strip the leading/trailing mask characters (e.g. _tiers_ -> tiers).
        return name[len(mask[0]):-len(mask[-1])]

    def mapToSystem(self, name):
        """Maps a module to the corresponding overlay system path.

        Finds the first folder or file matching the module name; `name`
        must be pre-processed with :meth:`mapSource`.
        """
        stem = [part for test, part in izip(self.name.split(modsep), name.split(modsep)) if not test]
        # Walk the candidate paths; `path` ends up as the last candidate.
        test, item = tee(os.path.join(path, *stem) for path in self.path)
        path = None
        while next(test, None):
            path = next(item, None)
        if not os.path.isdir(path):  # [ref:2]
            path += '.py'
        return path

    def mapToFile(self, name, path):
        """Like :meth:`mapToSystem`, but maps packages to ``__init__.py``.

        Note one must premap the FQMN using :meth:`mapSource`.
        """
        stem = [part for test, part in izip(self.name.split(modsep), name.split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in path)
        path = None
        while next(test, None):
            path = next(item, None)
        if os.path.isdir(path):  # [ref:3]
            path = os.path.join(path, '__init__.py')
        else:
            path += '.py'
        return path

    def rename(self, name):
        # Currently this assumes the module is one level deep within the
        # package, that is the following structure is expected:
        #   package\    the folder containing the __init__.py you are reading
        #   _module_    the module being patched, renamed with underscores
        return modsep.join([item if enum != 1 else "_{}_".format(item) for enum, item in enumerate(name.split(modsep))])

    def overlays(self):
        # The list of modules patched under this overlay.
        # (pkgutil.walk_packages is apparently useful here.)
        modules = [os.path.splitext(item)[0] for item in os.listdir(self.path[0]) if os.path.splitext(item)[0] not in ["__init__", "__pycache__"]]
        if self.trap:
            return [self.mapSource(os.path.splitext(item)[0]) for item in modules]
        else:
            return modules

    def find_module(self, name, path=None):
        # Deprecated API: Python > 3.3 uses importlib.util.find_spec,
        # Python 3.3 uses importlib.find_loader.
        bits = name.split(modsep)
        self.log.debug("{0:12}> {1:<40} {2:<80}".format(self.indent("Find Mods"), name, str(path)))
        if len(bits) > 1 and self.mapTarget(bits[-1]) in self.overlays():
            # The clamp on bit length ensures the importer rolls back to
            # root to import patched modules.
            self.path = path
            self.log.debug(" "*self.dent + "Discovered : {0:<40} {1:<80}".format(name, __file__))
            return self
        if bits[-1] == self.trap:
            # Delegate the re-import of the original module to the other
            # meta-path finders, remembering the loader they produce.
            for meta in sys.meta_path:
                if meta is not self:
                    self.temp = meta.find_module(name, path)
                    if self.temp:
                        self.log.debug(" "*self.dent + "Discovered : {}".format(name))
                        return self
        return None

    def load_module(self, name):
        # Deprecated API: replace with the classes in importlib.machinery.
        # If imp.load_module was paired with imp.find_module previously,
        # importlib.import_module is a better substitute.
        self.log.debug(" "*self.dent + "Importing > {}".format(name))
        parent, _, module = name.rpartition(modsep)
        if self.trap:
            # Second pass: the trapped original is loaded by the delegated
            # finder and registered under its masked name.
            self.trap = None
            self.log.debug(" "*self.dent + "Pass Trapped")
            temp = self.temp.load_module()
            sys.modules[self.mapTarget(name)] = temp
            self.log.debug(" "*self.dent + "Imported < {}".format(self.mapTarget(name)))
            return temp
        else:
            self.log.debug(" "*self.dent + "Pass Through {}".format(module))
            if name in sys.modules:  # Already imported
                return sys.modules[name]
            # First pass: arm the trap and load the overlay module.
            self.trap = module
            file, path, desc = imp.find_module(self.mapTarget(module), self.path)  # NOTE(review): original comment says "this was psuedo" -- verify
            try:
                temp = imp.load_module(name, file, path, desc)
            finally:
                if file:
                    file.close()
            sys.modules[module] = temp
            self.log.debug("{:10} < {}".format(self.undent("Imported"), module))
            return temp


if __name__ == "__main__":
    # Manual exercise of the importer against the ../tests mockups.
    import logging
    logging.basicConfig(format='%(message)s')
    logger = logging.getLogger("__34__")
    logger.setLevel(logging.DEBUG)
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..\\tests')
    sys.path.append(__root__)
    # Staggered import: first the overlay package, then the patched module.
    from uppercase import tiers
    logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
    from tiers import module_a
    logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if 'overlay' in key or 'tiers' in key]))
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__34__.original.py
__34__.original.py
# ---------------------------------------------------------------------------
# __33__.py -- spec/loader-based ApeMan importer variant.
# NOTE(review): recovered from a whitespace-collapsed dump; nesting was
# reconstructed from the token stream -- verify before relying on it.
# Large regions of commented-out experiments from the original were
# condensed into short notes.
# ---------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# System
import os
import sys
from pathlib import Path
# Types
import types
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as izip, tee
# Imports
from importlib import util, abc, machinery, _bootstrap as bootstrap
import imp
# Debugging
import logging
# Descriptors
from .descriptors import FileName, PathName, RootName
# Utilities
from .utilities import Indentation, sliceLists as stack
# Constants
modsep = '.'        # separator used in fully qualified module names
version = (0, 0, 0)


class OverlayFinder(object):
    # Placeholder; the original carried commented-out SourceFileLoader
    # plumbing (set_data / _cache_bytecode) that was never activated.
    pass


# NOTE(review): two commented-out OverlayLoader variants (one delegating to
# external.SourceLoader, one subclassing machinery.SourceFileLoader to dodge
# the @_name_check decorator via _bootstrap._load_module_shim) were removed
# here; see project history if they are needed.


class OverlayImporter(abc.MetaPathFinder, abc.Loader):
    # Combined Finder and Loader ("Importer").
    #
    # Name glossary from the original header:
    #   FQMN  overlay.tiers              what the user imports (the wrapper)
    #   FQON  tiers                      what is installed (the overlay)
    #   FQAN  _tiers_                    the hidden/abstracted original
    #   FQPN  overlay\tiers              relative path name
    #   FQFN  overlay\tiers\__init__.py  relative file name
    #
    # Strategy: imports are atomic, so modules being loaded are trapped and
    # wrapped -- overlay.tiers maps to overlay._tiers_.py imported as tiers,
    # while tiers, the original module, is imported as _tiers_.

    # Indentation constants for the logging helpers.
    __indent__ = 0
    __taglen__ = 18

    # Main class implementation
    root = PathName()  # descriptor managing the overlay root path

    def __init__(self, *args, name=None, path=None, root=None, **kvps):
        super().__init__(*args, **kvps)
        self.mask = "_{}_"  # template mapping MODULE -> _MODULE_
        self.trap = {}      # names currently trapped -> their overlay files
        # NOTE(review): modules() reads self.root, which is assigned only
        # two lines below -- this relies on the PathName descriptor's
        # behavior before assignment; verify.
        self.mods = self.modules()
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__)
        self.log = logging.getLogger(__name__)
        self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key]))

    def mapToTarget(self, name):
        """Maps request to the overlay module (``a.b`` -> ``_a_._b_``)."""
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def modules(self):
        # Differs from an overlays() listing in that it recurses through
        # the folder structure to find python modules.
        ext = '.py'
        # Strip the extension from the last path part.
        mod = lambda parts, ext: [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)]
        # Packages (__init__) are keyed by their package name.
        return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]): item[1] for item in lst}

    # NOTE(review): commented-out find_spec/exec_module/create_module
    # experiments removed here.

    def find_module(self, name, path=None):
        self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name), name, str(path)))
        # Overlay layer: is there a patch file for this name?
        temp = self.mods.get(self.mapToTarget(name))
        if temp:
            if name in self.trap:  # overlay imports PACKAGE
                self.log.debug(self.ondent("Wrap"))
                # NOTE(review): self.wrap is never initialized in __init__
                # (only self.trap is) -- this branch looks like it would
                # raise AttributeError; confirm against the original.
                for meta in [meta for meta in sys.meta_path if meta is not self]:
                    self.wrap[name] = self.wrap.get(name) or meta.find_module(name, path)
                return self
            else:  # User imports _PACKAGE_
                self.log.debug(self.ondent("Trap"))
                self.trap[name] = temp
                return self
        return None

    def load_module(self, name):
        self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name), name))
        load = sys.modules.get(name)
        if load is None:
            if name in self.wrap:
                # Importing PACKAGE as _PACKAGE_ fails: the builtin
                # importers prevent name changes -- they can't find an
                # oddly named module and one can't cross-assign the module,
                # though one can reassign it.
                module = self.wrap[name]
                load = module.load_module()
            if name in self.trap:
                # Execute the overlay file into a fresh module object
                # registered under the requested name.
                file = self.modules()[self.mapToTarget(name)]
                load = types.ModuleType(self.mapToTarget(name))
                with file.open('r') as data:
                    code = data.read()
                self.log.debug([key for key in sys.modules.keys() if name in key])
                load.__file__ = str(file)
                code = compile(code, str(file), 'exec')
                exec(code, load.__dict__)
                sys.modules[name] = load
                self.log.debug([key for key in sys.modules.keys() if name in key])
                self.log.debug(load.__version__)
        return load
        # NOTE(review): a large block of commented-out alternative loading
        # strategies (SourceFileLoader variants, trap bookkeeping) removed.


if __name__ == "__main__":
    # Setup logging
    import logging
    logging.basicConfig(format='%(message)s')
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # (Commented-out unittest suite runner removed.)
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..\\tests')
    sys.path.append(__root__)
    # Atomic imports; tiers is imported twice on purpose to exercise the
    # already-imported path.
    import uppercase
    import tiers
    import tiers
    print(tiers.__version__)
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__33__.py
__33__.py
# ---------------------------------------------------------------------------
# apeman/__init__.py -- per-interpreter-version dispatch.
# NOTE(review): recovered from a whitespace-collapsed dump; verify nesting.
# ---------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# System
import sys
# Logging
import logging; log = logging.getLogger(__name__)
# Previously this simply imported apeman.py::
#
#   from .apeman import OverlayImporter
#
# apeman.py would then perform this redirection.  To make this work with
# Python 2.7 the selection code was moved here, largely deprecating
# apeman.py.  Later incarnations should provide branches in the Git
# repository for each version of Python and supply an apeman.py specific
# to that version.
#
# NOTE(review): there is no fallback branch -- on any interpreter version
# not listed below, ApeMan is never assigned and importing the package
# would raise NameError downstream; confirm whether that is intended.
if __package__ :
    # Relative imports for normal (package) usage.
    log.debug("Package (Relative) : " + str(__package__))
    if sys.version_info[:2] == (3,6) :
        log.debug("Apeman variant : Python 3.6")
        from .__36__ import OverlayImporter, Import, version
        ApeMan = Import
    if sys.version_info[:2] == (3,5) :
        log.debug("Apeman variant : Python 3.5")
        from .__35__ import OverlayImporter, Import, version
        # NOTE(review): 3.5 exports OverlayImporter while 3.6 exports
        # Import as ApeMan -- confirm the asymmetry is intended.
        ApeMan = OverlayImporter
    if sys.version_info[:2] == (3,4) :
        log.debug("Apeman variant : Python 3.4")
        from .__34__ import OverlayImporter, version
        ApeMan = OverlayImporter
    if sys.version_info[:2] == (2,7) :
        log.debug("Apeman variant : Python 2.7")
        from .__27__ import OverlayImporter, Import, PingPong, version
        ApeMan = OverlayImporter
else :
    # Absolute imports prevent "SystemError : Parent module '' not loaded,..."
    log.debug("Package (Absolute) : " + str(__package__))
    if sys.version_info[:2] == (3,6) :
        log.debug("Apeman variant : Python 3.6")
        from __36__ import OverlayImporter, Import, version
        ApeMan = Import
    if sys.version_info[:2] == (3,5) :
        log.debug("Apeman variant : Python 3.5")
        from __35__ import OverlayImporter, Import, version
        # NOTE(review): absolute 3.5 assigns Import, relative 3.5 assigns
        # OverlayImporter -- confirm which is current.
        ApeMan = Import
    if sys.version_info[:2] == (3,4) :
        log.debug("Apeman variant : Python 3.4")
        from __34__ import OverlayImporter, version
        ApeMan = OverlayImporter
    if sys.version_info[:2] == (2,7) :
        log.debug("Apeman variant : Python 2.7")
        from apeman.__27__ import OverlayImporter, Import, PingPong, version
        ApeMan = Import  # Originally : OverlayImporter
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__init__.py
__init__.py
# ---------------------------------------------------------------------------
# __27__/pingpong.py -- Python 2.7 ApeMan variant that hooks
# __builtin__.__import__ directly ("ping" diverts to the patch, "pong"
# reverts to the original).
# NOTE(review): recovered from a whitespace-collapsed dump; nesting was
# reconstructed from the token stream -- verify before relying on it.
# ---------------------------------------------------------------------------
# Builtins
import __builtin__
# System
import os
import sys
# Imports
import imp
# Types
import types
# Inspection
import inspect
# Paths
from pathlib import Path
# Iteration
from itertools import izip_longest as izip, tee
# Descriptors
from descriptors import FileName, PathName, RootName
# Utilities
from utils import Indentation
# Logging
import logging
# Constants
modsep = '.'        # separator used in fully qualified module names
version = (0, 0, 0)


class Import(Indentation):
    """Substitutes :attr:`__builtins__.__import__` with itself to load
    patched versions of other modules.

    :class:`Import` substitutes the :attr:`__builtins__.__import__`
    function with itself.  This allows it to intercept later imports,
    substituting the source module (the original code) with the target
    module (the patch).  The target module necessarily imports the source
    module a second time, forcing :class:`Import` to either track the
    imports it has seen or inspect the origin requesting the import.
    """

    root = PathName()  # descriptor managing the overlay root path

    def __init__(self, *args, **kvps):
        """Instantiate ApeMan and install it as ``__builtin__.__import__``.

        Keyword options (all optional):
          root  -- folder ApeMan is invoked from; defaults to the folder
                   containing the invoking ``__init__.py``.
          name  -- package ApeMan is invoked from; defaults to the
                   invoking package.
          debug -- enables debug logging (default False).
        """
        super(Import, self).__init__(*args, **kvps)
        # Properties
        self.mask = "_{}_"  # template mapping MODULE -> _MODULE_
        self.debug = kvps['debug'] if 'debug' in kvps else False
        # self.log only exists when debug is on; every use below is guarded.
        if self.debug : self.log = logging.getLogger(__name__)
        if self.debug : self.log.debug("Initialized : Import")
        # Default to the module that instantiated ApeMan.
        self.name = kvps['name'] if 'name' in kvps else inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = kvps['root'] if 'root' in kvps else os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__)
        self.mods = self.modules()
        # Install the hook, keeping the original importer for pass-through
        # and for restoration in __del__.
        self._import_ = __builtin__.__import__
        __builtin__.__import__ = self

    def __call__(self, name, *args, **kvps) :
        """Redirect imports: divert a patched name to its overlay target
        ("ping"); when the overlay re-imports the name, revert to the
        original source module ("pong"); otherwise pass through.

        In Python 2.7 an import happens in two stages:
        :meth:`imp.find_module` locates the module and returns a tuple
        which :meth:`imp.load_module` then loads.  The ``__import__``
        signature is ``(name, globals, locals, fromlist, level)``.
        """
        # Ping: the name has a patch in the overlay -- load the patch.
        if (name, self.mapToTarget(name)) in self.mods.keys() :
            if self.debug : self.log.debug("Diverting : " + name + " -> " + self.mapToTarget(name))
            spc = imp.find_module(self.mapToTarget(name), [str(self.root)])
            if self.debug : self.log.debug(spc)
            mod = imp.load_module(name, *spc)
            if self.debug : self.log.debug(mod)
            return mod
        # Pong: the overlay is asking for the original module.
        if (self.mapToSource(name), name) in self.mods.keys() :
            if self.debug : self.log.debug("Reverting : " + self.mapToSource(name) + " <- " + name)
            spc = imp.find_module(self.mapToSource(name))
            if self.debug : self.log.debug(spc)
            mod = imp.load_module(name, *spc)
            if self.debug : self.log.debug(mod)
            return mod
        # Pass through / catch-all: defer to the original __import__.
        return self._import_(name, *args, **kvps)

    def mapToTarget(self, name) :
        """Maps a request to the target (patch) module: ``a.b`` -> ``_a_._b_``."""
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def mapToSource(self, name) :
        """Maps a request to the source (original) module: ``_a_._b_`` -> ``a.b``."""
        try :
            mask = self.mask.split("{}")
            # Strip the mask's prefix/suffix only when actually present.
            source = lambda text : text[(text[:len(mask[0])]==mask[0])*len(mask[0]):len(text)-len(mask[-1])*(text[-len(mask[-1]):]==mask[-1])]
            return modsep.join([source(part) for part in name.split(modsep)])
        except AttributeError , error :
            return ""

    def modules(self) :
        """Lists the overlays implemented within the root directory.

        Returns a dict keyed by ``(source_name, target_name)`` tuples and
        valued by the patch file's path.  Combining both names in a single
        key lets :meth:`__call__` look up either direction statelessly
        (a bidirectional mapping was considered and rejected; see
        https://stackoverflow.com/a/1456482/958580).
        """
        # Recurses through the folder structure to find python modules.
        ext = '.py'
        mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)]
        # Packages (__init__) are keyed by their package name.
        lst = {(self.mapToSource(modsep.join(item[0][:-1])),modsep.join(item[0][:-1])) if item[0][-1] == "__init__" else (self.mapToSource(modsep.join(item[0])),modsep.join(item[0])) : item[1] for item in lst}
        if self.debug : self.log.debug("Overlays : {}".format(str(lst.keys())))
        return lst

    def __del__(self):
        """Restore the ``__builtin__.__import__`` that was in place when
        ApeMan was instantiated."""
        __builtin__.__import__ = self._import_


if __name__ == "__main__" :
    # Demonstration only (no unit tests): runs the py2.7 example via a
    # hard-coded local interpreter path.
    import subprocess as su
    su.check_output("C:\\Python\\64bit\\2714\\python.exe example27.py", cwd = "e:\\python\\apeman\\mockup")
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__27__/pingpong.py
pingpong.py
# Python 2.7 implementation of ApeMan's import hook.  Rather than a
# MetaPathFinder (the Python 3 approach) this module wraps
# __builtin__.__import__ directly with a callable instance.
import __builtin__
# System
import os
import sys
# Import machinery (Python 2.7 vintage; superseded by importlib in Python 3)
import imp
# Types
import types
# Inspection
import inspect
# Paths
from pathlib import Path
# Iteration
from itertools import izip_longest as izip, tee
# Descriptors
from descriptors import FileName, PathName, RootName
# Utilities
from utils import Indentation
# Logging
import logging

# Constants
modsep = '.'        # module-path separator used throughout
version = (0, 0, 0)


class Import(Indentation):
    """Substitute for :attr:`__builtin__.__import__` that loads patched versions of other modules.

    An instance installs itself as the built-in import function.  Imports whose
    name matches a patch in the overlay are diverted to the patch (the target);
    the patch's own re-import of the same name is passed through to the
    original module (the source).  All other imports fall through to the
    original implementation unchanged.
    """

    # Descriptor for the overlay folder; presumably coerces strings to Path
    # objects since modules() calls self.root.rglob - TODO confirm against
    # the descriptors module.
    root = PathName()
    # Names already diverted once.  NOTE(review): class-level and therefore
    # shared across instances - looks deliberate for a singleton hook, but
    # verify before instantiating Import twice.
    lom = []
    # Naming mask mapping MODULE -> _MODULE_
    mask = "_{}_"

    def __init__(self, *args, **kvps):
        """Install the hook.

        Keyword arguments:
        name  -- package invoking ApeMan; defaults to the caller's module name
        root  -- folder containing the overlay; defaults to the caller's folder
        debug -- enables logging output; defaults to True
        """
        super(Import, self).__init__(*args, **kvps)
        # Logging and debugging
        self.debug = kvps['debug'] if 'debug' in kvps else True
        if self.debug:
            self.log = logging.getLogger(__name__)
        if self.debug:
            self.log.debug("Initialized : Import")
        # Name and root default to those of the module one stack frame up,
        # i.e. the overlay's __init__ file that instantiated this class.
        self.name = kvps['name'] if 'name' in kvps else inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = kvps['root'] if 'root' in kvps else os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__)
        self.mods = self.modules()
        if self.debug:
            self.log.debug("Modules : {}".format(str(sorted(self.mods.keys()))))
        # Keep the original implementation for pass-through, then install self.
        self._import_ = __builtin__.__import__
        __builtin__.__import__ = self

    def __call__(self, name, *args, **kvps):
        """Intercept an import, diverting matched names to their patch.

        Mirrors the two-stage Python 2.7 import: imp.find_module locates the
        module and imp.load_module performs the load.
        """
        target = self.mapToTarget(name)
        if target in self.mods:
            if name in self.lom:
                # Second sighting: the patch itself is importing the original.
                if self.debug:
                    self.log.debug("Reverting : " + name)
                return self._import_(name, *args, **kvps)
            # First sighting: record it and divert to the patch file.
            self.lom.append(name)
            if self.debug:
                self.log.debug("Diverting : {}".format(self.name + modsep + target))
            if self.debug:
                self.log.debug(target + " >>---> " + str(self.mods[target].parent))
            # Per Python's import conventions the search path must be the
            # folder that directly contains the module file.
            stem = target.split(modsep)[-1] if self.mods[target].stem != "__init__" else "__init__"
            spc = imp.find_module(stem, [str(self.mods[target].parent)])
            if self.debug:
                self.log.debug(spc)
            mod = imp.load_module(target, *spc)
            if self.debug:
                self.log.debug(mod)
            return mod
        # No patch registered for this name: defer to the original import.
        return self._import_(name, *args, **kvps)

    def mapToTarget(self, name):
        """Map a source name to its patch: tiers.package.module -> _tiers_._package_._module_."""
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def mapToSource(self, name):
        """Map a patch name back to its source: _tiers_._package_._module_ -> tiers.package.module."""
        mask = self.mask.split("{}")
        # Strip the mask's prefix/suffix from each part only when present.
        source = lambda text : text[(text[:len(mask[0])]==mask[0])*len(mask[0]):len(text)-len(mask[-1])*(text[-len(mask[-1]):]==mask[-1])]
        return modsep.join([source(part) for part in name.split(modsep)])

    def modules(self):
        """Map patch module names to the files implementing them.

        Recurses through the overlay folder; a package's __init__.py is keyed
        under the package name itself.
        """
        ext = '.py'
        mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)]
        return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]) : item[1] for item in lst}

    def __del__(self):
        """Uninstall the hook, restoring the import function captured at construction."""
        __builtin__.__import__ = self._import_


class OverlayFinder(Indentation, object):
    # Placeholder for a Finder-based implementation; never fleshed out.
    pass
# (A long log of commented-out OverlayLoader experiments lived here; it has
#  been condensed to keep the live implementation readable.)

class OverlayImporter(object):
    """Combined Finder/Loader ("Importer") for ApeMan overlays under Python 2.7.

    Naming conventions used in this class:
      FQMN  overlay.tiers   what the user imports (the handler/wrapper)
      FQON  tiers           what is installed, i.e. the overlay patch
      FQAN  _tiers_         the hidden name covering the original module

    Strategy: Python imports are atomic, so modules being loaded are trapped
    and re-registered under an alternate name -- ``overlay.tiers`` maps to
    ``overlay._tiers_.py`` which is imported as ``tiers``, while ``tiers``,
    the original module, is imported as ``_tiers_``.
    """

    # Indentation constants (see utils.Indentation)
    __indent__ = 0
    __taglen__ = 18

    def __init__(self, *args, **kvps):
        """Accepts name/root/path keywords; each defaults to the calling module's values."""
        # Defaults are derived from the caller, one stack frame up.
        kvps['name'] = kvps.get('name', inspect.getmodule(inspect.stack()[1][0]).__name__)
        kvps['root'] = kvps.get('root', Path(os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__)))
        kvps['path'] = kvps.get('path', None)  # retained for compatibility; currently unused
        super(OverlayImporter, self).__init__()
        self.mask = "_{}_"
        # Modules whose first import was diverted to a patch, keyed by name.
        self.trap = {}
        # Finders for the original modules, keyed by name.
        # BUG FIX: self.wrap was never initialised, so the re-entrant branch in
        # find_module (and load_module) raised AttributeError the first time a
        # patch imported its source module.
        self.wrap = {}
        self.name = kvps['name'] or inspect.getmodule(inspect.stack()[1][0]).__name__
        # Accept either the overlay folder or a file within it.
        self.root = kvps['root'].parent if kvps['root'].is_file() else kvps['root']
        self.mods = self.modules()
        self.log = logging.getLogger(__name__)

    def mapToTarget(self, name):
        """Maps a request to the overlay module: tiers.module -> _tiers_._module_."""
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def modules(self):
        """Map overlay module names to the files implementing them.

        Recurses through the overlay folder; a package's __init__.py is keyed
        under the package name itself.
        """
        ext = '.py'
        mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)]
        return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]) : item[1] for item in lst}

    def find_module(self, name, path=None):
        """Claim any import for which the overlay holds a patch; otherwise decline.

        Returns self (this object is also the loader) or None.
        """
        temp = self.mods.get(self.mapToTarget(name))
        if temp:
            if name in self.trap:
                # The overlay itself is importing the original module: locate
                # it via every other finder on sys.meta_path.
                for meta in [meta for meta in sys.meta_path if meta is not self]:
                    self.wrap[name] = self.wrap.get(name) or meta.find_module(name, path)
                return self
            else:
                # A user import: trap the name so the patch is served first.
                self.trap[name] = temp
                return self
        return None

    def load_module(self, name):
        """Load a trapped name from its patch file, or a wrapped name via its original finder."""
        load = sys.modules.get(name)
        if load is None:
            if name in self.wrap:
                # Note: importing PACKAGE as _PACKAGE_ outright fails -- the
                # built-in importers refuse renamed modules -- but the loaded
                # module object may be re-assigned afterwards.
                module = self.wrap[name]
                load = module.load_module()
            if name in self.trap:
                # Compile and execute the patch file into a module registered
                # under the requested (source) name.
                file = self.modules()[self.mapToTarget(name)]
                load = types.ModuleType(self.mapToTarget(name))
                with file.open('r') as data:
                    code = data.read()
                load.__file__ = str(file)
                code = compile(code, str(file), 'exec')
                exec(code, load.__dict__)
                sys.modules[name] = load
        return load
if __name__ == "__main__":
    # Demonstration, not a unit test: run one of the mockup examples under a
    # Python 2.7 interpreter and let ApeMan patch the module imports.
    # (A large block of commented-out finder experiments and alternative test
    #  harness configurations has been condensed out of this guard.)
    import subprocess as su
    # Example 27 : uses the ping-pong importer over the usual ApeMan one.
    # su.check_output("C:\\Python\\64bit\\2714\\python.exe example27.py", cwd = "e:\\python\\apeman\\mockup") # shell = True, stderr=su.STDOUT)
    su.check_output("C:\\Python\\64bit\\2714\\python.exe explicitImport.py", cwd = "e:\\python\\apeman\\mockup") # shell = True, stderr=su.STDOUT)
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/__27__/apeman.py
apeman.py
-------------------------- Frequently Asked Questions -------------------------- Overlays ======== Weird behaviours observed with overlays are documented here for the time being. While importing an overlay ``import`` throws :class:`ReferenceError` -------------------------------------------------------------------- .. After one has imported an overlay one finds that import throws a :class:`ReferenceError`. .. This results when, internally, a weak reference is used to point to an :class:`ApeMan` instance. The most likely cause of this is that you have not retained a reference to the :class:`ApeMan` instance. You should change the following code in your overlays' init file, :file:`OVERLAY/__init__.py` from :: from apeman import ApeMan; ApeMan() to something like the following code. :: from apeman import ApeMan; apeman = ApeMan() The reason for this depends upon the implementation of :class:`ApeMan` that you are given when you import it. Internally the implementation of ApeMan gets selected according to the current Python interpreter and the preferred implementation for that version of Python, either the :class:`Import` or :class:`OverlayImporter` class. Depending on how the selected class is implemented :attr:`builtins.__import__` may be assigned either a concrete reference or a weak reference to the instance of :class:`ApeMan`. In the case of a weak reference one must provide their own concrete reference, hence the fix above, to prevent the :class:`ApeMan` instance from being garbage collected when the init code is done executing. This is a quirk the author monitors with the :mod:`tests.testRollBackWithReference` and :mod:`tests.testRollBackSensReference` tests and hopes to either remove or enforce in due course.
Patch modules versus patch sub-packages --------------------------------------- As one builds up their suite of patches one may want to substitute a module, providing a patch, :: OVERLAY/ # The root folder of the ApeMan overlay _PACKAGE_.py # The module containing one's patches, renamed after the source module or package ... # Further patches provided by the overlay __init__.py # The file invoking ApeMan; identifying it as an overlay for a sub-package, providing separate patches. :: OVERLAY/ # The root folder of the ApeMan overlay _PACKAGE_ # The sub-package containing multiple patches, renamed after the source module or package ... # Sub-patches modifying the components of the source module or package __init__.py # The file combining all of these into a single patch ... # Further patches provided by the overlay __init__.py # The file invoking ApeMan; identifying it as an overlay Presently ApeMan does not support this sort of thing, due to its trapping and substitution mechanism. The plan is to fix this but for now one must place all of their patches into a single module and may not factor this out into a sub-package. To be sure this is a decidedly different problem from patching :ref:`faq:Nested Structure(s)` Nested Structure(s) ------------------- Given a source package with a heavily nested scaffold :: PROJECT/ PACKAGE/ SUB-PACKAGE/ MODULE.py ApeMan, for the most part, supports patching this scaffold with a similarly nested structure. :: OVERLAY/ _PACKAGE_/ _SUB-PACKAGE_/ _MODULE_.py How to ... ========== Ignore site-packages and environmental variables ? -------------------------------------------------- One may disable the environment variables and user site-packages when invoking Python as shown in :dabeaz15:`4472`. This allows one to have a "clean" Python environment at startup. :: python3 -E ... # Ignore environment variables python3 -s ... # Ignore user site-packages python3 -I ...
# Combines -E and -s into a single switch One has seen instructions on how to subsequently populate these properly but forgets the reference and the necessary instruction.
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/faq.rst
faq.rst
------------ Observations ------------ The following observations were not entirely evident from the Python documentation. These are considered first before discussing the implementation. Modules ======= Strictly speaking a :term:`module` is a type, :class:`types.ModuleType`, in Python. The typical user, however, will understand a module to represent a Python file, that is a file with a :file:`*.py` or :file:`*.pyw` extension, upon their system. Packages ======== A :term:`package` is a :term:`module`; it too is of type :class:`types.ModuleType` but it will contain one or more sub-package(s) or sub-module(s). Typically one views a package as a folder upon their system that contains one or more Python files or one or more subfolders that eventually do so. Naming ====== .. Given a module name, one may map it to any resource, but one may not install it within ``sys.modules`` under an alternate name. An overlay patches one module by another with the same name. Clearly this leads to namespace and scope conflicts and one has to mitigate this in some way. Two means of controlling this include : * Renaming the original module, or its overlay, using a unique mapping * Enclosing the original module, or its overlay, within another scope Experimenting with the Python import mechanism one has learnt that module renaming is not especially easy. Within the *Importer*, or *Finder*/*Loader*, layer it does not seem possible to remap a given module name, ``PACKAGE.MODULE``, to another, ``OVERLAY._PACKAGE_._MODULE_``. Specifically a *finder*/*importer* in ``sys.meta_path`` cannot remap a requested module to an alternate one. The machinery seems to prevent one from renaming or redirecting an import mid-process. Where this happens within the machinery is difficult to pin down as only an :class:`ImportError` is thrown. It seems possible to perform this mapping by hooking into the *Import* entry point. In particular one may readily enclose or nest an import.
Given a module, ``PACKAGE.MODULE``, to import one may redirect this to ``OVERLAY.PACKAGE.MODULE`` rather easily. Mapping a module name to an alternate resource is also possible. File(s) or folder(s), archives (Zip files) and even URLs are possible resource targets. That is one may import ``_module_.py`` as the source for ``module`` but may not assign ``module`` as ``_module_`` within ``sys.modules`` and the *Importer* layer. Structural Equivalence ====================== .. One may even distribute such a module as a Python package via the Python Package Index, PyPI; ideally, by convention, a separate :file:`setup.py` script would be set up to do this but this is not enforceable. It is not immediately obvious to a new Python user that a standalone module is simultaneously both importable and executable. Any such module must include a guard to distinguish between invocation by import, that is by some other module e.g. :code:`import MODULE` or :code:`from MODULE import ITEM`, and invocation by command line, that is directly through python e.g.
:command:`python -m MODULE` or :command:`python MODULE.py`:: # Common code that is always executed if __name__ == "__main__" : # Only executed when invoked from the command line if __name__ == "MODULE" : # Only executed when invoked from another module # Common code that is always executed A Python project with the simplest possible structure is, therefore, as follows:: PROJECT/ # Project root folder MODULE.py # A standalone module Equivalently, a structure exists that substitutes the standalone module for a standalone package:: PROJECT/ # Project root folder PACKAGE # A standalone package __init__.py # Only executed when invoked from another module __main__.py # Only executed when invoked from the command line When a standalone package is invoked Python elects which file to run, :file:`__init__.py` for invocation by import and :file:`__main__.py` for invocation via command line, negating the need for the guard required in a standalone module. To Python these two structures are equivalent and they may readily be interchanged. Packages are comparatively the *descriptors* for modules. Isolation ========= Python treats each import call as a unique operation. It does not pass previously imported modules from the current scope into later import calls. The following, for example, will typically fail :: from tiers import package_a from package_a import package_b while the following, which explicitly identifies the module, will succeed :: from tiers import package_a from tiers.package_a import package_b A possible workaround could import ``package_b`` from within the init file of ``package_a`` while the latter is being imported. .. I do not, however, think that this will work as the import command receives the module name parts as strings and not as references to previous modules. Effectively imports occur in isolation, are unique from one another and are independent of prior import(s).
The module(s) listed in an ``import`` statement are converted to string arguments before being passed to ``__import__``. References to previously imported modules are not passed in as is usual within Python. .. note :: This text is repeated verbatim within the ``tests.testUpperCase.testStructure`` tests, which test this out every few iterations. Registration ============ Given that imports occur in `Isolation`_, there is a possibility that one might coerce an importer to selectively pick and register overlays. Consider the following overlay structure :: PROJECT // # The project's root folder overlay// # The overlay(s) one wishes to use _PACKAGE_.py # The overlay for PACKAGE _OTHER_.py # The overlay for some OTHER package In the following lines of code one could make the Meta Path *Finder* register which modules are to be overlayed during the execution of the first line i.e. importing ``overlay.PACKAGE`` would cause ``PACKAGE`` to be registered and caught in later imports while some ``OTHER`` package would not. Python insists on a module object being returned during an import and one may have to install an empty ``types.ModuleType`` into ``sys.modules`` as a placeholder; possibly ``None`` is sufficient for this purpose. This is, however, rather unorthodox, unintuitive and probably not desirable. The registered ``overlay`` would then be caught and imported within later lines. :: from overlay import PACKAGE import PACKAGE import OTHER The overlay, ``_PACKAGE_.py`` for ``PACKAGE``, and the original, ``PACKAGE.py`` for ``PACKAGE``, would both be imported by the overlay importer/manager. The ``OTHER`` package, by default, would be imported using the usual import mechanism(s). .. todo :: This section is a bit confusing as it discusses both usage and implementation details. The exact method by which this might finally be implemented is unclear at this time. Nuances ======= This is a collection of notes I'm not entirely sure about.
* Given a module, :mod:`B`, in a package, :mod:`A`. What is the behaviour when I try to import :mod:`B` from within :mod:`A.B`. This is asked with respect to :meth:`__import__` which accepts both the global and local scope as an argument. One might glean the actual source importing a module through :samp:`{globals}/{locals}["__name__"]` and determine if the overlay is performing the import or some package using the overlay is. References ========== Even though the import mechanism performs imports in `Isolation`_, passing in strings rather than references to identify a module, it does not prevent one from accessing prior imports. One may create their own reference to previously imported modules by simply importing them. This is due to the short-circuiting that happens within the import machinery. For instance, while importing a sub-package or sub-module, one may obtain a reference to any prior module instance, the parent module for example. This is done best with :func:`importlib.import_module` since :func:`builtins.__import__` tends to suffer from :ref:`side effects <sec:__import__>`. Errors ====== When debugging an *Importer* or *Finder*/*Loader* combination try to avoid using dictionaries in the initial code. Often an import will fail with a ``KeyError`` which does not indicate a fault in one's classes but rather an unassigned key in ``sys.modules``. e.g. Say one is importing some ``MODULE`` and their importer is still returning ``None`` as it is partially implemented then ``import`` will raise an error as one's code does not install ``module`` in ``sys.modules``. .. todo :: This is badly explained and simply a kick in the teeth from past me to future me. .. note :: What I was getting at here is that bad assignments in ``sys.modules`` appear as generic :class:`KeyError`'s which, if one is working with ``dict``'s, can lead to some confusion. Specifically I was rather frustrated after tracing an error for an entire afternoon that turned out to be rather trivial.
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/observations.rst
observations.rst
--------- Machinery --------- :ref:`import:Python's Import Mechanism` provides an overview of the import process. This section goes into the detail of the machinery operating under the hood. .. toctree:: :hidden: Modules <machinery/modules> Utilities <machinery/utilities> Components ========== sys.metapath ------------ This is the list of Meta finders that Python systematically checks when looking for a modules specification. sys.path -------- This is a list of file paths that Python searches when looking for a module. I get the impressions that, while this was the go to scanner in Python 2.7, it is now only consulted by the SourceFileLoader from the MetaPath. sys.modules ----------- This retains a list of all the loaded modules and is used to return thos that thave already been cached. Execution ========= .. graphviz :: :caption: Illustration of how simple import is really defined. :alt: Pythons' import command :align: center :name: import command digraph { subgraph import { a;b; } a [target="_sys.modules", label="sys.modules", target="_top", URL="./import.html#sys-modules"] b [target="_Execution", label="sys.modules", target="_top", URL="./import.html#Execution"] a -> b; } .. digraph import { "sys.module" [target="_sys.modules"] -> imp [label="sys.module", target="_sys.modules"]} .. graphviz :: digraph { a [shape = "ellipse", ] b [shape = "box", ] c [shape = "circle", ] d [shape = "record", ] e [shape = "plaintext", ] a -> b -> c -> d -> e -> a; a -> e -> d -> c -> b -> a; } Import ====== .. autofunction :: builtins.__import__ .. autofunction :: importlib.import_module Finders ======= .. inheritance-diagram :: importlib.abc.Finder importlib.abc.MetaPathFinder importlib.abc.PathEntryFinder .. inheritance-diagram :: _frozen_importlib_external.FileFinder _frozen_importlib_external.PathFinder _frozen_importlib_external.WindowsRegistryFinder importlib.machinery.FileFinder importlib.machinery.PathFinder importlib.machinery.WindowsRegistryFinder .. 
autoclass :: importlib.abc.Finder :members: .. autoclass :: importlib.abc.MetaPathFinder :members: .. autoclass :: importlib.abc.PathEntryFinder :members: Loaders ======= .. inheritance-diagram :: importlib.abc.ExecutionLoader importlib.abc.FileLoader importlib.abc.InspectLoader importlib.abc.Loader importlib.abc.ResourceLoader importlib.abc.SourceLoader :parts: 2 .. inheritance-diagram :: _frozen_importlib_external.ExtensionFileLoader _frozen_importlib_external.SourceFileLoader _frozen_importlib_external.SourcelessFileLoader :parts: 0 .. autoclass :: _frozen_importlib_external.SourceLoader .. autoclass :: importlib.abc.FileLoader :members: :inherited-members: :undoc-members: .. autoclass :: importlib.machinery.SourceFileLoader .. autoclass :: importlib.machinery.SourcelessFileLoader :members: Module Specifications ===================== .. inheritance-diagram:: importlib.machinery.ModuleSpec .. currentmodule :: importlib.machinery .. autoclass :: ModuleSpec :members: :inherited-members: :undoc-members: .. .. autoattribute:: ModuleSpec.name
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/machinery.rst
machinery.rst
-------- Glossary -------- .. glossary :: :abbr:`API (Application Programming Interface)` : API The interface used by programmers to utilize one's code base. :abbr:`FQMN (Fully Qualified Module Name)` : FQMN The Fully Qualified Module Name (FQMN) is the complete name of a module or a package e.g. the module name :mod:`PACKAGE.MODULE` and the package name :mod:`PACKAGE`; granted the latter is really a |FQPN|. :abbr:`FQON(Fully Qualified Overlay Name)` : FQON The Fully Qualified Overlay Name (FQON) represents the name of an overlay e.g. the :mod:`OVERLAY` package. The name that the user imports to enable a specific suite of patches :abbr:`FQPN (Fully Qualified Package Name)` : FQPN The Fully Qualified Package Name (FQPN) is the |FQMN| with the module name lopped off e.g. :mod:`PACKAGE` in :mod:`PACKAGE.MODULE` :abbr:`FQRN(Fully Qualified Replacement Name)` : FQRN The Fully Qualified Replacement Name (FQRN) represents the name of a particular patch within an overlay e.g. a package, :mod:`OVERLAY._PACKAGE_` or a module, :mod:`OVERLAY._PACKAGE_._MODULE_` :abbr:`FQSN(Fully Qualified Source Name)` : FQSN The Fully Qualified Source Name (FQSN) is the name of the package or module being patched within some overlay e.g. the :mod:`PACKAGE.MODULE`. This is synonymous with both the |FQPN| and the |FQMN| :abbr:`FQTN(Fully Qualified Target Name)` : FQTN The Fully Qualified Target Name (FQTN) is the name of the patch within an overlay, taken from the overlay's root as though it were a |FQPN| or |FQMN| e.g. the package, :mod:`_PACKAGE_` in :mod:`OVERLAY._PACKAGE_`, or the module, :mod:`_PACKAGE_._MODULE_` in :mod:`OVERLAY._PACKAGE_._MODULE_`, within an overlay, :mod:`OVERLAY` explicit package A package that is explicitly identified by the inclusion of an :file:`__init__.py` file. implicit package A package that is implicitly identified by the inclusion of one or more modules, none of which are called :file:`__init__.py`. module The atomic unit for Python source code e.g.
:code:`a` or :code:`c` in :code:`b.c`. Traditionally this refers to a python file. Within the context of this document it largely excludes :file:`__init__.py` files. package A collection of one or more modules. Traditionally speaking, packages would map to the folders upon ones system; hence any folder containing one or more Python module(s) is a package. The package is identified as the first part of a modules' name e.g. :code:`a` and :code:`a.b` in :code:`a.b.c`. scaffold This refers to the structure of a source package that one is patching. This may refer to both the physical structure of the package as it is stored e.g. :file:`package/sub-package/__init__.py` and to the path structure used to import it in python e.g. :mod:`package.sub-package`. structure This refer to the structure of a patch that mimics the scaffold of some source package. This may refer to both the physical structure of the patch as it is stored e.g. :file:`OVERLAY/_package_/_sub-package_.py` and to the path structure used to import it in python e.g. :mod:`overlay._package_._sub-package_` or :mod:`package.sub-package`. It is implied that the structure of the patch largely mimics the structure of the underlying scaffold. submodule This refers to any module contained within a package e.g. :code:`b` in :code:`a.b`. subpackage A package nested within another. Traditionally speaking, any subfolder containing a module. More accurately the first part of a modules' name, containing more then one part e.g. :code:`a.b` and :code:`a.b.c` in :code:`a.b.c`. .. rubric:: Footnotes .. [#fqsn1] This entry is to be deprecated as it is redundant .. :abbr:`FQON(Fuly Qualified Overlay Name)` : FQON .. This Fuly Qualified Overlay Name is what is installed i.e. the overlay e.g. :file:`PACKAGE` .. :abbr:`FQAN(Fuly Qualified Hidden/Abstracted Name)` : FQAN .. The Fuly Qualified Hidden/Abstracted Name is the module that should have been installed but is now covered up i.e. the original e.g. :file:`_PACKAGE_` .. 
:abbr:`FQSN(Fully Qualified System Name)` : FQSN .. The Fully Qualified System Name is what the user really imports e.g. :file:`overlay._PACKAGE_` [#fqsn1]_ .. :abbr:`FQPN(Fully Qualified Path Name)` : FQPN .. The Fully Qualified Path Name is the relative path name e.g. :file:`overlay\\PACKAGE` .. :abbr:`FQFN(Fully Qualified File Name)` : FQFN .. The Fully Qualified File Name is the relative file name (e.g. for a File rather than a Path loader) e.g. :file:`overlay\\PACKAGE\\.__init__.py`
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/glossary.rst
glossary.rst
.. ApeMan documentation master file, created by sphinx-quickstart on Wed Nov 30 23:32:50 2016. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. ================== Portage for Python ================== .. toctree:: :maxdepth: 3 :hidden: Home Page <self> Objective <objective> Terminology <terminology> Mechanism <import> Machinery <machinery> Observations <observations> ApeMan <apeman> Overlays <overlay> Literature <literature> Contribution <contribution> Testing <tests> Frequently Asked Questions <faq> Glossary <glossary> The cursory reader need only cover the contents upon this, the main/home page, to understand why and how one might use ApeMan. Those wanting an introduction into how ApeMan works should read the :ref:`objective:Objective` and :ref:`import:Python's Import Mechanism` sections. While those looking into its implementation should read the :ref:`apeman:ApeMan`, :ref:`machinery:Machinery`, :ref:`tests:Testing` and :ref:`contribution:Contribution` sections. :ref:`machinery:Machinery` and :ref:`literature:Literature` discuss and provide resources for those interested in the Python import system. The remainder of the documentation reviews the Python import system and discusses the implementation of ApeMan within it. .. only :: builder_html .. include :: ../readme.rst ------------------ Indices and tables ------------------ * :ref:`genindex` * :ref:`modindex` * :ref:`search`
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/index.rst
index.rst
------------------------- Python's Import Mechanism ------------------------- This section reviews how imports are effected within Python. It exists as a compliment to Python's ``imp``/``importlib`` documentation, which I found to be rather terse and a little confusing. Granted at the time (Circa. 2016) it contained information pertaining to Python 2.7, 3.3, 3.4 and 3.5 all of which saw significant API changes. The import mechanism is largely a black box. Comprising of the core machinery, which one can't really tinker with, and three entry points which one may hook into to a degree. .. figure :: .static/figures/blackbox.png :align: center :figwidth: 80% Pythons' import mechanism is complex and provides only three entrypoints that one may hook into, *Import* itself or more preferably the *Finder* and/or *Loader* subcomponents. .. The import machinery exists as a black box with three entry points, creating an object that has effect in all three entry points seems like the only way to affect certain behaviours import determines which module to load and is the first point where one may intercede. The other, more closely related, points concern finding and loading a package/module. While one might hook into all three points it is common place, and indeed encouraged, to only hook into the latter two. .. An object hooking into the latter two points is referred to as an importer, which combines both a loader and a finder into a single object. Generally it is not advised to fiddle with the ``__import__`` statement one may readiily Although it is not advised it's possible to monkey patch this function to a degree. It is common to have an object behave both as a finder and a loader . these are often combined into a single object. Import ====== `Import <https://docs.python.org/2/reference/simple_stmts.html#import>`_ does a few unexpected things. It processes ones request before handing over to the machinery and returns the imported module which it, the machinery, loaded. 
Both the ``import`` statement, which is parsed and converted into an `__import__`_ call, and the `importlib.import_module`_ function wrap an internal call to `bootstrap.gcd_import` which activates the machinery. .. _fig:import: .. figure :: .static/figures/structure/import.png :align: center :figwidth: 80% The import hook may be used to control or alter what gets imported. It is within this hook that one might implement an overlayer manager. .. _sec:__import__: ``__import__`` -------------- When an import statement is parsed it is converted into a call to ``__import__``. Calls to ``__import__`` pass either one or five argument(s). This includes the package/module `name`, the current scopes `global` and `local` variables, a `from` list and a `level`. The `from` list affects what ``__import__`` returns as follows : * If there is no 'from' list it returns the root package e.g. import A, import A.B and import A.B.C all return A. >>> __import__('tiers', globals(), locals(), [], 0) <module 'tiers' from 'E:\\Python\\apeman\\complex\\tiers\\__init__.py'> >>> __import__('tiers.module_a', globals(), locals(), [], 0) <module 'tiers' from 'E:\\Python\\apeman\\complex\\tiers\\__init__.py'> * If there is a 'from' list it returns the trunk package e.g. from A.B import C returns A.B. >>> __import__('tiers.package_a', globals(), locals(), ('module_a',), 0) <module 'tiers.package_a' from 'E:\\Python\\apeman\\complex\\tiers\\package_a\\__init__.py'> .. The `level` indicates if a relative import is being performed, which is done relative to the package. .. note :: The sections predating the `__import__`_ section assumed there was no means of intercepting the ``__import__`` mechanism. The problem was viewed from the Importer side of things and the author may have assumed some odd things as a result. ``importlib.import_module`` --------------------------- ``importlib.import_module`` allows one to explicitly import a named sub package, this is in contrast to ``__import__``'s behaviour. 
.. Using this one may import `A.B.C` versus importing `A` or `A.B` Finder(s), Loader(s) and Importer(s) ==================================== The machinery splits the work into an independant "finding" and "loading" phases. During the "finding" phase the finders/importers in ``sys.meta_path`` are interrogated sequentially to ascertain which one recognizes the requested package or module. The finder/importer that is familiar with the package or module then returns a loader that it believes can perform the "loading" operation. The loader then loads the module and installs it within ``sys.modules``, which it does by executing the source code and updating the ``sys.modules`` dict. Finder(s) --------- An object providing a method for determining whether or not it is aware of a *Loader* that might import a given module. .. figure :: .static/figures/structure/finder.png :align: center :figwidth: 80% The *Finder* returns a *Loader* that Python uses to load a given module. The module is not necessarily specific to the pair. Usually they behave more like factory methods loading a selection of modules that are available in a certain format e.g. builtin modules, site-packages and zip based modules. .. It is meant to returns a suitable *Loader* or ``ModuleSpec`` if it does and ``None`` if it does not. .. Note .. .. These classes are really implemented within _frozen_importlib_external. .. The following two lines will generate the inhritance diagram code. .. import _frozen_importlib_external .. import importlib .. ".. inheritance-diagram :: " + \ .. " ".join(["_frozen_importlib_external.{}".format(item) for item in dir(_frozen_importlib_external) if "Finder" in item and item not in [*dir(importlib.abc)]]) + " " + \ .. " ".join(["importlib.abc.{}".format(item) for item in dir(importlib.abc) if "Finder" in item and item not in []]) + " " + \ .. 
" ".join(["importlib.machinery.{}".format(item) for item in dir(importlib.machinery) if "Finder" in item and item not in [*dir(importlib.abc)]]) Loader(s) --------- An object providing a method that instantiates and registers a module. .. figure :: .static/figures/structure/loader.png :align: center :figwidth: 80% The *Loader* creates, loads, executes, populates and returns a module object. These operations allow one some lee-way in how they might alter an import. .. Note .. .. These classes are really implemented within _frozen_importlib_external. .. The following two lines will generate the inhritance diagram code. .. import _frozen_importlib_external .. import importlib .. ".. inheritance-diagram :: " + \ .. " ".join(["_frozen_importlib_external.{}".format(item) for item in dir(_frozen_importlib_external) if "Loader" in item and item not in [*dir(importlib.abc)]]) + " " + \ .. " ".join(["importlib.abc.{}".format(item) for item in dir(importlib.abc) if "Loader" in item and item not in []]) + " " + \ .. " ".join(["importlib.machinery.{}".format(item) for item in dir(importlib.machinery) if "Loader" in item and item not in [*dir(importlib.abc)]]) Importer(s) ----------- .. PEP302 prescribes the use of two different classes, a Finder .. and a Loader, that find and load modules respectively. Each .. respectively provides a find_module and a load_module method. .. These two classes can be combined into a unified Importer. .. The combination of both a *Finder* and a *Loader* into a single class is referred to as an *Importer*. .. Typically this is done to share state between both operations. Importers combine the *Finder* and *Loader* into a single object. They provide a clean mechanism for handling imports since the same class performs both *Finder* and *Loader* operations it can share state information that would normally have to be transferred between the separate operations. .. 
figure :: .static/figures/structure/importer.png :align: center :figwidth: 80% Showing the difference between an *Importer* and how it encompasses both a *Finder* and a *Loader*. Since this mechanism has undergone some rather drastic modifications of late, the exact mechanism is considered separately for each Python variant. Module Spec-ification(s) ------------------------ The *Importer* mechanism described above is losing favour for a semaphore-like mechanism, where the *Finder* returns a ``ModuleSpec`` containing an embedded *Loader*. Any state information that originally would have been stored within an *Importer*, to share it between the *Finder* and *Loader* parts, must now be embedded within a ``ModuleSpec``. The Python developers believe this allows for a more generic import process. .. figure :: .static/figures/structure/modspec.png :align: center :figwidth: 80% Module Specifications assume a semaphore role allowing communication between a finder and loader but do not share a common set of attributes like an *Importer* would. Old vs. New =========== With the transition towards ``ModuleSpec`` it becomes harder to combine both the *Loader* and *Finder* into a single **instantiable** *Importer*. Many of the ``machinery.*Loader`` subclasses expect certain information during their instantiation which is not available during the instantiation of the *Finder*, preventing the instantiation of an *Importer* until this information is known. The *Finder*, of an *Importer*, must become a factory method which instantiates and returns a *Loader*, hence forming an *Importer*, as required. The rationale for this is that the *Finder*/*Importer* does not need to be instantiated before its inclusion within ``sys.meta_path``; presumably this is an optimization but it limits the usage of *Importers*. Effectively this enforces separate *Finder* and *Loader* classes over an integrated *Importer* class as a result. ..
This makes the system slightly more modular as the user may specify any one of the available *Loader* classes within the ``ModuleSpec``, but one had to do so with an *Importer* anyways. One might view this as a step backwards in the API design. The Python development team seem to think it's an improvement since it accommodates some more obscure import scenarios. One believes the primary benefit of this stems from providing a module with localized data access. As an ``imp``/``importlib`` user the *Importer* implementation seems far more structured than the ``ModuleSpec`` one. The next few sections consider the import mechanism under the respective Python versions. .. Implementations .. =============== .. .. The following sub-sections consider the variations of the import mechanism under different versions of Python. .. These are mostly as commentary to the preceding section. .. toctree :: :hidden: :caption: Python Import Variations :name: Import Variations Python 3.3 : Import <33/import> Python 3.4 : Import <34/import> Python 3.5 : Import <35/import>
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/import.rst
import.rst
---------- Literature ---------- PEP === This section lists the various PEP's that describe the features of the import system. :pep:`273` : Zip files This discusses how the import system utilizes zipfiles and how one could leverage these in their own code. :pep:`302` : *Finders*, *Loaders* and *Importers* This discusses the mechanism by which *Finder*, *Loaders* and *Importers* ought to interact, I believe this describes Brett Cannon's conversion from a C to Python based import system. :pep:`395` : Qualified Names for Modules (Deprecated) Deals with pitfalls in the older import systems :pep:`420` : Implicit Namespace Packages Identifies the differences between implicit and explicit packages :pep:`451` : ModuleSpec This discusses the ModuleSpec class implementation :pep:`3147` : Module Cache This describes the current behaviour of the module cache Blogs ===== Since the presentation of "Live and Let Die !" by David Beazley every monkey with a keyboard and a Wordpress account seems to have decided that they are now an authority upon the Python import system. They are not... and simply clutter the internet. Below is a list of more legitimate authorities. `Brett Cannon <https://plus.google.com/+BrettCannon>`_ Overhauled the Python import system from versions 3.3 to 3.5 `articles <https://snarky.ca/>`_ are worth a read. Nick Coghlan Provides a list of `pitfialls <http://python-notes.curiousefficiency.org/en/latest/python_concepts/import_traps.html>`_ in Python's imports and seems to have been the author of author of :pep:395. One found this useful during the initial development of ApeMan, but it is bit wishy washy. `Yarbelk <http://stackoverflow.com/a/14050282/958580>`_ and `Sebastian Rittau <http://stackoverflow.com/a/67692/958580>`_ describe some of the changes in the import machinery. Presentations ============= This section provides table(s) of contents for the presentations that are available upon the topic. Live and Let Die ! 
------------------ David Beazley does a terrific job at covering the import system. `Modules`_ Describes the Module type, how it's loaded, compiled and populated. `Import`_ Reviews what import really does under the hood. `Module Cache`_ Module caching and how it works, intercepting reloads and the like. `Import Hooks`_ Describes the Meta_Path and Path hooks. `References`_ Beazley lists the resources he had used to form his talk. .. _`Modules`: https://youtu.be/0oTh1CXRaQ0?t=5820 .. _`Import`: https://youtu.be/0oTh1CXRaQ0?t=6360 .. _`Module Cache`: https://youtu.be/0oTh1CXRaQ0?t=6600 .. _`Import Hooks`: https://youtu.be/0oTh1CXRaQ0?t=8160 .. _`References`: https://youtu.be/0oTh1CXRaQ0?t=10560 .. This serves as a table of contents for the David Beazely video. .. .. ==== ============ ================================================================================== .. Time Section Description .. ---- ------------ ---------------------------------------------------------------------------------- .. 1:37 Module(s) Describes the Module type, how it's loaded, compiled and populated. .. 1:43 Import Reviews what import really does under the hood. .. 1:50 Module Cache Module caching and how it works, intercepting reloads and the like. .. 2:16 Import Hooks Describes the Meta_Path and Path hooks. .. 2:56 References Beazley lists the resources he had used to form his talk. .. .. .. Brett Cannon talks "How Import Works" and "Import This, That and the Other thing" .. ==== ============ ================================================================================== How Import Works ---------------- .. todo :: Go through this video again. If I remember correctly the fimography was a bit scrappy in this one. Import This, That and the Other thing ------------------------------------- .. todo :: Go through this video again. Examples ======== `"Customizing the Python Import System" by C.W. 
<http://blog.dowski.com/2008/07/31/customizing-the-python-import-system/>`_ `blog.dowski.com <blog.dowski.com>`_ (Circa. 2008) seems to provide the first attempt at an online module loader. `"Importing Dynamically Generated module (Python Recipe)" by Andars Hammarquist <http://code.activestate.com/recipes/82234-importing-a-dynamically-generated-module/>`_ The earliest attempt to dynamically import a module (Circa. 17 Oct. 2001). I believe the patch to :mod:`unittest.mock` in the ApeMan-Overlays is a bit more comprehensive than this script. `PyDev.Debugger <https://github.com/fabioz/PyDev.Debugger/blob/8142cbfbceb1b80e1e118f7fe133d02da7f1f8bd/pydev_import_hook.py#L34>`_ An interesting class. I still have to go through this one as I'm not sure how applicable it is or isn't to ApeMan. Deprecation =========== .. todo :: This belongs elsewhere but one is not entirely sure where to place it just yet. In the transition from Python 3.3 to Python 3.4 the import machinery got overhauled. Brett Cannon apparently gutted any remnant C code within these layers and made it all pure Python. The following table lists what was changed during the transition and lists discrepancies between the standard python library and the code within this package. This section aims to resolve the function name changes in a quick lookup table.
================================================ ================================================ ================================================ Imp (Python < 3.3) Importlib (Python 3.3) Importlib (Python > 3.3) ------------------------------------------------ ------------------------------------------------ ------------------------------------------------ `imp.find_module(name[,path])` `importlib.find_loader` `importlib.util.find_spec(name, package=None)` `imp.load_module(name, file, path, note)` `importlib.util.import_module` ================================================ ================================================ ================================================ `note` `description` in the python docs, is a small description string for the package. `path` `pathname` in the python docs, is the current file path or possibly the module path. Related ======= This section lists a number of related projects that are available upon the Python Package Index :PyPI:`aspectlib` and :PyPI:`featuremonkey` These seems to be a more advanced implementation of mock or seem tot ackle the problem from some other paradigm. :PyPI:`dingus` Dingus is a sort of mock object one throws at other code to see what the other code does to it, after an run one post processes the calls made and determines what the object should really do. :PyPI:`gorilla` This provides a competing method for patching to ApeMan. It seems to register the patches throughout ones code base and apply them when one angers teh Gorilla as it were (Perhaps they dislike the M.. word too ?). :PyPI:`recursive-monkey-patch`, :PyPI:`assign`, :PyPI:`pytestutils` and :PyPI:`monkey-patch` These packages seem to tackle the problem from a similar side as ApeMan :PyPI:`pypatch` A more aggressive variant of ApeMan that will actually apply the patch to the source module itself. :PyPI:`ook` and :PyPI:`monkey` Python, version specific patching, seems to be a nice compliment for use with ApeMan. 
:PyPI:`pyjack`, :PyPI:`monkeypatcher`/:PyPI:`monkeypatch`, :PyPI:`mock-open`, :PyPI:`python-monkey-business` :PyPI:`patched` and :PyPI:`simian` These packages appear to do be alternative implementations of :mod:`unittest.mock` (Previously :mod:`mock`) Monkeypatcher seems to be dead. PyJack seems to be a well developed competitor to mock. Simian and mock-open extend and add functionality to mock. :PyPI:`wrapt` Graham Dumpleton of WSGI/CGI fame wrote this package as an alternative to :meth:`collections.wraps` it would seem. :PyPI:`ext` and :PyPI:`forbiddenfruit` This seems to facilitate patching builtins (these are usually written in C and not readily modified from the Python side of things). :PyPI:`mr-monkeypatch` This supposedly simulates ruby monkey patching but the github page is dead :PyPI:`patcher` These seem to fall more udner git-like roll and will diff/patch source trees. The following are unrelated but seemed interesting and showed up in a Python search for Patches. :PyPI:`whatthepatch` More of a diffing tool :PyPI:`gorella` This has nothing to do with mokey patching modules but rather fixing up regular expressions :PyPI:`monkeytime` A quicker version of strptime for Python :PyPI:`patched_unittest` Not sure, seems dead :PyPI:`virtualtime` Fiddles with the time modules. :PyPI:`extras`, :PyPI:`wrappers` and :PyPI:`AnyQt` Both provide additional features to the various libraries, it may be possible to provide these as patches for ApeMan-Overlays. wrappers seems interesting. :PyPI:`pretend` It pretends not to be a mock clone but ... :PyPI:`modulegraph` Python dependency checker that seems to check the compiled bytecode versus the sources. 
:PyPI:`utknows` Seems to skip unittests based upon prior executions :PyPI:`code_monkey` Python refactoring tool :PyPI:`patchio` This patches command line applications or something Totally unrelated :PyPI:`ase` Check this out, it does sort of co-ordinated molecular model simulation :PyPI:`marrie` Command line podcast player :PyPI:`PyLobby` Python chat interface :PyPI:`PyDSLtool` Easy to program DSL languages :PyPI:`agile` Metapackage for Python agile development :PyPI:`cdiff` Coloured diff output, much like a merge tool. :PyPI:`overwatch` Log watching and tracing/tracking utility. :PyPI:`pagoda` Simulation framework for Python :PyPI:`habito` Tracks ones command line usage to measure productivity :PyPI:`scd`, :PyPI:`bumpversion` and :PyPI:`versioneer` Version number management/tracking :PyPI:`Docu` Previously :PyPI:`PyModels` maps Python objects to schemaless databases. :PyPI:`IntelHex` Binary/hex code editor :PyPI:`sphinxcontrib-trio` Python/Sphinx extension for Async based documenation :PyPI:`gignore` Pulls down git ignore files from the github repo hosting them :PyPI:`ib_insync` previously :PyPI:`tws_async` Some sort of wrapper for some sort of broker interface. :PyPI:`pychemy` :PyPI:`tesselate` Chemistry related package :PyPI:`liable` Unittest generator ? :PyPI:`pulp-or` and :PyPI:`pulp-py3` Linear programming modeller :PyPI:`pyaardvark` Python USB/PCI interface driver thingamy :PyPI:`aperturesynth` and :PyPI:`arches` Photographic manipulations :PyPI:`todo.py` :PyPI:`tomaty` Todo lists :PyPI:`mlab` Matlab wrapper :PyPI:`restview` RST viewer, seems interesting :PyPI:`apidoc` Documents ones api much like oxygen does :PyPI:`curious` Graph based data exploration tool :PyPI:`planar` 2D graphics library :PyPI:`SciPySim` Python simulation package :PyPI:`spreadsheet` Google/Python API for google sheets :PyPI:`hackr` Some sort of hackathon assistant :PyPI:`snafu` Function as a service thingamy, webbased dfunctions ? I'm not sure. 
:PyPI:`askbot` and :PyPI:`askbot-tuanpa` SO for django :PyPI:`nova6` BitTorrent client/search service :PyPI:`Literal` Random project :PyPI:`coloriffic` Determines the base colours in an image :PyPI:`towel-foundation` DRY Django development :PyPI:`soar` :PyPI:`robotframework-httpd` Robotics library :PyPI:`itermplot` Commandline plotting for matplotlib :PyPI:`scikit-tensor` :PyPI:`django-matrix-field` Multilinear Algebra and Tensor factorizations :PyPI:`calibrate` Generates calibration curves :PyPI:`Alto` and :PyPI:`Django-theming` Media ===== Known references in Media * `The Kinks - ApeMan <https://youtu.be/eEep67akIn4>`_
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/literature.rst
literature.rst
--------- Objective --------- ApeMans' objective is to achieve the behaviour described in this section; irrespective of Pythons' underlying import implementation. This is described in three steps the first two describe the necessary package/module structure and the third the expected behaviour. Site Structure ============== Consider the following file structure installed within ones site-packages, the simplest accomodated by ApeMan. :: site-packages/ overlay/ __init__.py _module_.py module.py Where the base module has the following content :file:`module.py`, :: print("{:40}|{}".format(__file__,__name__)) class Class() : def __str__() : return "Class" and the :term:`patch` for the module, :file:`overlay/_module_.py`, has the following content. :: print("{:40}|{}".format(__file__,__name__)) from module import * class Class(Class) : def __str__() : return "Overlay({})".format(super().__str__()) and the :term:`overlay`, itself, has the following content within its :file:`overlay/__init__.py`. :: print("{:40}|{}".format(__file__,__name__)) from apeman import ApeMan apeman = ApeMan() Project Structure ================= Further more consider a simple script one might be working on. :: PROJECT example.py Where :file:`example.py` has the following contents. :: print("{:40}|{}".format(__file__,__name__)) import overlay from module import Class print(Class()) Execution ========= Running the code in :file:`PROJECT/example.py` should output the following response. 
:: __main__.py ..\overlay\__init__.py ..\overlay\_module_.py ..\module.py Overlay(Class) Stepping through its execution should take one through the following sequence of events; the print statements are ignored for brevity : :file:`PROJECT/example.py`\ **[3]**\ :code:`import overlay` Finds and loads the :mod:`overlay` package executing its :file:`__init__` file and triggering the following actions : :file:`site-packages/overlay/__init__.py`\ **[3]**\ :code:`from apeman import ApeMan` Load the current ApeMan implementation recommended for this version of Python. :file:`site-packages/overlay/__init__.py`\ **[4]**\ :code:`apeman = ApeMan()` Install an instance of :class:`ApeMan` onto the front of :attr:`sys.meta_path`. :file:`PROJECT/example.py`\ **[4]**\ :code:`import module` Import :class:`Class` from :mod:`module` triggering the following actions :attr:`sys.meta_path[apeman]` ApeMan intercepts the search for :mod:`module` and loads :file:`site-packages/overlay/_module_.py` in place of :file:`site-packages/module.py`. :file:`site-packages/overlay/_module_.py`\ **[3]**\ :code:`from module import *` Imports all the objects, or the subset specified by :attr:`__all__`, from the scope of :mod:`module` into its own. :attr:`sys.meta_path[apeman]` Again ApeMan intercepts the search for :mod:`module` but passes the request on to :attr:`sys.meta_path[SourceFileLoader]` which loads and returns :file:`site-packages/module.py`. :file:`site-packages/overlay/_module_.py`\ **[4]**\ :code:`class Class(Class) : ...` :class:`module.Class` is now subclassed by and substituted for :class:`overlay._module_.Class` as the module completes it's execution and returns :file:`PROJECT/example.py`\ **[5]**\ :code:`print(Class())` Instantiating :class:`Class` now instantiates :class:`overlay._module_.Class` as opposed to :class:`module.Class` printing "Overlay(Class)" instead of "Class". .. note :: The structure described here is implemented within the mockup folder. 
Although the filenames differ and the print statements are removed or substituted by logging calls.
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/objective.rst
objective.rst
------ ApeMan ------ This section describes the design and implementation of ApeMan. One first considers how the `Ideal Overlay`_ might be structured and work from a user's standpoint before considering the `Ideal Mechanism`_ for implementing this from the developers point. Ideal Overlay ============= This and the following sections are discussed with respect to the following project structure. :: PROJECT/ # The root folder of ones project OVERLAY/ # The folder containing ones overlays _PACKAGE_/ # A package that one is pactching _MODULE_.py # A module within the package containing ones patches __init__.py # The file that sets up the ApeMan's OverlayImporter __main__.py # The primary file that one's users would execute The structure seems to encompass all of the edge cases one might encounter while implementing an overlay. It provides various entry points where the the overlay machinery or mechanism may be installed or replace the python machinery. The :file:`__main__.py` and :file:`OVERLAY/__init__.py` file(s) present the most sensible entry point(s). The package(s)/module(s) structure is nested to ensure the overlay machinery can handle complex cases. While the underscored ``PACKAGE`` and ``MODULE`` names prevent namespace clashes under various scenarious. .. _Apeman:Ideal Import: Ideal Import ------------ The ideal import statement that would trigger the above overlay of a package or module is illustrated. .. code-block :: python :caption: __main__.py from overlay import PACKAGE from PACKAGE import ... It allows the user to explicitly enable or disable an overlay by ``PACKAGE`` or ``MODULE`` name. There might even be scope to allow the ``*`` wild card to enable or disable an entire set of overlays within an :file:`OVERLAY`. A lesser form of this statement might also be possible and is provided for comparison. .. code-block :: python :caption: __main__.py import overlay from PACKAGE import ... 
The user is more restricted by this form and may only enable or disable the complete set of overlays under :file:`OVERLAY`. .. _Apeman:Module Substitution: Ideal Mechanism =============== Ideally an overlay should transparenlty substitute the original modules with their patched alternatives. The following figure shows how this might be implemented. The import is to redirected to an overlay which imports and wraps the requested module before returning itself in place of it. .. The initial import should be redirected to the overlay this performs it's own import of the wrapped module and returns itself in place of it. .. The initial import should be redircted to import the overlay instead. This in turn imports the original module which it wraps. Finally the overlay returns itself in in substitution. .. _fig:overlay: .. figure :: .static/figures/overlay.png :align: center :figwidth: 80% Ideally an import should be redirected to it's overlay, if any, which imports the original module internally returning itself in replacement. Preferably this should hook into the *Finder* and *Loader* phases of the mechanism. The Python documentation recommends one intercedes within the the *Impoter* layer and not the *Import* layer. Module Substitution ------------------- One mechanism for effecting overlays is by module swapping. If the overlay module being imported could catch the import of the original module being patched one could swap the two modules during an import. .. topic :: Example : Module Substitution Explicitly when one imports the overlay one sets up a trap to catch the import of ``PACKAGE`` from the :file:`__main__.py` .. code-block :: python :caption: :file:`__main__.py` import overlay import PACKAGE which maps it to the overlay file, ``overlay/_PACKAGE_.py``. This file is loaded and during its execution it imports the original ``PACKAGE``. .. 
code-block :: python :caption: overlay/_PACKAGE_.py from PACKAGE import * Now the original ``PACKAGE`` is installed as ``_PACKAGE_`` and the overlay as ``PACKAGE``. Later imports will then find and access the overlay in ``sys.modules``. While the overlay may access the original under ``_PACKAGE_`` in ``sys.modules``. The :download:`original implementation<34/original.py>` succeeded in implementing this to a degree. One is under the impression that the import machinery checks that the appropriate module is imported, returning a substitute is quite troublesome to get right. A more :download:`formal implemetnation <34/substitution.py>` followed shortly afterwards but this too failed to handle nested structures properly. Getting this right in the *Importer* layer is hampered by the submodule addressing the parent module, which is resolved within the *Import* layer and simply not accessible from a *Finder* or an *Importer*. The result is that the importer could remap ``MODULE`` to ``OVERLAY._MODULE_`` but could not remap ``PACKAGE.MODULE`` to ``OVERLAY._PACKAGE_._MODULE_`` unless one hooked into *Import* aswell. .. _Apeman:Module Replacement: Module Replacement ------------------ .. Module swapping does not appear to be possible within the current (Python 3.4) import API's. An alternative to `Module Substitution`_ involves replacing the original module with it's overlay. Since python provides scoping one may retain a reference to the original module within an overlay and present only the latter to later imports. This is akin to "hot swapping" modules. The overlay would replace the original module, within ``sys.modules``, by itself. Subsequent imports would see the overlay as the default module. Access to the original module could be acheived via the overlay. .. .. topic :: Example : Module Replacement .. When the overlay is loaded it is executed, importing the original module, which is installed in ``sys.modules`` and included in the overlays' scope. 
The overlay is then installed within ``sys.modules`` overwriting the original module. Access the original module is now via the overlay. The strategy is aggressive and complicates, or probably breaks, some of the import systems features. Importing sub-package and modules becomes rather tricky and one needs to track what one is importing. Module reloading is also going to be very tricky as one will have to overwrite both the overlay and the original in quick succession. .. topic :: Proposed Mechanism One means of addressing this is to perform name mangling. Ones main script would import the overlay as usual .. code-block :: python :caption: :file:`main.py` import overlay import PACKAGE but the patch would import the mangled variant of the package. One could include preceeding and succeeding underscores to indicate the call is from the overlay. .. code-block :: python :caption: :file:`overlay/_PACKAGE_.py` from _PACKAGE_ import * Allowing the importer to see the name ``PACKAGE`` when importing the overlay and ``_PACKAGE_`` when importing the original package/module. In this way a custom importer may distinguish between two imports in the sort of bidirectional situation encountered in overlays. This does not circumvent the underlying problem, however, since ``load_module`` still performs the actual import. ``load_module``, particularly the ``builtin.load_module``, checks for the module in ``sys.modules`` and returns this if it exists, this short circuits anything that one might have done in a customized *Importer*. In a new session where nothing is loaded this strategy does work since there is no module installed within ``sys.modules``. For reloads and sub-package/module access this becomes more tricky as now the root module in the overlay has replaced the original root module. Practical Mechanisms ==================== .. 
toctree :: :caption: ApeMan Variations :name: ApeMan Variations :hidden: Python 3.3 : ApeMan <33/apeman> Python 3.4 : ApeMan <34/apeman> Python 3.5 : ApeMan <35/apeman> Python 3.6 : ApeMan <36/apeman> Given the previous sections and the various observations one has made it is clear that this is not the simplest thing to implement. An overlay manager may have to hook into all three entry points in the import mechanism as shown below. .. figure :: .static/figures/structure/encase.png :align: center :figwidth: 80% The remainder of this section discusses the various implementations
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/apeman.rst
apeman.rst
------------ Contribution ------------ Users who come to rely upon ApeMan and wish to request a feature may reach me via e-mail. Ideally post the question on stack overflow and forward the link or address me in a comment as @carel. Source Code =========== The source code is available via git from the location : **[email protected]:opensource/apeman.git** Currently pull requests are not allowed as this is a private server but patches are accepted via mail. In time ApeMan will be moved to either GitLab.com or GitHub.com. Packaging and Installation ========================== To package ApeMan one may invoke the build command for a specific ``TARGET`` :: python setup.py build_TARGET Persons doing development are advised to install ApeMan from the repository root using ``pip`` as follows :: pip install -e . Documentation ============= The documentation for ApeMan is written in |RST| and compiled using sphinx via the :file:`setup.py` script. To build the project documentation invoke the following command. :: python setup.py build_sphinx Use the :code:`-h` or :code:`--help` switch to see how this build process may be customized. Testing ======= Once the repository is setup one may run the ApeMan test suites as follows. :: python setup.py test .. note :: Due to the structure of ApeMan and its dependence upon particular Python versions the test suites will barely succeed or fail catastrophically. This is, alas, the expected behaviour at this time. One still needs to guard all the tests against the ApeMan implementation, :class:`apeman.OverlayImporter` or :class:`apeman.Importer`, that is selected in the background. This is dependent upon both the Python version and the preferred ApeMan implementation for that version of Python. ApeMan was largely developed in Python 3.5 and 3.6 and the tests mostly succeed on these platforms. Behaviour on other versions of Python will likely fail.
One is still setting up tox to ensure consistent behaviour upon different versions of Python. Diagnostics =========== The following checks are largely used to understand what the Python import machinery is doing under the hood. These have not been ratified into unit tests but are noted here for future development. ``__import__`` -------------- It seems that the :attr:`__import__` assigned to :mod:`builtins` is not necessarily the same :meth:`__import__` that is defined in :mod:`importlib`. This seems to be the case at least in older CPython (<=2.7 and <=3.3) implementations. The following script provides the simplest test of this. :: import importlib import builtins print(builtins.__import__ == importlib.__import__) The cause for the discrepancy is that the import mechanism in older implementations was implemented in C rather than in Python. To check if the import in one's system is implemented in Python or in C one may run the following check :: import builtins import dis try : dis.dis(builtins.__import__) # Originally : wrapped within a print statement except : print("The dis module fails to disassemble 'builtins.__import__' in older versions of Python e.g. 2.7") An earlier, possibly misplaced invocation that I have also used is as follows. :: import inspect import importlib print(inspect.getsourcelines(importlib.__import__))
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/contribution.rst
contribution.rst
------- Overlay ------- This section describes the best practices for developing ones own overlays. .. note :: This section is still being drafted and one is referred to either the :ref:`index:introduction` and :ref:`objective:Objective` pages. Structure ========= Whether ones overlay is made available globally or locally one structures their overlay(s) as follows:: OVERLAY/ # The root folder of the ApeMan overlay _PACKAGE_.py # The module containing ones patches, renamed after the source module or package ... # Further patches provided by the overlay __init__.py # The file invoking ApeMan; identifying it as an overlay Importing and invoking :class:`ApeMan` within the overlays' :file:`__init__.py` file :: from apeman import ApeMan; apeman = ApeMan() Local Overlay(s) ================ Locally an overlay may be created within ones project by simply including a folder, :file:`OVERLAY`, and an appropriate :file:`OVERLAY/__init__.py` file invoking ApeMan. :: PROJECT/ # The root folder for ones project PACKAGE/ # The root folder of ones package. OVERLAY/ # The root folder of the ApeMan overlay ... # The contents of the overlay __init__.py # The file invoking ApeMan; identifying it as an overlay ... # The other packages/modules in the package. __main__.py # The main script importing and using the patched module. Other modules within ones package may then invoke the overlay via relative import. :: import .OVERLAY from SOURCE import * ... Global Overlay(s) ================= Globally, an overlay, is provided as a separate, standalone package. :: PROJECT/ # The root folder for ones project OVERLAY/ # The root folder of the ApeMan Overlay ... # The contents of the overlay __main__.py # The main script importing and using the patched module. In this case the modules in ones package must invoke the overlay using an absolute import. :: import OVERLAY from SOURCE import * ... .. 
One must explicitly import the features they need as the `OverlayImporter` actually blocks further imports. .. Note that an overlay package is meant to reside alongside its sibling module to afford the most flexibility. .. Whether or not this is possible at every level within a package depends upon how Python enforces scoping. Naming ====== It seems conventional upon PyPI to name the packages extending a framework in reference to the framework. One therefore recommends that one name any package that depends upon ApeMan as ApeMan-PACKAGE. .. todo :: This section is largely covered by the :ref:`index:Usage` section and should either be merged or deprecated.
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/overlay.rst
overlay.rst
-------------- Module Objects -------------- Modules are a specialized type that may be invoked as follows :: import types mod = types.ModuleType("mod") Attributes ========== The following attributes are assigned to a module when it is instantiated. ``__name__`` The modules' name, this may be combined with ``__path__`` to determine the FQMN ``__file__`` Associated source file (If any) ``__doc__`` Documentation string as is typically found near the top of a file inline with the imports and not nested under a ``if __name__ == '__main__' :`` statement ``__package__`` The name of the package, that is the FQMP. This must be specified to allow relative imports, this is not set when a script is run as is i.e. __name__ == "__main__" ``__path__`` The path to a package, this might be a .zip/.pyc/.py file for a vanilla package, a directory for a name spaced package or None for a python builtin module. Actually this is a list of paths the package will recurse through to find submodules. ``__spec__`` Module Specification, this is set up so that the module might be loaded from a spec and a spec might be generated from a module. Patching Import =============== A minimal import implementation is provided by David Beazley as follows (David Beazley @ 1:44) :: def load_module(name) : # Check mod in sys.modules # load = sys.modules.get(name) # if load is None : extn = '.py' with open(name + extn,'r') as file : code = file.read() load = types.ModuleType(name) load.__file__ = name + extn code = compile(code, name + extn, 'exec') exec(code, load.__dict__) # Register mod in sys.modules and return it return load
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/machinery/modules.rst
modules.rst
==================== ApeMan in Python 3.6 ==================== .. automodule :: apeman.__36__ :members: :member-order: bysource :undoc-members: :private-members: :special-members: .. note:: I recently made a modification to the :meth:`__call__` method for the 3.6 version that should probably be ported to other implementations. When sphinx was building the documentation for a module that used apeman, apeman would get installed in the importer list and intercept some of the sphinx imports. I think sphinx fiddles with the import statement too and was passing a :obj:`level` keyword argument, I have caught such arguments in :obj:`kvps` but had not passed them through to Python's import statement, which apeman sets as its :attr:`self.imp` attribute, that is one was calling :code:`self.imp(name, *args)` instead of :code:`self.imp(name, *args, **kvps)`. .. originally the :obj: roles distinguished between function arguments and variables
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/36/apeman.rst
apeman.rst
# Archived "original" ApeMan implementation (docs/34/original.py).  This is the
# first module-substitution experiment described in the design notes: a single
# combined PEP 302 Finder+Loader that, when an overlay package is installed,
# swaps a patched _NAME_ module in for the NAME module it patches.  Kept for
# reference; the surrounding documentation records that it is known to be flawed.
# NOTE(review): the archive collapsed this file onto one line; indentation has
# been reconstructed and should be confirmed against the upstream repository.
# System
import os
import sys
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as izip, tee
# Imports
from importlib import util, abc, machinery
import imp
# Debugging
import logging
# Constants
modsep = '.'  # separator between the parts of a fully qualified module name

class OverlayImporter(abc.MetaPathFinder, abc.Loader):
    # PEP302 prescribes the use of two different classes, a Finder
    # and a Loader, that find and load modules respectively. Each
    # respectively provides a find_module and a load_module method.
    # These two classes can be combined into a unified Importer.
    #
    # FQMN - Fuly Qualified Module name              overlay.tiers                 This is what the user imports i.e. the handler or wrapper
    #
    # FQON - Fuly Qualified Overlay name             tiers                         This is what is installed i.e. the overlay
    # FQAN - Fuly Qualified Hidden/Abstracted name   _tiers_                       This is what should have been installed but is now covered up i.e. the original
    #
    #*FQSN - Fuly Qualified System name              overlay._tiers_               This is what the user really imports
    #
    # FQPN - Fuly Qualified Path name                overlay\\tiers                This is the relative path name
    # FQFN - Fuly Qualified Path name                overlay\\tiers\\.__init__.py  This is the relative file name (e.g. for a File rather then a Path loader)
    #
    # * This entry is probably redundant or meant to be deprecated
    #
    # Running indentation counter for the debug log; only defined in debug builds.
    if __debug__ : dent = 0

    def indent(self, label = None, char = " ", length = 12):
        # Format `label` into a fixed-width, indented log tag, then deepen the
        # indentation by one level (called on entry to a nested operation).
        message = "{0}{1:{2}}".format(char*self.dent, label[:length-self.dent],length-self.dent)
        self.dent += 1
        return message

    def undent(self, label = None, char = " ", length = 12):
        # As indent(), but shallows the indentation by one level afterwards
        # (called on exit from a nested operation).
        message = "{0}{1:{2}}".format(char*self.dent, label[:length-self.dent],length-self.dent)
        self.dent -= 1
        return message

    def ondent(self, label = None, char = " ", length = 12):
        # Format `label` at the current indentation level without changing it.
        return "{0}{1:{2}}".format(char*self.dent, label[:length-self.dent],length-self.dent)

    def __init__(self, *args, name = None, path = None, logger = logging.getLogger(__name__), **kvps):
        # `name` and `path` default to the module that instantiated this
        # importer, i.e. the overlay package whose __init__ created it.
        # NOTE(review): the `logger` default is evaluated once, at function
        # definition time, not per call — confirm this is intended.
        super().__init__(*args, **kvps)
        self.mask = "_{}_"   # pattern that renames NAME to _NAME_
        self.trap = None     # tail name of the module currently being substituted
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.path = path or inspect.getmodule(inspect.stack()[1][0]).__path__ # Used to reference __path__ somehow
        self.log = logger
        self.log.debug("{:12}: {}".format(self.ondent("Instance"), self.__class__))

    def mapTarget(self, name) :
        """Maps request to the overlay module"""
        # Given overlay.tiers return tiers
        # Older Code
        return self.mask.format(name)
        # Newer Code
        # NOTE(review): unreachable — the "Older Code" return above always
        # exits first; retained as found in the archive.
        return modsep.join([part for test, part in izip(self.name.split(modsep),name.split(modsep)) if not test])
        # return modsep.join(name.split(modsep)[1:])

    def mapToHidden(self,name) :
        """Maps request to a corresponding hidden module"""
        # This must be run upon the output of mapToTarget
        # Given overlay.tiers or tiers return _tiers_
        # Older Code
        # N/A
        # Newer Code
        # for trap in self.trap :
        #  parts = trap.split(modsep)
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def mapSource(self, name) :
        """Deprecated : Mapped the Overlay back to the module"""
        # Strips the mask from a hidden name, e.g. _tiers_ -> tiers.
        # Older Code
        mask = self.mask.split("{}")
        return name[len(mask[0]):-len(mask[-1])]
        # Newer Code
        # N/A

    def mapToSystem(self, name):
        """Maps a module to the corresponding overlay system path"""
        # This finds the first folder or file matching the module name
        # note that name must be processed beforehand using self,mapToSource
        # Drop the leading parts of `name` that it shares with the overlay's
        # own dotted name (izip is zip_longest, so extra tail parts survive).
        stem = [part for test, part in izip(self.name.split(modsep),name.split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in self.path)
        path = None
        # NOTE(review): loop structure reconstructed from a flattened archive;
        # as written it leaves `path` on the last candidate — confirm upstream.
        while next(test, None) :
            path = next(item, None)
        if not os.path.isdir(path) : # [ref:2]
            path += '.py'
        return path

    def mapToFile(self, name, path):
        """ Similar to mapToSystem but for packages it tries to map to __init__.py files"""
        # Note one must premap the path FQMN using mapToSource
        # self.log.debug("MapToFile : {}".format(self.path))
        stem = [part for test, part in izip(self.name.split(modsep),name.split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in path)
        path = None
        while next(test, None) :
            path = next(item, None)
            # self.log.debug(path)
        # Packages resolve to their __init__.py, plain modules to NAME.py.
        if os.path.isdir(path) : # [ref:3]
            path = os.path.join(path, '__init__.py')
        else :
            path += '.py'
        return path

    def rename(self, name) :
        # Currently this assumes the module is one level deep within
        # the package, that is the following structure is expected
        #
        # package\  The folder containing the __init__.py you are reading
        #  _module_ The module you are patching renamed with underscores
        #
        return modsep.join([item if enum!=1 else "_{}_".format(item) for enum, item in enumerate(name.split(modsep))])

    def overlays(self) :
        # This is simply the list of modules that are patched under
        # this overlay.
        # pkg.util.walkpackages is apparently useful here
        modules = [os.path.splitext(item)[0] for item in os.listdir(self.path[0]) if os.path.splitext(item)[0] not in ["__init__","__pycache__"]]
        if self.trap :
            return [self.mapSource(os.path.splitext(item)[0]) for item in modules]
        else :
            return modules

    # def find_spec(self, name, path, target = None):
    #  # One should use the module returned by find_module along
    #  # with the function utils.spec_from_loader() to create a
    #  # spec for the more modern API's.
    #  #
    #  # FQMN/name - name of the modules
    #  # path - path entries for the module, that is the parent packages.__path__ attribute.
    #  # target - previous module if the current one is being reloaded, none otherwise.
    #  self.log.debug("{:12}: {} {}".format(self.ondent("Find Spec"), name, path, target))
    #  # spec = util.find_spec(name)
    #  # self.log(spec)
    #  self.loader=self
    #  return self.find_module(name, path) # causes infinite recursion
    #  # return None

    # def loader(self) :
    #  return self
    #  # return self.load_module

    def find_module(self, name, path=None):
        # Deprecated use :
        #
        # Python > 3.3 use IMPORTLIB.UTIL.FIND_SPEC
        # Python = 3.3 use IMPORTLIB.FIND_LOADER
        #
        bits = name.split(modsep)
        self.log.debug("{0:12}> {1:<40} {2:<80}".format(self.indent("Find Mods"),name, str(path)))
        # Case 1 : a dotted import whose tail has a patch in this overlay —
        # claim it so load_module() can substitute the patched copy.
        if len(bits) > 1 and self.mapTarget(bits[-1]) in self.overlays(): # Note : the clamp on bit length is to ensure the importer rolls back to root to import patched modules.
            self.path = path
            self.log.debug(" "*self.dent + "Discovered : {0:<40} {1:<80}".format(name,__file__))
            return self
        # Case 2 : the overlay itself re-imports the module it patches —
        # delegate the find to the other meta path finders but keep control of
        # the load so the result can be registered under the masked name.
        if bits[-1] == self.trap :
            for meta in sys.meta_path :
                if meta is not self :
                    self.temp = meta.find_module(name, path)
                    if self.temp :
                        self.log.debug(" "*self.dent + "Discovered : {}".format(name))
                        return self
        return None

    def load_module(self, name):
        # Deprecated replace with the classes in IMPORTLIB.MACHINERY
        #
        # If IMP.LOAD_MODULE was used with IMP.FIND_MODULE previously
        # then IMPORTLIB.IMPORT_MODULE is a better substitute. If not
        # then use the loader that pairs with the prior finder. That
        # is one of :
        #
        # IMPORTLIB.UTIL.FIND_SPEC <->
        # IMPORTLIB.FIND_LOADER <->
        #
        # self.dent += 1
        self.log.debug(" "*self.dent + "Importing > {}".format(name))
        parent, _, module = name.rpartition(modsep)
        if self.trap :
            # The overlay's own re-import of the trapped module: load it via
            # the finder captured in find_module() and expose it under the
            # masked (_NAME_) alias.
            self.trap = None
            self.log.debug(" "*self.dent + "Pass Trapped")
            # NOTE(review): legacy loaders normally expect load_module(name);
            # calling self.temp.load_module() without an argument may fail for
            # some loader implementations — confirm against upstream.
            temp = self.temp.load_module()
            sys.modules[self.mapTarget(name)] = temp
            self.log.debug(" "*self.dent + "Imported < {}".format(self.mapTarget(name)))
            # self.dent -= 1
            return temp
        else :
            # A user import of a patched module: load the overlay's _NAME_
            # file in its place, arming the trap for the nested re-import.
            self.log.debug(" "*self.dent + "Pass Through {}".format(module))
            # if module not in self.overlays(): # Not Importable
            #  raise ImportError("%s can only be used to import pytz!",self.__class__.__name__) # Inclde module name and possibly modules
            if name in sys.modules: # Already Imported
                return sys.modules[name] # Modules' absolute path
            self.trap = module
            file, path, desc = imp.find_module(self.mapTarget(module), self.path) # NB !!! This was psuedo
            try:
                temp = imp.load_module(name, file, path, desc)
            finally:
                if file:
                    file.close()
            # NOTE(review): registers under the bare tail name rather than the
            # fully qualified `name` — dotted imports may therefore miss this
            # entry; retained as found in the archive.
            sys.modules[module] = temp
            self.log.debug("{:10} < {}".format(self.undent("Imported"),module))
            return temp

if __name__ == "__main__" :
    # Ad hoc manual test: exercises the importer against the fixture packages
    # expected under ../tests (uppercase / tiers).
    # print("Main")
    import logging
    logging.basicConfig(format = '%(message)s')
    logger = logging.getLogger("__34__")
    logger.setLevel(logging.DEBUG)
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)),'..\\tests')
    sys.path.append(__root__)
    # General Import
    # from overlay import *
    # Targeted Import
    # from overlay import tiers
    # Nested Import
    # from overlay.tiers import first
    # Staggered Import
    from uppercase import tiers
    logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
    from tiers import module_a
    logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if 'overlay' in key or 'tiers' in key]))
    # logger.debug("\n".join(["{:24} : {}".format(key, sys.modules[key]) for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/34/original.py
original.py
# System import os import sys # Types import types # Debugging from pdb import set_trace as db # Inspection import inspect # Iteration from itertools import zip_longest as izip, tee # Imports from importlib import util, abc ,machinery, _bootstrap as bootstrap import imp # Debugging import logging # Local Libraries try : from . import descriptors from . import utilities except SystemError: import descriptors import utilities # Constants modsep = '.' class OverlayImporter(abc.MetaPathFinder, abc.SourceLoader, utilities.Indentation): """ This class combines a Finder and a Loader into an Importer. .. inheritance-diagram:: apeman.__34__ :parts: 2 The strategy used maps overwrites the imported module with the overlay import under a different name Since Python imports are atomic one needs to trap modules being loaded and wrapped overlay.tiers is to be mapped to overlay._tiers_.py which is imported as tiers, while tiers, the original module is imported as _tiers_ .. note :: This is not an especially good implementation, it is not thread safe as it does not invoke module locks when loaded. 
""" # See section 5.5 in [1] to determine if the Path Based Finder # is a better fit for this class # # https://docs.python.org/3/reference/import.html root = descriptors.PathName() def __init__(self, *args, name = None, path = None, root = None, **kvps): super().__init__(*args, **kvps) self.mask = "_{}_" self.trap = {} self.wrap = {} self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__ self.root = root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__) self.mods = self.modules() self.log = logging.getLogger(__name__) self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key])) def mapToTarget(self, name) : """Maps request to the overlay module""" return modsep.join([self.mask.format(part) for part in name.split(modsep)]) def modules(self) : # This differs from overlays in that it recurses through the # folder structure to find python modules ext = '.py' mod = lambda parts, ext : [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)] lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*'+ext)] return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]) : item[1] for item in lst} # The 3.5 module should implement this. 
# # def find_spec(self, name, path, target = None) : # self.log.debug("{}> {:<40} {:<80}".format(self.indent("FS:" + self.name),name, str(path))) # spec = util.spec_from_file_location(self.mapToTarget(name), str(self.modules()[self.mapToTarget(name)])) # self.log.debug(spec) # self.trap[name] = spec.loader # spec.loader = self # self.log.debug(spec) # return spec # # def exec_module(self, *args, **kvps) : # self.log.debug("Exec_Module") # self.log.debug(args) # self.log.debug(kvps) # # def create_module(self, *args, **kvps) : # self.log.debug("Create_Module") # self.log.debug(args) # self.log.debug(kvps) def find_module(self, name, path=None): # self.log.debug("Find_module") self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name),name, str(path))) # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) if self.mapToTarget(name) in self.mods : # User imports _PACKAGE_ # self.log.debug(self.undent("F:Trap")) self.trap[name] = self.mods.pop(self.mapToTarget(name)) return self if self.trap.pop(name) : # overlay imports PACKAGE # self.log.debug(self.undent("F:Wrap")) for meta in [meta for meta in sys.meta_path if meta is not self]: self.wrap[name] = self.wrap.get(name) or meta.find_module(name, path) return self # if name in self.wrap : # overlay imports PACKAGE # return self return None def load_module(self, name): # self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name),name)) load = sys.modules.get(name) if name in self.trap : # One should strictly use SourceFileLoader here instead. 
# self.log.debug(self.ondent("L:Trap")) file = self.trap.get(name) load = types.ModuleType(self.mapToTarget(name)) with file.open('r') as data : code = data.read() # self.log.debug([key for key in sys.modules.keys() if name in key]) load.__file__ = str(file) code = compile(code, str(file), 'exec') sys.modules[name] = load # must occur before exec exec(code, load.__dict__) # self.log.debug([key for key in sys.modules.keys() if name in key]) # self.log.debug(load.__version__) if name in self.wrap : # Note : importing PACKAGE as _PACKAGE_ fails. # This is due to the to the `builtin` importers preventing # name changes. To be explicit they can't find a funny # named module and one can't cross assign the module. One # can reassign it however # self.log.debug(self.ondent("L:Wrap")) spec = self.wrap.pop(name) load = spec.load_module() # self.log.debug([sys.modules[key] for key in sys.modules.keys() if name in key]) # self.log.debug(self.undent("L:Done")) return load # temp = self.modules() # file = str(temp[self.mapToTarget(name)]) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(self.mapToTarget(name), file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # OverlayLoader(name, file).load_module(self.mapToTarget(name)) # self.log.debug([key for key in sys.modules.keys() if name in key]) # self.log.debug(self.mapToTarget(name)) # self.log.debug(self.modules().keys()) # file = self.modules()[self.mapToTarget(name)] # # self.log.debug(file) # temp = machinery.SourceFileLoader(name, [str(self.root)]) # temp.load_module() # temp = machinery.SourceFileLoader(name, self.modules()[self.mapToTarget(name)]).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# sys.modules[name] = temp # Using sys.modules[module] = temp fails # self.log.debug([key for key in sys.modules.keys() if key in name]) # self.trap[name].load_module() # temp = OverlayLoader(name, str(self.trap[name])).load_module(modsep.join([self.name,name])) # temp = machinery.SourceFileLoader(name, str(self.trap[name])).load_module() # return temp # self.log.debug([key for key in sys.modules.keys() if key in name]) # # be weary here, re-assigning names is a bit finnicky and has a rollover impact. # sys.modules[name] = temp # Using sys.modules[module] = temp fails # parent, _, module = name.partition(modsep) # Was rpartition # if name in self.trap : # This might break # # Handle Source Import # self.trap.pop(name) # self.log.debug(self.ondent("Pass Trapped")) # temp = self.temp.load_module() # sys.modules[self.mapTarget(name)] = temp # self.log.debug("{}< {}".format(self.undent("Imported"),self.mapTarget(name))) # return temp # else : # # Handle Overlay Import # if module in sys.modules: # Already Imported # return sys.modules[module] # Modules' absolute path # # Import the module # self.trap.append(module) # file = self.mapToRoot(name) # _name_ = self.mapToSource(name) # root,stem = self.pathParts(self.mapToSource(name)) # self.log.debug("{}: {:18} -> {:18} {:80}".format(self.ondent("FileLoader"),root, stem, file)) # temp = machinery.SourceFileLoader(name, file).load_module() # be weary here, re-assigning names is a bit finnicky and has a rollover impact. 
# NOTE(review): dead commented-out loader experiments removed; see VCS history.

if __name__ == "__main__":
    # Developer smoke test: trace imports going through the overlay hook.
    # Setup Logging
    import logging
    logging.basicConfig(format='%(message)s')
    logger = logging.getLogger()  # "__34__"
    logger.setLevel(logging.DEBUG)
    # Make the test packages importable
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..\\tests')
    sys.path.append(__root__)
    import builtins

    def _import_(*args, importer=__import__):
        # Hooks the import statement purely to log what gets imported.
        logger.debug("import : {}".format(args[0]))
        temp = importer(*args)
        logger.debug([temp.__name__, temp.__file__, temp.__package__, temp.__loader__])
        return temp

    # Atomic Imports
    import uppercase
    builtins.__import__ = _import_
    logger.debug("Secondary")
    from tiers import module_a
    # BUGFIX: package_a was referenced below but its import was commented
    # out, producing a NameError when the smoke test ran.
    from tiers import package_a
    logger.debug(package_a.__version__)
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/34/replacement.py
replacement.py
# System
import os
import sys
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as izip, tee
# Imports
from importlib import util, abc, machinery
# NOTE(review): ``import imp`` removed — it was only referenced from
# commented-out code and the module is deleted in Python 3.12+.
# Debugging
import logging

# Separator used in fully qualified module names (FQMN)
modsep = '.'


class OverlayImporter(abc.MetaPathFinder, abc.Loader):
    """Combined PEP 302 finder/loader importing overlay modules.

    PEP 302 prescribes two classes, a Finder (``find_module``) and a
    Loader (``load_module``); they are unified here into one importer.

    Naming scheme:

    * FQMN ``overlay.tiers``  what the user imports (handler/wrapper)
    * FQON ``tiers``          what is installed, i.e. the overlay
    * FQAN ``_tiers_``        the covered-up original module
    * FQPN ``overlay\\tiers``  the relative path name
    * FQFN ``overlay\\tiers\\__init__.py``  the relative file name

    Strategy: an import of ``overlay.tiers`` is mapped onto
    ``overlay._tiers_.py``, which is imported as ``tiers``, while
    ``tiers``, the original module, is imported as ``_tiers_``.
    """

    # Debug-label helpers (candidate for a standalone mixin).
    __indent__ = 0   # current log-indentation depth
    __taglen__ = 18  # fixed tag-column width in log output

    def indent(self, label=None, char=" ", length=__taglen__):
        """Render *label* at the current depth, then deepen by one."""
        if label:
            message = "{0}{1:{2}}".format(char * self.__indent__, label[:length - self.__indent__], max(length - self.__indent__, 1))
        else:
            message = ""
        self.__indent__ += 1
        return message

    def undent(self, label=None, char=" ", length=__taglen__):
        """Render *label*, then reduce the depth by one."""
        if label:
            message = "{0}{1:{2}}".format(char * self.__indent__, label[:length - self.__indent__], length - self.__indent__)
        else:
            message = ""
        self.__indent__ -= 1
        return message

    def ondent(self, label=None, char=" ", length=__taglen__):
        """Render *label* at the current depth without changing it."""
        return "{0}{1:{2}}".format(char * self.__indent__, label[:length - self.__indent__], length - self.__indent__)

    def __init__(self, *args, name=None, path=None, logger=logging.getLogger(__name__), **kvps):
        """Create (but do not register) the importer.

        The instance must still be installed by the caller::

            import sys
            from overlay import OverlayImporter
            sys.meta_path.insert(0, OverlayImporter())

        When *name*/*path* are omitted they are inferred from the module
        that instantiates the importer (normally the overlay package's
        ``__init__.py``), making the one-liner
        ``from layman import OverlayImporter; OverlayImporter()`` work.
        """
        super().__init__(*args, **kvps)
        self.mask = "_{}_"  # pattern mapping PACKAGE -> _PACKAGE_
        self.trap = []      # bare module names currently being re-imported
        self.path = {}      # per-module path lists recorded by find_module
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = path or inspect.getmodule(inspect.stack()[1][0]).__path__
        self.log = logger
        self.log.debug("{:{}}: {:40} {}".format(self.ondent("Instance"), self.__taglen__, str(self.__class__), [key for key in sys.modules.keys() if self.name in key]))

    # Mapping
    def pathParts(self, name):
        """Split a FQMN into (importer part, overlay part)."""
        root, stem = [], []
        [root.append(part) if test else stem.append(part) for test, part in izip(self.name.split(modsep), name.split(modsep))]
        return modsep.join(root), modsep.join(stem)

    def mapToSource(self, name):
        """Map a request to the corresponding overlay module.

        e.g. ``overlay.tiers`` -> ``overlay._tiers_``
        """
        # Consider the use of util.resolve_name(name, path)
        return modsep.join([part if test else self.mask.format(part) for test, part in izip(self.name.split(modsep), name.split(modsep))])

    def mapTarget(self, name):
        """Deprecated: mask a bare module name, e.g. ``tiers`` -> ``_tiers_``.

        Use ``self.pathParts(self.mapToSource(FQMN))`` instead.
        """
        return self.mask.format(name)

    def mapToHidden(self, name):
        """Map a request to the corresponding hidden module.

        Must be applied to the output of :meth:`mapTarget`;
        e.g. ``tiers`` -> ``_tiers_``.
        """
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def mapSource(self, name):
        """Deprecated: strip the mask, e.g. ``_tiers_`` -> ``tiers``."""
        mask = self.mask.split("{}")
        return name[len(mask[0]):-len(mask[-1])]

    def mapToSystem(self, name):
        """Map a module to the corresponding overlay system path.

        Finds the first folder or file matching the module name under any
        root; *name* must be pre-processed with :meth:`mapToSource`.
        """
        stem = [part for test, part in izip(self.name.split(modsep), name.split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in self.root)
        path = None
        while next(test, None):
            path = next(item, None)
            if not os.path.isdir(path):  # [ref:2]
                path += '.py'
        return path

    def mapToFile(self, name):
        """Like :meth:`mapToSystem` but maps packages to their ``__init__.py``.

        Searches the paths recorded for *name* in ``self.path``;
        :meth:`mapToSource` is applied internally.
        """
        stem = [part for test, part in izip(self.name.split(modsep), self.mapToSource(name).split(modsep)) if not test]
        test, item = tee(os.path.join(path, *stem) for path in self.path[name])
        path = None
        while next(test, None):
            path = next(item, None)
            if os.path.isdir(path):  # [ref:3]
                path = os.path.join(path, '__init__.py')
            else:
                path += '.py'
        return path

    def mapToRoot(self, name):
        """Like :meth:`mapToFile` but resolved from the importer's root path (experimental)."""
        stem = [part for test, part in izip(self.name.split(modsep), self.mapToSource(name).split(modsep)) if not test]
        self.log.debug("{}: {} {} {} ".format(self.ondent("MapToRoot"), stem, self.root, self.path[name]))
        test, item = tee(os.path.join(path, *stem) for path in self.root)
        path = None
        while next(test, None):
            path = next(item, None)
            if os.path.isdir(path):  # [ref:3]
                path = os.path.join(path, '__init__.py')
            else:
                path += '.py'
        return path

    def rename(self, name):
        """Deprecated: mask the second FQMN component, e.g. ``pkg.mod`` -> ``pkg._mod_``.

        Assumes the module sits exactly one level deep within the package.
        """
        return modsep.join([item if enum != 1 else "_{}_".format(item) for enum, item in enumerate(name.split(modsep))])

    def overlays(self):
        """Deprecated: list the modules patched under this overlay."""
        # pkgutil.walk_packages is apparently useful here
        modules = [os.path.splitext(item)[0] for item in os.listdir(self.root[0]) if os.path.splitext(item)[0] not in ["__init__", "__pycache__"]]
        if self.trap:
            return [self.mapSource(os.path.splitext(item)[0]) for item in modules]
        else:
            return modules

    def find_module(self, name, path=None):
        """PEP 302 finder hook (deprecated API; see importlib.util.find_spec).

        *name* is the FQMN; *path* is the parent package's ``__path__``.
        Returns ``self`` for overlay modules and for re-imports of trapped
        originals, ``None`` otherwise.
        """
        bits = name.split(modsep)
        self.log.debug("{}> {:<40} {:<80}".format(self.indent("F:" + self.name), name, str(path)))
        # Overlay layer: the request targets the overlay package itself.
        if name.startswith(self.name):
            self.path[name] = path
            self.log.debug("{0:12}: {1:<40} {2:<80}".format(self.undent("Finder"), self.mapToSource(name), __file__))
            return self
        # Standard layer: the overlay is re-importing a trapped original.
        # BUGFIX: was ``bits[-1:] in self.trap`` — a one-element *list* can
        # never equal a string member of self.trap, so the branch was dead.
        if bits[-1] in self.trap:
            for meta in sys.meta_path:
                if meta is not self:
                    self.temp = meta.find_module(name, path)
                    if self.temp:
                        # BUGFIX: "{}: {1:<40}" mixed automatic and manual
                        # field numbering, which raises ValueError.
                        self.log.debug("{0}: {1:<40}".format(self.ondent("Trapper"), name))
                        return self
        self.undent()
        return None

    def load_module(self, name):
        """PEP 302 loader hook (deprecated API; see importlib.machinery).

        Trapped names are handed to the finder captured in ``self.temp``
        and re-registered under the masked name; otherwise the overlay
        source is located and executed via ``SourceFileLoader`` and
        registered under the requested name.
        """
        self.log.debug("{}: {:<40}".format(self.indent("L:" + self.name), name))
        parent, _, module = name.partition(modsep)  # Was rpartition
        if name in self.trap:  # NOTE(review): fragile — trap holds bare names; confirm full names ever match
            # Handle source import of a trapped original module.
            # BUGFIX: was ``self.trap.pop(name)`` — list.pop() takes an
            # index, not a value, so this raised TypeError.
            self.trap.remove(name)
            self.log.debug(self.ondent("Pass Trapped"))
            temp = self.temp.load_module()
            sys.modules[self.mapTarget(name)] = temp
            self.log.debug("{}< {}".format(self.undent("Imported"), self.mapTarget(name)))
            return temp
        else:
            # Handle overlay import.
            if module in sys.modules:  # already imported
                return sys.modules[module]
            # Trap the bare name so the overlay may re-import the original.
            self.trap.append(module)
            # Python 3.3/3.4 API — still a bit messy.
            file = self.mapToRoot(name)
            _name_ = self.mapToSource(name)
            root, stem = self.pathParts(self.mapToSource(name))
            self.log.debug("{}: {:18} -> {:18} {:80}".format(self.ondent("FileLoader"), root, stem, file))
            temp = machinery.SourceFileLoader(name, file).load_module()
            # Be wary: re-assigning names is finicky and has a rollover impact.
            sys.modules[name] = temp  # using sys.modules[module] fails
            self.log.debug("{}< {}".format(self.undent("Imported"), temp))
            return temp


if __name__ == "__main__":
    # This section is primarily intended for developers.
    # Setup Logging
    import logging
    logging.basicConfig(format='%(message)s')
    logger = logging.getLogger("__34__")
    logger.setLevel(logging.DEBUG)
    # Test-suite patterns (run manually via unittest discovery).
    import unittest
    tests = {
        "all":       'test*.py',
        "overlay":   '*Overlay.py',
        "uppercase": '*UpperCase.py',
        "tiers":     '*Tiers.py',
    }
    # test = 'all'
    # suite = unittest.TestLoader().discover('..', tests[test])
    # unittest.TextTestRunner(verbosity=1).run(suite)
    __root__ = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..\\help')
    sys.path.append(__root__)
    # Explicit root import exercised as a smoke test.
    from uppercase import tiers
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/34/substitution.py
substitution.py
Import in 3.4
=============

During the migration from Python 3.3 to Python 3.4 a number of API calls were updated, deprecating most of the ``imp`` library in favour of ``importlib``. Inconveniently :func:`importlib.spec_from_module` (or :func:`importlib.spec_from_loader`, I can't remember) seems to have been excluded in this version of python. Instead it was included in the next version, Python 3.5.

.. note :: API Changes

   Python now calls :func:`imp.find_loader`, which supersedes :func:`imp.find_module`, before calling :func:`imp.load_module`. This also saw the introduction of :func:`importlib.find_spec` which introduces the semaphore architecture.

:func:`find_spec`
-----------------

The aim of ``find_spec`` is to return a module specification. This may be done by calling `machinery.ModuleSpec` directly or by using one of the helper functions. The helper functions that are provided include ``util.spec_from_file`` and ``util.spec_from_loader``. The former is very strict about receiving a *file* name; *folder* names are not accepted. Specifically it will accept a package's init file e.g. ``.\\PACKAGE\\__init__.py`` or module(s) e.g. ``.\\MODULE.py`` but not package directories e.g. ``.\\PACKAGE``, which excludes namespaced packages.

The :func:`find_spec` function accepts both a module name and possibly a module path. The module name may be either relative or absolute, while the module path is always absolute. The :func:`util.resolve_name` function may be used to convert these arguments into a fully qualified module name (FQMN).

.. topic :: Example : Standard :func:`find_spec` behaviour

   It is helpful to review how the built in ``find_spec`` responds for different package and module configurations. In both cases we are loading a package under the path ``E:\\Python\\overlay\\``. The first setup represented a traditional package, with an `__init__.py` file; the resulting spec included a loader and the location of this file.
   The submodule search location listed a single path, though one can supposedly extend this by appending paths to the `__path__` variable within the `__init__.py` file. ::

      ModuleSpec(name='overlay', loader=<_frozen_importlib.SourceFileLoader object at 0x0000000001283BA8>, origin='E:\\Python\\overlay\\__init__.py', submodule_search_locations=['E:\\Python\\overlay'])

   The second setup represented a NameSpaced package; it excluded the `__init__.py` file, and the resulting spec had no loader and listed its origin as *namespace*. The submodule search location is now a ``_namespace`` object, which has list-like properties but prevents popping. ::

      ModuleSpec(name='overlay', loader=None, origin='namespace', submodule_search_locations=_NamespacePath(['E:\\Python\\overlay']))

   Had we been importing a module instead of a package then the `submodule_search_location` attribute would have been empty. One does not know how the other attributes would've differed.
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/34/import.rst
import.rst
====================
Apeman in Python 3.4
====================

This implementation was really tricky. The python documentation at the time was all over the place and referenced three different import implementations. The final implementation is somewhat of a hack as a result.

.. note ::

   There are currently four copies of the :file:`__34__.py` file. The :file:`__34__.original.py` file, and its successor :file:`__34__.substitution.py`, exploit the :ref:`Apeman:Module Substitution` strategy, which fails for nested package structures. The :file:`__34__.replacement.py` file supersedes these two files and is considered the `better` implementation but relies upon :ref:`Apeman:Module Replacement`, but it too does not handle nesting. Finally :file:`__34__.py` goes the whole hog and hacks up and replaces the ``builtin.__import__`` method, since the previous cases could not successfully handle nested packages.

.. figure :: ../figures/34/overlay.png
   :align: center
   :figwidth: 80%

   The Python 3.4 implementation hooks into the *Import* phase.

Ideal Implementation
--------------------

.. todo ::

   Merge this with the :ref:`Observations:Isolation` section

The ideal means of importing a module (See :ref:`Apeman:Ideal Import`) assumes that ``PACKAGE`` is an object (See :ref:`Observations:Isolation`) and that later imports would reference this object in some way. Python however performs each import in isolation, passing strings and not the objects to subsequent imports (See `Import Isolation`). It is possible to access these objects and make subsequent imports rely upon them but this is in general not done (See `Import References`).

Import
------

To perform an import one had to drop the underscores in the overlay and monkey patch ``__import__`` with OverlayImporter, a class, rather than a method.

Finders
-------

Loaders
-------

The stock loaders provided by python do not seem to allow one to install a module under an alternate name.
It is possible to load them under an alias however, as in ::

   class IMPORTER :
       def load_module() :
           ...
           temp = LOADER.load_module() # Installs module in sys.modules under LOADER.name
           sys.modules[ALIAS] = temp   # Installs module in sys.modules under ALIAS

This prevents one from doing things like ``sys.modules[ALIAS] = LOADER.load_module(ALIAS)``, which would be rather convenient. Actually this generates a ``KeyError`` since ``ALIAS`` and ``LOADER.name`` differ. The error results from ``LOADER.name`` not being registered within ``sys.modules`` and possibly relates to the built in importers failing to locate ``ALIAS`` within their scope.

The current strategy, although rather radical, involves replacing the registered module with the overlay. This works because the overlay retains a reference to the original module while the main code pulls in the overlay.

Implementation
--------------

The layer manager in this case simply hooks into the import portion of the import mechanism, as is shown in :ref:`import <fig:import>`.

.. automodule :: apeman.__34__
   :members:
   :member-order: bysource
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/docs/34/apeman.rst
apeman.rst
# System
import importlib
import inspect
import builtins
import os
from pathlib import Path
# Debugging
from pdb import set_trace as db
from pprint import pprint

# Separator used in fully qualified module names
modsep = '.'


class OverlayImporter(object):
    """Overlay importer that substitutes ``builtins.__import__`` wholesale.

    Instantiating this class installs the instance as the global import
    hook.  Imports whose (masked) name matches a module shipped inside the
    overlay package are redirected to ``<overlay>.<masked name>``; every
    other import is forwarded to the original ``__import__``.
    """

    def __init__(self, *args, name=None, path=None, root=None, _import_=__import__, **kvps):
        """Capture the overlay's name/root and hook the import statement.

        name     -- overlay package name (defaults to the instantiating module's name)
        root     -- overlay package directory (defaults to the instantiating module's folder)
        _import_ -- the import implementation to fall back on
        """
        super().__init__(*args, **kvps)
        self.mask = "{}"  # Was "_{}_"
        self.name = name or inspect.getmodule(inspect.stack()[1][0]).__name__
        self.root = Path(root or os.path.dirname(inspect.getmodule(inspect.stack()[1][0]).__file__))
        self.mods = self.modules()
        # Substitutes the import functionality process-wide.
        builtins.__import__ = self
        self.imp = _import_
        self.lom = []

    def __call__(self, name, *args):
        """Hooked replacement for the import statement.

        When invoked by the import machinery *args* is
        ``(globals, locals, fromlist, level)``; direct calls such as
        ``__import__(name)`` may omit some or all of them.

        [1] http://stackoverflow.com/a/37308413/958580
        """
        if self.mapToTarget(name) in self.mods.keys():
            # BUGFIX: guard the fromlist access — a bare ``args[2]`` raised
            # IndexError whenever the hook was called with fewer arguments
            # (e.g. a direct ``__import__(name)`` call).
            if len(args) > 2 and args[2]:
                # Treats the case where there is an import "from" list.
                # A little black magic: the args are passed through while
                # the request is re-routed to the overlay's qualified name.
                temp = self.imp(self.name + modsep + self.mapToTarget(name), *args)
            else:
                # Plain import: materialise every parent package in turn
                # and return the outermost one, mirroring __import__.
                temp = {part: importlib.import_module(self.name + modsep + self.mapToTarget(part)) for part in self.stack(name)}
                temp = temp[name.partition(modsep)[0]]
        else:
            temp = self.imp(name, *args)
        return temp

    def mapToTarget(self, name):
        """Mask each FQMN component, e.g. ``PACKAGE.MODULE`` -> ``_PACKAGE_._MODULE_``."""
        return modsep.join([self.mask.format(part) for part in name.split(modsep)])

    def modules(self):
        """List the overlays implemented within the root directory.

        Returns a mapping of dotted module names to their source files;
        ``__init__.py`` files are keyed by their package name.
        """
        ext = '.py'
        mod = lambda parts, ext: [part[:-len(ext)] if enum + 1 == len(parts) else part for enum, part in enumerate(parts)]
        lst = [(mod(file.relative_to(self.root).parts, ext), file) for file in self.root.rglob('*' + ext)]
        return {modsep.join(item[0][:-1]) if item[0][-1] == "__init__" else modsep.join(item[0]): item[1] for item in lst}

    def stack(self, name):
        """Expand a FQMN into its parent chain, e.g. ``a.b.c`` -> ``['a', 'a.b', 'a.b.c']``."""
        parts = name.split(modsep)
        return [modsep.join(parts[:i]) for i in range(1, len(parts) + 1)]


if __name__ == "__main__":
    import subprocess as su
    su.call(["C:\\Python\\64bit\\342\\python.exe", "E:\\Python\\layman\\complex\\__main__.py"])
else:
    # Importing this package installs the hook as a side effect (by design).
    OverlayImporter()
ApeMan
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/_complex_/overlay/__init__.py
__init__.py
from collections import defaultdict from collections import Counter from itertools import izip import cPickle import logging import random import re NOTAG = '_N_' log = logging.getLogger('apertag') class Weight(object): def __init__(self): self.weight = 0.0 self.total = 0.0 self.iterations = 0 def __repr__(self): return '<Weight w: {:f} t: {:f} i: {:d}>'.format( self.weight, self.total, self.iterations) def defaultweights(): return defaultdict(Weight) class AveragedPerceptron(object): def __init__(self): self.weights = defaultdict(defaultweights) self.tags = set([NOTAG]) self.i = 0 def update(self, weights): """ Adjust the weights for non-zero feature-tag pairs In order to average the weights later, the sum of all feature weights accross all updates need to be stored. To avoid adding the current weight of every unchanged feature on every update, we record the update iteration when a weight actually changes, and the next time the weight is about to be changed (or when training is completed) the current weight multiplied by the number of unrecorded updates is added to the total before proceeding. 
""" self.i += 1 for (feature, tag), weight in weights.iteritems(): self.tags.add(tag) if weight: w = self.weights[feature][tag] # Update the weight sum with the last registered weight # for every iteration since it was updated w.total += (self.i - w.iterations) * w.weight # Update the weight and the total w.weight += weight w.total += w.weight # Store the update iteration w.iterations = self.i def score(self, features, tag): score = 0 for feature in features: if feature in self.weights: if tag in self.weights[feature]: score += self.weights[feature][tag].weight return score def average(self): """ Average the weights across all updates and reset totals """ for feature, tags in self.weights.iteritems(): for tag, w in tags.iteritems(): # Make sure all updates are accounted for in the total w.total += (self.i - w.iterations) * w.weight w.weight = w.total/self.i if self.i else 0 # Reset the feature so it's suitable for re-training. w.total = w.weight w.iterations = 0 class Tagger(object): """ A sequence labeler using an averaged perceptron model To avoid making assumptions about what kind of sequence data you are labeling, or the format of your features, the input to the tagger is simply sequences of feature value sets. Each set of values represent an observation to receive a tag. A feature value can be any python type, as long as it can be hashed, but it's important to note that the the values are used only in a binary fashion, i.e. either they exist in the context of the item being tagged or not; the nature of the value has no impact on the decision. 
A simple example illustrating an NP-chunker: >>> t = Tagger() >>> t.train([[['POS=DT','WRD=the'],['POS=NN','WRD=dog']]],[['NP-B','NP-I']]) >>> t.tag([['POS=DT','WRD=the'],['POS=NN','WRD=dog']]) ['NP-B', 'NP-I'] There is one crucial exception to all this featuratory freedom: Any features wishing to make use of the actual output tags need to signal this by formatting their value as a string with special tags that will be replaced by the corresponding tags from the current context during tagging. The tag format is "<Tn>", where n is the negative index of the tag relative to the current position. For example, if you are training a POS-tagger and you have a feature that looks at the current word and the previous output tag, and the current word is "dog", the feature could be encoded as "<T1>:dog". The tagger will expand this using its predicted label context into something like "DT:dog" (depending on your tag set and feature format, of course). An example illustrating a POS-tagger with output label features: >>> t = Tagger() >>> t.train([[['POS -1:<T-1>','W:the'],['POS -1:<T-1>','W:dog']]],[['DT','NN']]) >>> t.tag([['POS -1:<T-1>','W:the'],['POS -1:<T-1>','W:dog']]) ['DT', 'NN'] It is most likely a good idea to use this format for training as well, even though you (hopefully) have the output tags yourself at that point, to ensure the features are identical across training and tagging. If you don't require output tags for any of your features, you can slightly increase performance (especially for non-string features) by setting expand_features=False. 
""" def __init__(self, model=None, beam_size=3, iterations=10, expand_features=True): if isinstance(model, file): log.info('Loading model from {:s}'.format(model.name)) model = cPickle.load(model) self.model = model or AveragedPerceptron() if beam_size < 1: raise Exception('Beam must be >= 1') self.beam_size = beam_size self.iterations = iterations self.expand_features = expand_features self.tag_p = re.compile(r'<T-?(\d+)>') def _expanded_features(self, features, prev_tags): """ Generator that adds context tags to features A feature can be expanded with output tags from the current context using tags of the format <Tn>, where n is the negative index of the tag relative to the current position. Only negative indexes are allowed, but they may be specified signless. Example: >>> t = Tagger() >>> list(t._expanded_features(['POS -1:<T-1>','WRD=dog'],['VB','DT'])) ['POS -1:DT', 'WRD=dog'] >>> list(t._expanded_features(['POS-TRI:<T3>:<T2>:<T1>'],['VB','DT'])) ['POS-TRI:_N_:VB:DT'] """ for feature in features: try: matches = self.tag_p.finditer(feature) except TypeError: # Only string features can be expanded pass else: for m in matches: i = int(m.group(1)) try: tag = prev_tags[-i] except IndexError: tag = NOTAG feature = feature.replace(m.group(), tag) yield feature def _expanded_feature_seq(self, feature_seq, tags): """ Generator of expanded features for an entire sequence """ for i, features in enumerate(feature_seq): yield self._expanded_features(features, tags[:i]) def _bag_sequence(self, feature_seq, tag_seq): """ Return a counted set of feature-tag pairs for the given sequence Counted feature sets are used during training to determine which features differed between the training sequence and the predicted sequence, so their weights can be updated accordingly. Features requiring tag context are expanded using the provided tags first. 
""" if self.expand_features: feature_seq = self._expanded_feature_seq(feature_seq, tag_seq) return Counter((f,t) for fs, t in izip(feature_seq, tag_seq) for f in fs) def train(self, feature_seqs, tag_seqs, iterations=None): """ Set the model parameters and optimize the weights Arguments: feature_seqs -- A sequence of training sequences, each consisting of a sequence of feature value sequences. tag_seqs -- A sequence of tag sequences, providing the labels for the feature sequences. Example: >>> t = Tagger() >>> t.train([[['POS=DT','WRD=the'],['POS=NN','WRD=dog']]],[['NP-B','NP-I']]) Starting with an empty model, the trainer labels the supplied training data, evaluates the result and updates the model based on its mistakes. This process is repeated for a fixed number of iterations, then the feature weights are all averaged and the model is ready. """ iterations = iterations or self.iterations log.info('Reading training sequences') seqs = zip(feature_seqs, tag_seqs) num_seqs = len(seqs) log.info('Start training using {:d} sequences'.format(num_seqs)) for i in range(iterations): correct_seqs = 0 # Using the same order for each iteration is bad. random.shuffle(seqs) for seq_idx, (feature_seq, gold_tags) in enumerate(seqs): log.debug('Tagging sequence {:d} of {:d}'.format(seq_idx, num_seqs)) predicted_tags = self.tag(feature_seq) if predicted_tags != gold_tags: # If the predicted tag sequence is not correct, # the weights need to be adjusted. This is done # by generating a counted set (bag) of all # active feature-tag pairs for both the training # sequence and the predicted sequence, and then # subtract each pair count in the prediction bag # from the corresponding count in the training # bag. This way feature-tag pairs from the # training set that were missing in the prediction # set gets upweighted, and pairs from the # prediction set that were not in the training set # get downweighted. 
gold_bag = self._bag_sequence(feature_seq, gold_tags) # Recreate the expanded features used to predict this # particular sequence and bag them. Better to recreate # the expanded features once per sequence during # training than to add complexity to the tagger. prediction_bag = self._bag_sequence(feature_seq, predicted_tags) gold_bag.subtract(prediction_bag) self.model.update(gold_bag) else: correct_seqs += 1 log.info('Finished training iteration: %d Correct seqs: %d', i+1, correct_seqs) log.info('Averaging weights') self.model.average() log.info('Done training') def tag(self, feature_seq): paths = [(0,[])] for features in feature_seq: candidates = [] for path in paths: # Add local tag context from this path to features # that require it if self.expand_features: path_features = list(self._expanded_features(features, path[1])) else: path_features = features for tag in self.model.tags: score = path[0] + self.model.score(path_features, tag) candidates.append((score, path[1]+[tag])) # Prune the candidates candidates.sort(reverse=True) paths = candidates[:self.beam_size] paths.sort(reverse=True) return paths[0][1] def tag_(self, feature_seq): tags = [] for features in feature_seq: if self.expand_features: features = list(self._expanded_features(features, path[1])) tags.append(max([(self.model.score(features, tag), tag) for tag in self.model.tags])[1]) return tags def export_model(self, f): if isinstance(f, basestring): f = open(f, 'w') cPickle.dump(self.model, f, cPickle.HIGHEST_PROTOCOL) f.close() if __name__ == '__main__': import argparse from itertools import tee def read_sequences(f): """ Generate sequences as lists of columns from the input file """ sequence = [] for line in f: line = line.strip('\n') if not line and sequence: yield sequence sequence = [] else: sequence.append(line.split('\t')) def split_sequences(f): """ Create separate iterators for feature columns and tag column """ s1, s2 = tee(read_sequences(f)) features = [[row[:-1] for row in s] for s in s1] 
tags = [[row[-1] for row in s] for s in s2] return features, tags def train(args): t = Tagger(beam_size=args.beam_size, iterations=args.iterations) features, tags = split_sequences(args.training_sequences) t.train(features, tags) with args.model as f: t.export_model(args.model) def tag(args): t = Tagger(model=args.model, beam_size=args.beam_size) with args.tags as f: if args.eval: i,c = 0,0 seqs, tag_seqs = split_sequences(args.sequences) for seq, gold_tags in izip(seqs, tag_seqs): i += len(gold_tags) tags = t.tag(seq) c += sum(p == g for p,g in izip(tags, gold_tags)) f.write('\n'.join(tags) + '\n\n') print 'Accuracy: {:.2f}'.format(c/float(i)) else: for seq in read_sequences(args.sequences): tags = t.tag(seq) f.write('\n'.join(tags) + '\n\n') def test(args): log.setLevel(logging.CRITICAL) import doctest doctest.testmod() parser = argparse.ArgumentParser() # Logging options g = parser.add_mutually_exclusive_group() g.add_argument('-v', '--verbose', action='store_true', default=False, help='Log everything.') g.add_argument('-q', '--quiet', action='store_true', default=False, help='Log nothing.') subparsers = parser.add_subparsers() # Training options p = subparsers.add_parser('train') p.add_argument('training_sequences', type=argparse.FileType('r'), help='Read training sequences from this file. ' 'A sequences consists of lines of tab delimited feature ' 'columns with the output tag in the last column. ' 'Sequences are separated by double newlines. ' 'Use - for stdin.') p.add_argument('model', type=argparse.FileType('w'), help='Write the resulting model to this file. 
Use - for stdout.') p.add_argument('-b', '--beam-size', type=int, default=3, help='Number of best paths to keep at each prediction step.') p.add_argument('-i', '--iterations', type=int, default=10, help='Number iterations used when training.') p.set_defaults(func=train) # Tagging options p = subparsers.add_parser('tag') p.add_argument('model', type=argparse.FileType('r'), help='Read model from this file.') p.add_argument('sequences', type=argparse.FileType('r'), help='Read feature sequences from this file. ' 'A sequences consists of lines of tab delimited feature ' 'columns. Sequences are separated by double newlines. ' 'Use - for stdin.') p.add_argument('tags', type=argparse.FileType('w'), help='Write the resulting output sequences in a single column ' 'to this file. Use - for stdout.') p.add_argument('-b', '--beam-size', type=int, default=3, help='Number of best paths to keep at each prediction step') p.add_argument('-e', '--eval', action='store_true', help='Indicates that the input data is labeled. Uses last column to calculate accuracy.') p.set_defaults(func=tag) # Test options p = subparsers.add_parser('test') p.set_defaults(func=test) args = parser.parse_args() # Setup logging logging.basicConfig() if args.verbose: log.setLevel(logging.DEBUG) elif args.quiet: log.setLevel(logging.CRITICAL) else: log.setLevel(logging.INFO) args.func(args)
Apertag
/Apertag-1.0.2.linux-x86_64.tar.gz/usr/local/lib/python2.7/dist-packages/apertag.py
apertag.py
# ApexTrackerPy [![PyPI version](https://badge.fury.io/py/ApexTrackerPy.svg)](https://badge.fury.io/py/ApexTrackerPy) [![Build](https://github.com/nerrog/ApexTrackerPy/actions/workflows/Build.yml/badge.svg)](https://github.com/nerrog/ApexTrackerPy/actions/workflows/Build.yml) [![CodeQL](https://github.com/nerrog/ApexTrackerPy/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/nerrog/ApexTrackerPy/actions/workflows/codeql-analysis.yml) [![Downloads](https://pepy.tech/badge/apextrackerpy)](https://pepy.tech/project/apextrackerpy) [![Downloads](https://pepy.tech/badge/apextrackerpy/month)](https://pepy.tech/project/apextrackerpy) [![Downloads](https://pepy.tech/badge/apextrackerpy/week)](https://pepy.tech/project/apextrackerpy) Python library that calls the ApexLegends tracker API ApexLegendsトラッカーAPIを呼び出すpythonライブラリ # Install It will be installed by `pip3 install ApexTrackerPy`. You can also use git to install `pip3 install git+https://github.com/nerrog/ApexTrackerPy.git` `pip3 install ApexTrackerPy`でインストールされます gitを使用してインストールすることもできます `pip3 install git+https://github.com/nerrog/ApexTrackerPy.git` # usage [See the Github Wiki](https://github.com/nerrog/ApexTrackerPy/wiki) [Github Wiki](https://github.com/nerrog/ApexTrackerPy/wiki)を参照してください
ApexTrackerPy
/ApexTrackerPy-1.8.3.tar.gz/ApexTrackerPy-1.8.3/README.md
README.md
# Apfloat Apfloat is an arbitrary precision number library which supports numbers with any values and many operations with them. # Installation pip install Apfloat # Usage To use the library install it using the command shown in "Installation" section. Then, read the instructions below regarding how to use operations with Apfloat. ## Addition Addition of Apfloat objects can be implemented using the '+' operator. Input: Apfloat("5.12") + Apfloat("6.13") Output: Apfloat("11.25") ## Subtraction Subtraction of Apfloat objects can be implemented using the '-' operator. Input: Apfloat("7.22") - Apfloat("6.13") Output: Apfloat("1.09") ## Multiplication Multiplication of Apfloat objects can be implemented using the '*' operator. Input: Apfloat("5") * Apfloat("3") Output: Apfloat("15") ## Division Division of Apfloat objects can be implemented using the '/' operator. Input: Apfloat("5") / Apfloat("2") Output: Apfloat("2.5") ## Integer Division Integer Division of Apfloat objects can be implemented using the '//' operator. Input: Apfloat("5") // Apfloat("2") Output: Apfloat("2") ## Power Power of Apfloat objects can be implemented using the '**' operator. Input: Apfloat("5") ** Apfloat("2") Output: Apfloat("25") ## Modulo Modulo of Apfloat objects can be implemented using the '%' operator. Input: Apfloat("5") % Apfloat("2") Output: Apfloat("1") ## Convert to Integer Input: int(Apfloat("5.33")) Output: 5 ## Convert to Float Input: float(Apfloat("5.00")) Output: ~5.00 ## Convert to mpf Input: mpf(Apfloat("5.00")) Output: mpf("5.00") ## Square Input: Apfloat("3").squared() Output: Apfloat("9") ## Cube Input: Apfloat("3").cubed() Output: Apfloat("27") ## Trigonometric Functions sin, cos, tan, cosec, sec, and cot are usable trigonometric functions. They are called using the code with the format {trigonometric function name}(a big number object). For example, sin(Apfloat("0.5")) to get the value of sin(0.5). 
## Hyperbolic Functions sinh, cosh, tanh, cosech, sech, and coth are usable hyperbolic functions. They are called using the code with the format {hyperbolic function name}(a big number object). For example, sinh(Apfloat("0.5")) to get the value of sinh(0.5). ## Factorial The function factorial(apfloat: Apfloat) will quickly get the factorial of any number. Input: factorial(Apfloat("6")) Output: Apfloat("720") ## Logarithms The logarithm of any number using any base can be quickly achieved by using the function log_base(apfloat: Apfloat, base: Apfloat or mpf or float or int) where apfloat is an Apfloat object and base is the base used for the logarithm operation. ## Square Root sqrt(apfloat: Apfloat) gets the square root of any number. Input: sqrt(Apfloat("81")) Output: Apfloat("9") ## Cube Root cbrt(apfloat: Apfloat) gets the cube root of any number. Input: cbrt(Apfloat("27")) Output: Apfloat("3") ## Checking for Prime Numbers is_prime(apfloat: Apfloat) checks whether a number is prime or not. Input: is_prime(Apfloat("7")) Output: True ## Getting GCD of Two Numbers gcd(a: Apfloat, b: Apfloat) gets the GCD of numbers a and b. Input: gcd(Apfloat("12"), Apfloat("8")) Output: Apfloat("4") ## Getting LCM of Two Numbers lcm(a: Apfloat, b: Apfloat) gets the LCM of numbers a and b. Input: lcm(Apfloat("6"), Apfloat("8")) Output: Apfloat("24")
Apfloat
/Apfloat-1.1.tar.gz/Apfloat-1.1/README.md
README.md
__author__ = "满眼乱世妖娆" __version__ = "0.0.1" try: from StringIO import StringIO except ImportError: from io import StringIO import unittest, time, sys, datetime from multiprocessing import Pool, Lock # from xml.sax import saxutils try: reload(sys) sys.setdefaultencoding('utf-8') except NameError: pass class OutputRedirector(object): """ Wrapper to redirect stdout or stderr """ def __init__(self, fp): self.fp = fp def write(self, s): self.fp.write(s) def writelines(self, lines): self.fp.writelines(lines) def flush(self): self.fp.flush() stdout_redirector = OutputRedirector(sys.stdout) stderr_redirector = OutputRedirector(sys.stderr) TestResult = unittest.TestResult class _TestResult(TestResult): # note: _TestResult is a pure representation of results. # It lacks the output and reporting ability compares to unittest._TextTestResult. def __init__(self, verbosity=1): TestResult.__init__(self) self.stdout0 = None self.stderr0 = None self.success_count = 0 self.failure_count = 0 self.error_count = 0 self.verbosity = verbosity # sdsdsdsdsdsdsdsdsdsds import io self.outputBuffer = io.StringIO() self.test_start_time = round(time.time(), 2) self.result = [] def startTest(self, test): TestResult.startTest(self, test) # just one buffer for both stdout and stderr self.outputBuffer = StringIO() stdout_redirector.fp = self.outputBuffer stderr_redirector.fp = self.outputBuffer self.stdout0 = sys.stdout self.stderr0 = sys.stderr sys.stdout = stdout_redirector sys.stderr = stderr_redirector def complete_output(self): """ Disconnect output redirection and return buffer. Safe to call multiple times. """ if self.stdout0: sys.stdout = self.stdout0 sys.stderr = self.stderr0 self.stdout0 = None self.stderr0 = None return self.outputBuffer.getvalue() def stopTest(self, test): # Usually one of addSuccess, addError or addFailure would have been called. # But there are some path in unittest that would bypass this. # We must disconnect stdout in stopTest(), which is guaranteed to be called. 
self.complete_output() def addSuccess(self, test): self.success_count += 1 TestResult.addSuccess(self, test) output = self.complete_output() self.result.append((0, test, output, '')) if self.verbosity > 1: # sys.stderr.write('ok') sys.stderr.write(str(test)) # sys.stderr.write('\n') else: # sys.stderr.write('.') pass def addError(self, test, err): self.error_count += 1 TestResult.addError(self, test, err) _, _exc_str = self.errors[-1] output = self.complete_output() self.result.append((2, test, output, _exc_str)) if self.verbosity > 1: # sys.stderr.write('E ') sys.stderr.write(str(test)) # sys.stderr.write('\n') else: # sys.stderr.write('E') pass def addFailure(self, test, err): self.failure_count += 1 TestResult.addFailure(self, test, err) _, _exc_str = self.failures[-1] output = self.complete_output() self.result.append((1, test, output, _exc_str)) if self.verbosity > 1: # sys.stderr.write('F ') sys.stderr.write(str(test)) sys.stderr.write('\n') else: # sys.stderr.write('F') pass class ReportSource(object): # 写头部 ReportHeader = """<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <title>%(MultithreadingTestReport)s</title> <meta name="generator" content="HTMLTestReportCN 0.8.3"/> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/> <link href="http://libs.baidu.com/bootstrap/3.0.3/css/bootstrap.min.css" rel="stylesheet"> <script src="http://libs.baidu.com/jquery/2.0.0/jquery.min.js"></script> <script src="http://libs.baidu.com/bootstrap/3.0.3/js/bootstrap.min.js"></script> <style type="text/css" media="screen">\n""" ReportHeaderCSS = """ body { font-family: Microsoft YaHei, Tahoma, arial, helvetica, sans-serif; padding: 20px; font-size: 100%; } table { font-size: 100%; } /* -- heading ---------------------------------------------------------------------- */ .heading { margin-top: 0ex; margin-bottom: 
1ex; } .heading .description { margin-top: 4ex; margin-bottom: 6ex; } /* -- report ------------------------------------------------------------------------ */ #total_row { font-weight: bold; } .passCase { color: #5cb85c; } .failCase { color: #d9534f; font-weight: bold; } .errorCase { color: #f0ad4e; font-weight: bold; } .hiddenRow { display: none; } .testcase { margin-left: 2em; } </style> </head>\n""" ReportSummary = """<body> <div class='heading'> <h1 style="font-family: Microsoft YaHei">%(MultithreadingTestReport)s</h1> <p class='attribute'><strong>测试人员 : </strong> %(Tester)s </p> <p class='attribute'><strong>开始时间 : </strong> %(StartTime)s</p> <p class='attribute'><strong>合计耗时 : </strong> %(TotalTime)s</p> <p class='attribute'><strong>测试结果 : </strong> %(TestResult)s</p> <p class='description'></p> </div>\n""" ReportCaseSummary = """<p id='show_detail_line'> <a class="btn btn-primary" href='javascript:showCase(4)'> %(PassRate)s</a> <a class="btn btn-success" href='javascript:showCase(0)'> %(PassNum)s</a> <a class="btn btn-danger" href='javascript:showCase(2)'> %(fail)s</a> <a class="btn btn-info" href='javascript:showCase(3)'> %(CaseNum)s</a> </p> <table id='result_table' class="table table-condensed table-bordered table-hover"> <colgroup> <col align='left' /> <col align='right' /> <col align='right' /> <col align='right' /> <col align='right' /> </colgroup> <tr id='header_row' class="text-center active" style="font-weight: bold;font-size: 14px;"> <td>用例集/测试用例</td> <td>总计</td> <td>通过</td> <td>失败</td> <td>点击查看</td> </tr>\n""" # 写每一个case头部 ReportSuccessCaseSummary = """<tr class='success'> <td>%(TestName)s</td> <td class="text-center">%(Total)s</td> <td class="text-center">%(Pass)s</td> <td class="text-center">%(Fail)s</td> <td class="text-center"><a href="javascript:showClassDetail('%(CaseSequence)s',%(CountCase)s)" class="detail" id='%(CaseSequence)s'>点击查看</a></td> </tr>\n""" ReportFailCaseSummary = """<tr class='warning'> <td>%(TestName)s</td> <td 
class="text-center">%(Total)s</td> <td class="text-center">%(Pass)s</td> <td class="text-center">%(Fail)s</td> <td class="text-center"><a href="javascript:showClassDetail('%(CaseSequence)s',%(CountCase)s)" class="detail" id='%(CaseSequence)s'>点击查看</a></td> </tr>\n""" ReportSuccessCases = f"""<tr id='pt%(DivNumber)s' class='hiddenRow'> <td class='passCase'><div class='testcase'>%(CaseName)s</div></td> <td colspan='5' align='center'> <button id='btn_pt1_1' type="button" class="btn-xs btn btn-success" data-toggle="collapse" data-target='#div_pt%(DivNumber)s'>通过</button> <div id='div_pt%(DivNumber)s' class="collapse in"> <pre> %(Detail)s </pre> </div> </td> </tr>\n""" ReportFailureCases = f"""<tr id='et%(DivNumber)s' class='none'> <td class='errorCase'><div class='testcase'>%(CaseName)s</div></td> <td colspan='5' align='center'> <button id='btn_et1_1' type="button" class="btn-xs" data-toggle="collapse" data-target='#div_et%(DivNumber)s'>失败</button> <div id='div_et%(DivNumber)s' class="collapse in"> <pre> %(Detail)s </pre> </div> </td> </tr>\n""" ReportFloatNum = """<tr id='total_row' class="text-center info"> <td>总计</td> <td>%(CaseNum)s</td> <td>%(PassNum)s</td> <td>%(fail)s</td> <td>%(PassRate)s</td> </tr> </table>\n""" ReportJSCode = """<div style=" position:fixed;right:50px; bottom:30px; width:20px; height:20px;cursor:pointer"> <a href="#"><span class="glyphicon glyphicon-eject" style="font-size:30px;" aria-hidden="true"> </span></a></div> <script language="javascript" type="text/javascript"> output_list = Array(); $("button[id^='btn_pt']").addClass("btn btn-success"); $("button[id^='btn_ft']").addClass("btn btn-danger"); $("button[id^='btn_et']").addClass("btn btn-warning"); /*level 增加分类并调整,增加error按钮事件 --Findyou v0.8.2.3 0:Pass //pt none, ft hiddenRow, et hiddenRow 1:Failed //pt hiddenRow, ft none, et hiddenRow 2:Error //pt hiddenRow, ft hiddenRow, et none 3:All //pt none, ft none, et none 4:Summary //all hiddenRow */ //add Error button event --Findyou v0.8.2.3 
function showCase(level) { trs = document.getElementsByTagName("tr"); for (var i = 0; i < trs.length; i++) { tr = trs[i]; id = tr.id; if (id.substr(0, 2) == 'ft') { if (level == 0 || level == 2 || level == 4) { tr.className = 'hiddenRow'; } else { tr.className = ''; } } if (id.substr(0, 2) == 'pt') { if (level == 1 || level == 2 || level == 4) { tr.className = 'hiddenRow'; } else { tr.className = ''; } } if (id.substr(0, 2) == 'et') { if (level == 0 || level == 1 || level == 4) { tr.className = 'hiddenRow'; } else { tr.className = ''; } } } //加入【详细】切换文字变化 --Findyou detail_class = document.getElementsByClassName('detail'); //console.log(detail_class.length) if (level == 3) { for (var i = 0; i < detail_class.length; i++) { detail_class[i].innerHTML = "点击收起" } } else { for (var i = 0; i < detail_class.length; i++) { detail_class[i].innerHTML = "点击查看" } } } //add Error button event --Findyou v0.8.2.3 function showClassDetail(cid, count) { var id_list = Array(count); var toHide = 1; for (var i = 0; i < count; i++) { tid0 = 't' + cid.substr(1) + '_' + (i + 1); tid = 'f' + tid0; tr = document.getElementById(tid); if (!tr) { tid = 'p' + tid0; tr = document.getElementById(tid); } if (!tr) { tid = 'e' + tid0; tr = document.getElementById(tid); } id_list[i] = tid; if (tr.className) { toHide = 0; } } for (var i = 0; i < count; i++) { tid = id_list[i]; //修改点击无法收起的BUG,加入【详细】切换文字变化 --Findyou if (toHide) { document.getElementById(tid).className = 'hiddenRow'; document.getElementById(cid).innerText = "点击查看" } else { document.getElementById(tid).className = ''; document.getElementById(cid).innerText = "点击收起" } } } function html_escape(s) { s = s.replace(/&/g, '&amp;'); s = s.replace(/</g, '&lt;'); s = s.replace(/>/g, '&gt;'); return s; } </script> </body> </html>\n""" """ 霸气的包名:Api_BiuBiu stream:报告路径,str属性,不要用 with open verbosity:报告等级,命令行展示内容我清空了。此参数用处不大 title:报告title executor:执行人名字 """ class Api_BiuBiu(ReportSource): def __init__(self, stream="测试报告.html", verbosity=1, title=None, 
executor=None): self.stream = stream self.verbosity = verbosity self.title = title if title else "多线程_测试报告" self.executor = str(executor) if executor else "外星人" self.startTime = str(datetime.datetime.now()) self.Test_consuming = 0 self.EveryCaseResults = {} self.ReportInformation = {} self.caseInformation = {} self.bottom = {} """ 根据是否有addTest属性判断是TestSuite还是class 然后启动多进程,列表多长启动多少进程,为了快 因为python的PLI全局解释锁,所以用进程,不用线程 """ def RunEveryCase(self, test): if not isinstance(test, list): print("\n为啥不传给我列表 ? 淘气 ~~~") exit(1) if len(test) <= 0: print("\n进程都准备好了你给我空列表 ? 不乖~~~") exit(1) work_list = [] pool = Pool(processes=len(test)) for i in test: if hasattr(i, 'addTest'): work_list.append(pool.apply_async(func=self.WorkingProcess, args=(i,))) else: suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(i)) work_list.append(pool.apply_async(func=self.WorkingProcess, args=(suite,))) pool.close() pool.join() for n in work_list: self.ClassIfication(n.get()) self.GenerateReport() """ 结果归类方便写入html """ def ClassIfication(self, Result): self.Test_consuming += Result[0] Sign = None for case in Result[1]: CaseName = str(case[1]).split()[1][1:-1] if not self.EveryCaseResults.get(CaseName): self.EveryCaseResults[CaseName] = [] Sign = CaseName self.EveryCaseResults[CaseName].append(case) else: if Sign == CaseName: self.EveryCaseResults[CaseName].append(case) else: self.EveryCaseResults[CaseName] = [] Sign = CaseName self.EveryCaseResults[CaseName].append(case) """ 这里是核心的多进程执行方法 把执行结果返回给主进程进行统计 """ def WorkingProcess(self, test): start_time = datetime.datetime.now() result = _TestResult(self.verbosity) test(result) end_time = datetime.datetime.now() return (end_time - start_time).seconds, result.result def Statistics(self): fail = 0 success = 0 for k, y in self.EveryCaseResults.items(): for i in y: if i[0] == 0: success += 1 else: fail += 1 CaseNum = sum([success, fail]) PassRate = round((success / sum([success, fail])) * 100, 2) TestResult = f"共{CaseNum},通过{success},错误{fail},通过率 = 
{PassRate} %" ReportInformation = {"Tester": self.executor, "StartTime": self.startTime, "TotalTime": f"{self.Test_consuming} 秒 (每个进程时间的和)", "TestResult": TestResult,"MultithreadingTestReport":self.title} caseInformation = {"CaseNum": f"所有数量: {CaseNum}", "PassNum": f"通过数量: {success}", "fail": f"失败数量: {fail}", "PassRate": f"通过率: {PassRate}%"} bottom = {"CaseNum": f"{CaseNum}", "PassNum": f"{success}", "fail": f"{fail}", "PassRate": f"{PassRate}%"} self.ReportInformation = ReportInformation self.caseInformation = caseInformation self.bottom = bottom def PassRateOfTest(self, OneTest): Pass = 0 Fail = 0 for i in OneTest: if i[0] == 0: Pass += 1 else: Fail += 1 PassOrNot = r"warning" if Fail > 0 else r"success" Information = {"Total": f"{sum([Pass, Fail])}", "Pass": f"{Pass}", "Fail": f"{Fail}", "CountCase": f"{len(OneTest)}", "PassOrFail": PassOrNot} return Information def GenerateReport(self): self.Statistics() SuiteNmuber = 0 with open(file=self.stream, mode="w", encoding="utf-8")as f: f.write(self.ReportHeader % {"MultithreadingTestReport":self.title}) f.write(self.ReportHeaderCSS) f.write(self.ReportSummary % self.ReportInformation) f.write(self.ReportCaseSummary % self.caseInformation) for k, y in self.EveryCaseResults.items(): SuiteNmuber += 1 CaseNumber = 0 OneCase = self.PassRateOfTest(y) OneCase["TestName"] = k OneCase["CaseSequence"] = f"c{SuiteNmuber}" if OneCase["PassOrFail"] == "success": del OneCase["PassOrFail"] f.write(self.ReportSuccessCaseSummary % OneCase) else: del OneCase["PassOrFail"] f.write(self.ReportFailCaseSummary % OneCase) for i in y: CaseNumber += 1 CaseDetail = {"DivNumber": f"{SuiteNmuber}_{CaseNumber}", "CaseName": str(i[1]).split()[0], "Detail": f"{i[2]}\n{i[3]}"} if i[0] == 0: f.write(self.ReportSuccessCases % CaseDetail) else: f.write(self.ReportFailureCases % CaseDetail) f.write(self.ReportFloatNum % self.bottom) f.write(self.ReportJSCode) print("\n-----------") print('报告生成成功')
ApiBiuBiu
/ApiBiuBiu-0.0.2-py3-none-any.whl/api_biubiu/api_biubiu.py
api_biubiu.py
Changelog ========= 1.3 (2016-05-08) ---------------- * Small fixes 1.2 (2014-01-22) ---------------- * `Feature #17 <https://github.com/SolutionsCloud/apidoc/issues/17>`_ Merge ApiDoc's binaries * `Feature #18 <https://github.com/SolutionsCloud/apidoc/issues/18>`_ Merge cli arguments -f and -d in one argument -i (input) * `Feature #21 <https://github.com/SolutionsCloud/apidoc/issues/21>`_ Add patternProperties and additionalProperties in objects * `Feature #22 <https://github.com/SolutionsCloud/apidoc/issues/22>`_ "properties" and "items" are optionals * `Feature #23 <https://github.com/SolutionsCloud/apidoc/issues/23>`_ The property "type" is optional * `Bug #26 <https://github.com/SolutionsCloud/apidoc/issues/26>`_ Searchbox hide wrongly some elements * `Bug #27 <https://github.com/SolutionsCloud/apidoc/issues/27>`_ Keyboard shortcut for diff on search's filtered items * `Feature #32 <https://github.com/SolutionsCloud/apidoc/issues/32>`_ Enhances the theme of the readthedocs 1.1 (2013-08-28) ---------------- * `Feature #6 <https://github.com/SolutionsCloud/apidoc/issues/6>`_ Reduce resources size * `Feature #8 <https://github.com/SolutionsCloud/apidoc/issues/8>`_ DiffMode on small devices * `Feature #9 <https://github.com/SolutionsCloud/apidoc/issues/9>`_ Link to the top * `Bug #10 <https://github.com/SolutionsCloud/apidoc/issues/10>`_ The keyword optional override cod * `Feature #11 <https://github.com/SolutionsCloud/apidoc/issues/11>`_ Inprove detected types * `Feature #12 <https://github.com/SolutionsCloud/apidoc/issues/12>`_ Add rules on properties * `Feature #13 <https://github.com/SolutionsCloud/apidoc/issues/13>`_ Add PATCH and OPTION method * `Feature #14 <https://github.com/SolutionsCloud/apidoc/issues/14>`_ Replace shortcup popin by bootstrap modal * `Feature #16 <https://github.com/SolutionsCloud/apidoc/issues/16>`_ Upgrade to Bootstrap 3 * `Bug #19 <https://github.com/SolutionsCloud/apidoc/issues/19>`_ Add constraints in sampled objects * `Bug #31 
<https://github.com/SolutionsCloud/apidoc/issues/31>`_ Error "TemplateNotFound: default.html" in version 1.1 1.0 (2013-07-23) ---------------- * Initial release
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/CHANGES.rst
CHANGES.rst
ApiDoc ====== .. image:: https://travis-ci.org/SolutionsCloud/apidoc.png?branch=master :target: https://travis-ci.org/SolutionsCloud/apidoc .. image:: https://coveralls.io/repos/SolutionsCloud/apidoc/badge.png :target: https://coveralls.io/r/SolutionsCloud/apidoc .. image:: https://pypip.in/v/ApiDoc/badge.png :target: https://preview-pypi.python.org/project/ApiDoc .. image:: https://pypip.in/d/ApiDoc/badge.png :target: https://preview-pypi.python.org/project/ApiDoc .. image:: https://requires.io/github/SolutionsCloud/apidoc/requirements.png?branch=develop :target: https://requires.io/github/SolutionsCloud/apidoc/requirements/?branch=develop Summary ------- `ApiDoc <http://solutionscloud.github.io/apidoc>`_ is a documentation generator designed for API built with Python and given by `SFR Business Team <http://www.sfrbusinessteam.fr>`_. .. image:: https://raw.github.com/SolutionsCloud/apidoc/master/docs/source/_static/screenshot_sample.png * Demo: http://solutionscloud.github.io/apidoc/demo * Home Page: http://solutionscloud.github.io/apidoc * Documentation: http://apidoc.rtfd.org * Bug Tracker: https://github.com/SolutionsCloud/apidoc/issues * GitHub: https://github.com/SolutionsCloud/apidoc * PyPI: https://preview-pypi.python.org/project/ApiDoc * License: GPLv3+ Installation ------------ The fastest way to get started is by using the command line tool .. code-block:: console $ sudo apt-get install python3-pip $ pip3 install apidoc If the package python3-pip does not exists .. code-block:: console $ sudo apt-get install python3-setuptools $ sudo easy_install3 pip $ sudo pip3-2 install apidoc Try it ------ You can download a sample file and try to render it documentation .. code-block:: console $ mkdir apidoc $ cd apidoc $ wget https://raw.github.com/SolutionsCloud/apidoc/master/example/demo/source.yml $ apidoc -i source.yml -o output/index.html $ firefox output/index.html
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/README.rst
README.rst
import os import sys import re import json from distutils.cmd import Command from setuptools.command.test import test def read_requirements(file_name): return [i.strip() for i in open( os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "requirements", "%s" % file_name)) ).readlines() if len(i.strip()) > 0] def patch_requirements(requirements, file_name): modified_requirements = read_requirements(file_name) modified_packages = [re.split('[=><]+', x)[0] for x in modified_requirements] patched_requirements = modified_requirements return patched_requirements + [x for x in requirements if re.split('[=><]+', x)[0] not in modified_packages] class ApiDocTest(test): def finalize_options(self): test.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import pytest errno = pytest.main(self.test_args) sys.exit(errno) class Resource(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): resource_dir = os.path.realpath( os.path.join( os.path.dirname(os.path.dirname(__file__)), 'apidoc', 'template', 'resource' ) ) resource_src_dir = os.path.realpath(os.path.join(resource_dir, 'src')) resource_src_js_dir = os.path.realpath(os.path.join(resource_src_dir, 'js')) resource_src_css_dir = os.path.realpath(os.path.join(resource_src_dir, 'css')) resource_src_less_dir = os.path.realpath(os.path.join(resource_src_dir, 'less')) resource_js_dir = os.path.realpath(os.path.join(resource_dir, 'js')) resource_css_dir = os.path.realpath(os.path.join(resource_dir, 'css')) try: os.system('wget -O "%s" "%s"' % ('/tmp/bootstrap-raw', 'http://getbootstrap.com/assets/js/raw-files.js')) assert os.path.exists('/tmp/bootstrap-raw'), 'Downloaded bootstrap zip not found' with open('/tmp/bootstrap-raw') as f: js_raw = json.loads(f.readline()[11:]) less_raw = json.loads(f.readline()[13:]) js_full = '' for js_file in ["transition.js", "modal.js", "scrollspy.js", "tooltip.js", "popover.js", "affix.js"]: js_full 
+= js_raw[js_file] with open('%s/bootstrap.js' % resource_src_js_dir, 'w') as f: f.write(js_full) if not os.path.exists('%s/bootstrap' % resource_src_less_dir): os.mkdir('%s/bootstrap' % resource_src_less_dir) less_full = '' for less_file in less_raw.keys(): with open('%s/bootstrap/%s' % (resource_src_less_dir, less_file), 'w') as f: f.write(less_raw[less_file]) for less_file in ['variables.less', 'mixins.less', 'type.less', 'buttons.less', 'button-groups.less', 'theme.less', 'scaffolding.less', 'code.less', 'grid.less', 'utilities.less', 'normalize.less', 'component-animations.less', 'popovers.less', 'navbar.less', 'responsive-utilities.less', 'jumbotron.less', 'tooltip.less', 'tables.less', 'wells.less', 'forms.less', 'print.less', 'navs.less', 'modals.less', 'close.less']: less_full += '@import "bootstrap/%s";\n' % less_file less_full += '@import "variables.less";\n' with open('%s/bootstrap.less' % resource_src_less_dir, 'w') as f: f.write(less_full) os.system('lessc %s %s' % ('%s/bootstrap.less' % resource_src_less_dir, '%s/bootstrap.css' % resource_src_css_dir)) finally: os.remove('/tmp/bootstrap-raw') pass os.system('wget -O "%s" "%s"' % ('%s/jquery.min.js' % resource_src_js_dir, 'http://code.jquery.com/jquery-2.0.3.min.js')) assert os.path.exists('%s/jquery.min.js' % resource_src_js_dir), 'Downloaded jquery file not found' os.system('wget -O "%s" "%s"' % ('%s/mousetrap.min.js' % resource_src_js_dir, 'http://cdn.craig.is/js/mousetrap/mousetrap.min.js')) assert os.path.exists('%s/mousetrap.min.js' % resource_src_js_dir), 'Downloaded mousetrap file not found' os.system('lessc -x "%s/apidoc.less" "%s/apidoc.css"' % (resource_src_less_dir, resource_src_css_dir)) for folder in [resource_css_dir, resource_js_dir]: if not os.path.exists(folder): os.makedirs(folder) self._compress("css", ["%s/bootstrap.css" % resource_src_css_dir, "%s/apidoc.css" % resource_src_css_dir, "%s/font.css" % resource_css_dir], "%s/combined.css" % resource_css_dir) assert 
os.path.exists('%s/combined.css' % resource_css_dir), 'Combined css file not found' self._compress("css", ["%s/bootstrap.css" % resource_src_css_dir, "%s/apidoc.css" % resource_src_css_dir, "%s/font-embedded.css" % resource_css_dir], "%s/combined-embedded.css" % resource_css_dir) assert os.path.exists('%s/combined-embedded.css' % resource_css_dir), 'Combined embedded css file not found' self._compress("js", ["%s/jquery.min.js" % resource_src_js_dir, "%s/bootstrap.js" % resource_src_js_dir, "%s/mousetrap.min.js" % resource_src_js_dir, "%s/apidoc.js" % resource_src_js_dir], "%s/combined.js" % resource_js_dir) assert os.path.exists('%s/combined.js' % resource_js_dir), 'Combined js file not found' def _merge_files(self, input_files, output_file): """Combine the input files to a big output file""" # we assume that all the input files have the same charset with open(output_file, mode='wb') as out: for input_file in input_files: out.write(open(input_file, mode='rb').read()) def _compress(self, format, input_files, output_file): import yuicompressor import tempfile handle, merged_filename = tempfile.mkstemp(prefix='minify') os.close(handle) try: self._merge_files(input_files, merged_filename) os.system('java -jar %s --type %s -o %s --charset utf-8 %s' % (yuicompressor.get_jar_filename(), format, output_file, merged_filename)) finally: os.remove(merged_filename)
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/setup_cmd/__init__.py
__init__.py
from apidoc.lib.util.enum import Enum from apidoc.object.source_raw import Object as ObjectRaw from apidoc.object.source_raw import Constraintable from apidoc.object import Comparable class Root(): """Root object of sources elements for templates """ def __init__(self): """Class instantiation """ super().__init__() self.configuration = None self.versions = [] self.method_categories = [] self.type_categories = [] class Element(): """Element """ def __init__(self, element): """Class instantiation """ self.name = element.name self.description = element.description class ElementVersioned(): """Element """ def __init__(self, element): """Class instantiation """ self.name = element.name self.description = [] class Version(Element, Comparable): """Element Version """ def __init__(self, version): """Class instantiation """ super().__init__(version) self.label = version.label self.uri = version.uri self.major = version.major self.minor = version.minor self.status = version.status def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (int(self.major), int(self.minor), str(self.label), str(self.name)) class Category(Element, Comparable): """Element Category """ def __init__(self, category): """Class instantiation """ super().__init__(category) self.label = category.label self.order = category.order def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (int(self.order), str(self.label), str(self.name)) class TypeCategory(Category): """Element TypeCategory """ def __init__(self, category): """Class instantiation """ super().__init__(category) self.types = [] class MethodCategory(Category): """Element MethodCategory """ def __init__(self, category): """Class instantiation """ super().__init__(category) self.methods = [] class Method(ElementVersioned, Comparable): def __init__(self, method): """Class instantiation """ super().__init__(method) self.label = 
method.label self.method = method.method self.code = [] self.full_uri = [] self.absolute_uri = [] self.changes_status = {} self.request_headers = [] self.request_parameters = [] self.request_body = [] self.response_codes = [] self.response_body = [] self.versions = [] self.samples = {} @property def request_uri_parameters(self): return [x for x in self.request_parameters if not x.value.is_query_string] @property def request_query_string_parameters(self): return [x for x in self.request_parameters if x.value.is_query_string] def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.label), str(self.name)) class MultiVersion(Comparable): class Changes(Enum): """List of availables Change for this element """ none = 1 new = 2 updated = 3 deleted = 4 def __init__(self, value, version): self.versions = [version] self.value = value def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (self.value, sorted(self.versions)) class Parameter(Element, Comparable): def __init__(self, parameter): """Class instantiation """ super().__init__(parameter) self.type = parameter.type self.optional = parameter.optional self.generic = parameter.generic self.is_internal = self.type in ObjectRaw.Types or self.type is ObjectRaw.Types.type def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (not self.generic, str(self.name), str(self.description)) class RequestParameter(Parameter): def __init__(self, parameter): """Class instantiation """ super().__init__(parameter) self.position = parameter.position @property def is_query_string(self): return self.position < 0 def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (0 if self.position >= 0 else 1, not self.generic, str(self.name), str(self.description)) def get_comparable_values_for_ordering(self): """Return 
a tupple of values representing the unicity of the object """ return (0 if self.position >= 0 else 1, int(self.position), str(self.name), str(self.description)) class ResponseCode(Element, Comparable): def __init__(self, parameter): """Class instantiation """ super().__init__(parameter) self.code = parameter.code self.message = parameter.message self.generic = parameter.generic def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (not self.generic, int(self.code), str(self.message), str(self.description)) class Type(ElementVersioned, Comparable): """Element Type """ def __init__(self, type): """Class instantiation """ super().__init__(type) self.name = type.name self.format = TypeFormat(type.format) self.changes_status = {} self.item = [] self.versions = [] self.samples = {} def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name)) class TypeFormat(): """Element Type """ def __init__(self, type_format): """Class instantiation """ self.sample = [] self.pretty = [] self.advanced = [] class Object(Element, Comparable): """Element Object """ @classmethod def factory(cls, object_source): """Return a proper object """ if object_source.type is ObjectRaw.Types.object: return ObjectObject(object_source) elif object_source.type not in ObjectRaw.Types or object_source.type is ObjectRaw.Types.type: return ObjectType(object_source) elif object_source.type is ObjectRaw.Types.array: return ObjectArray(object_source) elif object_source.type is ObjectRaw.Types.dynamic: return ObjectDynamic(object_source) elif object_source.type is ObjectRaw.Types.const: return ObjectConst(object_source) elif object_source.type is ObjectRaw.Types.enum: return ObjectEnum(object_source) else: return Object(object_source) def __init__(self, object): """Class instantiation """ super().__init__(object) self.type = object.type self.optional = object.optional if 
isinstance(object, Constraintable): self.constraints = object.constraints def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name), str(self.description), str(self.type), bool(self.optional), str(self.constraints) if isinstance(self, Constraintable) else "") class ObjectObject(Object): """Element ObjectObject """ def __init__(self, object): """Class instantiation """ super().__init__(object) self.properties = {} self.pattern_properties = {} self.additional_properties = [] class ObjectArray(Object): """Element ObjectArray """ def __init__(self, object): """Class instantiation """ super().__init__(object) self.items = None class ObjectDynamic(Object): """Element ObjectDynamic """ def __init__(self, object): """Class instantiation """ super().__init__(object) self.items = None class ObjectConst(Object): """Element ObjectConst """ def __init__(self, object): """Class instantiation """ super().__init__(object) self.const_type = object.const_type self.value = object.value class ObjectEnum(Object): """Element ObjectEnum """ def __init__(self, object): """Class instantiation """ super().__init__(object) self.values = [] self.descriptions = [] def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name), str(self.description), str(self.constraints)) class EnumValue(Object): """Element ObjectEnum """ def __init__(self, object): """Class instantiation """ super().__init__(object) class ObjectType(Object): """Element ObjectType """ def __init__(self, object): """Class instantiation """ super().__init__(object) self.type_name = object.type_name self.values = []
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/object/source_dto.py
source_dto.py
from apidoc.object.source_raw import Object as ObjectRaw from apidoc.object.source_raw import Constraintable from apidoc.object import Comparable class Type(): def __init__(self, type_raw): self.name = type_raw.name self.sample = type_raw.get_sample() self.pretty = type_raw.format.pretty self.advanced = type_raw.format.advanced self.item = Object.factory(type_raw.item) class Method(Comparable): def __init__(self, method_raw): self.name = method_raw.name self.method = method_raw.method self.code = method_raw.code self.message = method_raw.message self.full_uri = method_raw.full_uri self.absolute_uri = method_raw.absolute_uri self.request_headers = [Parameter(x) for x in method_raw.request_headers.values()] self.request_parameters = dict((name, Parameter(x)) for name, x in method_raw.request_parameters.items()) self.request_body = Object.factory(method_raw.request_body) self.response_body = Object.factory(method_raw.response_body) def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name)) class Parameter(Comparable): def __init__(self, parameter_raw): self.name = parameter_raw.name self.optional = parameter_raw.optional self.sample = parameter_raw.get_sample() self.position = parameter_raw.position @property def is_query_string(self): return self.position < 0 def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name), str(self.sample)) class Object(): @classmethod def factory(cls, object_raw): """Return a proper object """ if object_raw is None: return None if object_raw.type is ObjectRaw.Types.object: return ObjectObject(object_raw) elif object_raw.type is ObjectRaw.Types.type: return ObjectType(object_raw) elif object_raw.type is ObjectRaw.Types.array: return ObjectArray(object_raw) elif object_raw.type is ObjectRaw.Types.dynamic: return ObjectDynamic(object_raw) elif object_raw.type is ObjectRaw.Types.const: return 
ObjectConst(object_raw) elif object_raw.type is ObjectRaw.Types.enum: return ObjectEnum(object_raw) else: return Object(object_raw) def __init__(self, object_raw): self.name = object_raw.name self.type = object_raw.type self.optional = object_raw.optional self.sample = object_raw.get_sample() if isinstance(object_raw, Constraintable): self.constraints = object_raw.constraints else: self.constraints = {} class ObjectObject(Object): def __init__(self, object_raw): super().__init__(object_raw) self.properties = dict((name, Object.factory(x)) for name, x in sorted(object_raw.properties.items())) self.pattern_properties = dict((name, Object.factory(x)) for name, x in sorted(object_raw.pattern_properties.items())) self.additional_properties = None if object_raw.additional_properties is None else Object.factory(object_raw.additional_properties) class ObjectArray(Object): def __init__(self, object_raw): super().__init__(object_raw) self.items = Object.factory(object_raw.items) self.sample_count = object_raw.sample_count class ObjectDynamic(Object): def __init__(self, object_raw): super().__init__(object_raw) self.items = Object.factory(object_raw.items) class ObjectConst(Object): def __init__(self, object_raw): super().__init__(object_raw) self.const_type = object_raw.const_type self.value = object_raw.value class ObjectEnum(Object): def __init__(self, object_raw): super().__init__(object_raw) self.values = object_raw.values class ObjectType(Object): def __init__(self, object_raw): super().__init__(object_raw) if object_raw.type_object is not None: self.type_object = Object.factory(object_raw.type_object.item)
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/object/source_sample.py
source_sample.py
from apidoc.lib.util.enum import Enum from apidoc.object import Comparable class Root(): """Root object of sources elements """ def __init__(self): """Class instantiation """ super().__init__() self.configuration = Configuration() self.versions = {} self.categories = {} self.methods = {} self.types = {} self.references = {} class Element(): """Generic element """ def __init__(self): """Class instantiation """ super().__init__() self.name = None self.description = None class Sampleable(): """Element who can provide samples """ def __init__(self): """Class instantiation """ super().__init__() self.sample = None def get_sample(self): """Return the a sample for the element """ if self.sample is None: return self.get_default_sample() return self.sample def get_default_sample(self): """Return default value for the element """ return "my_%s" % self.name class Constraintable(): """Element who can provide constraints """ def __init__(self): """Class instantiation """ super().__init__() self.constraints = {} class Displayable(): """Element who can be displayed """ def __init__(self): """Class instantiation """ super().__init__() self.display = True self.label = "" class Configuration(Element): """Element Configuration """ def __init__(self): """Class instantiation """ super().__init__() self.uri = None self.title = None class Version(Element, Displayable, Comparable): """Element Version """ class Status(Enum): """List of availables Status for this element """ current = 1 beta = 2 deprecated = 3 draft = 4 def __init__(self): """Class instantiation """ super().__init__() self.uri = None self.full_uri = None self.major = 1 self.minor = 0 self.status = Version.Status("current") self.methods = {} self.types = {} self.references = {} def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (int(self.major), int(self.minor), str(self.name)) class Category(Element, Displayable): """Element Category """ def __init__(self, name): 
"""Class instantiation """ super().__init__() self.name = name self.label = name self.order = 99 class Method(Element, Displayable, Comparable): """Element Method """ class Methods(Enum): """List of availables Methods for this element """ get = 1 post = 2 put = 3 delete = 4 head = 5 option = 6 patch = 7 @property def message(self): """Return default message for this element """ if self.code != 200: for code in self.response_codes: if code.code == self.code: return code.message raise ValueError("Unknown response code \"%s\" in \"%s\"." % (self.code, self.name)) return "OK" def __init__(self): """Class instantiation """ super().__init__() self.code = 200 self.uri = None self.absolute_uri = None self.full_uri = None self.category = None self.method = Method.Methods("get") self.request_headers = {} self.request_parameters = {} self.request_body = None self.response_codes = [] self.response_body = None def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name)) class Parameter(Element, Sampleable): """Element Parameter """ def __init__(self): """Class instantiation """ super().__init__() self.type = None self.optional = False self.generic = False self.type_object = None self.position = 0 def get_object(self): object = Object.factory(self.type, None) object.name = self.name return object def get_default_sample(self): """Return default value for the element """ if self.type not in Object.Types or self.type is Object.Types.type: return self.type_object.get_sample() else: return self.get_object().get_sample() class ResponseCode(Element): """Element ResponseCode """ def __init__(self): """Class instantiation """ super().__init__() self.code = 200 self.message = None self.generic = False class Type(Element, Comparable, Sampleable): """Element Type """ def __init__(self): """Class instantiation """ super().__init__() self.format = TypeFormat() self.category = None self.item = None def get_sample(self): 
"""Return the a sample for the element """ if self.item is not None: return self.item.get_sample() else: return super().get_sample() def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name)) class TypeFormat(): """Element TypeFormat """ def __init__(self): """Class instantiation """ super().__init__() self.pretty = None self.advanced = None class Constraint(Comparable): """An oobject's constraint """ def __init__(self, name, constraint): """Class instantiation """ super().__init__() self.name = name self.constraint = constraint def __str__(self): return '%s: %s' % (self.name, str(self.constraint)) def __repr__(self): return "%s(%r)" % (self.__class__, self.__dict__) def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name)) class Object(Element, Sampleable): """Element Object """ class Types(Enum): """List of availables Types for this element """ object = 1 array = 2 number = 3 string = 4 boolean = 5 none = 6 reference = 7 type = 8 dynamic = 9 const = 10 enum = 11 integer = 12 any = 13 @classmethod def factory(cls, str_type, version): """Return a proper object """ type = Object.Types(str_type) if type is Object.Types.object: object = ObjectObject() elif type is Object.Types.array: object = ObjectArray() elif type is Object.Types.number: object = ObjectNumber() elif type is Object.Types.integer: object = ObjectInteger() elif type is Object.Types.string: object = ObjectString() elif type is Object.Types.boolean: object = ObjectBoolean() elif type is Object.Types.reference: object = ObjectReference() elif type is Object.Types.type: object = ObjectType() elif type is Object.Types.none: object = ObjectNone() elif type is Object.Types.dynamic: object = ObjectDynamic() elif type is Object.Types.const: object = ObjectConst() elif type is Object.Types.enum: object = ObjectEnum() else: object = Object() object.type = type object.version 
= version return object def __init__(self): """Class instantiation """ super().__init__() self.type = None self.optional = False class ObjectObject(Object, Constraintable): """Element ObjectObject """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("object") self.properties = {} self.pattern_properties = {} self.additional_properties = None class ObjectArray(Object, Constraintable): """Element ObjectArray """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("array") self.items = None self.sample_count = 2 class ObjectNumber(Object, Constraintable): """Element ObjectNumber """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("number") def get_default_sample(self): """Return default value for the element """ return '13.37' class ObjectInteger(Object, Constraintable): """Element ObjectInteger """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("integer") def get_default_sample(self): """Return default value for the element """ return '42' class ObjectString(Object, Constraintable): """Element ObjectString """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("string") class ObjectBoolean(Object, Constraintable): """Element ObjectBoolean """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("boolean") def get_default_sample(self): """Return default value for the element """ return True class ObjectNone(Object, Constraintable): """Element ObjectNone """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("none") class ObjectDynamic(Object, Constraintable): """Element ObjectDynamic """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("dynamic") self.items = None def get_default_sample(self): """Return default value for the element """ return { 
"key1": "my_%s" % self.name, "key2": "sample" } class ObjectConst(Object, Constraintable): """Element ObjectConst """ class Types(Enum): """List of availables Primaries for this element """ string = 1 boolean = 2 number = 3 integer = 4 def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("const") self.const_type = ObjectConst.Types.string self.value = None def get_default_sample(self): """Return default value for the element """ return self.value class ObjectEnum(Object, Constraintable): def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("enum") self.values = [] self.descriptions = [] def get_default_sample(self): """Return default value for the element """ if not self.values: return super().get_default_sample() return self.values[0] class EnumValue(Object, Comparable): def get_comparable_values(self): """Return a tupple of values representing the unicity of the object """ return (str(self.name), str(self.description)) class ObjectReference(Object): """Element ObjectReference """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("reference") self.reference_name = None class ObjectType(Object, Constraintable): """Element ObjectType """ def __init__(self): """Class instantiation """ super().__init__() self.type = Object.Types("type") self.type_name = None self.type_object = None def get_default_sample(self): """Return default value for the element """ if self.type_object is None: return super().get_default_sample() return self.type_object.get_sample()
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/object/source_raw.py
source_raw.py
import sys
import os

sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))

import time
import logging
import logging.config
import argparse
from datetime import datetime

from apidoc import __version__
from apidoc.lib.fswatcher.observer import Observer
from apidoc.lib.fswatcher.callbackHandler import CallbackHandler
from apidoc.lib.util.serialize import json_repr
from apidoc.factory.config import Config as ConfigFactory
from apidoc.factory.source import Source as SourceFactory
from apidoc.factory.template import Template as TemplateFactory
from apidoc.object.config import Config as ConfigObject
from apidoc.service.source import Source as SourceService
from apidoc.service.config import Config as ConfigService
from apidoc.service.parser import Parser as FileParser


class ApiDoc():

    """Base command-line interface for ApiDoc
    """

    dry_run = False
    watch = False
    traceback = False
    config = None
    # last successfully parsed sources; reused by the watch-mode callbacks
    sources = None

    def __init__(self):
        """Initialize the command-line argument parser
        """
        self.parser = argparse.ArgumentParser(description=self.__doc__)
        self.parser.add_argument(
            "-c", "--config", type=str, metavar="CONFIG",
            help="configuration file"
        )
        self.parser.add_argument(
            "-i", "--input", nargs='+', type=str, metavar="DIRECTORY OR FILE",
            help="directories and/or files containing documentation\'s source files"
        )
        self.parser.add_argument(
            "-o", "--output", type=str, metavar="FILE",
            help="rendered output file"
        )
        self.parser.add_argument(
            "-v", "--version", action='version',
            version='%(prog)s ' + __version__
        )
        self.parser.add_argument(
            "-n", "--no-validate", action='store_const', const=True,
            help="disable validation"
        )
        self.parser.add_argument(
            "-a", "--arguments", nargs='+', type=str, metavar="ARGUMENT",
            help="documentation\'s arguments arg1=value1 arg2=value2"
        )
        self.parser.add_argument(
            "-y", "--dry-run", action='store_const', const=True,
            help="analyse config's and source's files without building the documentation"
        )
        self.parser.add_argument(
            "-w", "--watch", action='store_const', const=True,
            help="re-render the documentation each time a source's file or a template's file changes"
        )
        self.parser.add_argument(
            "-q", "--quiet", action='store_const', const=True,
            help="does not display logging information below warning level"
        )
        self.parser.add_argument(
            "-qq", "--silence", action='store_const', const=True,
            help="does not display any logging information"
        )
        self.parser.add_argument(
            "-t", "--traceback", action='store_const', const=True,
            help="display traceback when an exception raised"
        )

        self.logger = logging.getLogger()

    def _init_config(self):
        """Build the command's configuration from call's arguments.

        Exits with status 2 (after printing usage) when neither a config
        file nor input locations were provided.
        """
        options = self.parser.parse_args()

        if options.config is None and options.input is None:
            self.parser.print_help()
            sys.exit(2)

        if options.config is not None:
            configFactory = ConfigFactory()
            config = configFactory.load_from_file(options.config)
        else:
            config = ConfigObject()

        # command-line options override values coming from the config file
        if options.input is not None:
            config["input"]["locations"] = [str(x) for x in options.input]
        if options.arguments is not None:
            config["input"]["arguments"] = dict((x.partition("=")[0], x.partition("=")[2]) for x in options.arguments)
        if options.output is not None:
            config["output"]["location"] = options.output
        if options.no_validate is not None:
            config["input"]["validate"] = not options.no_validate
        if options.dry_run is not None:
            self.dry_run = options.dry_run
        if options.watch is not None:
            self.watch = options.watch
        if options.traceback is not None:
            self.traceback = options.traceback
        if options.quiet is not None:
            self.logger.setLevel(logging.WARNING)
        if options.silence is not None:
            logging.disable(logging.CRITICAL)

        configService = ConfigService()
        configService.validate(config)
        self.config = config

    def main(self):
        """Run the command in the mode selected by the options
        """
        self._init_config()

        if self.dry_run:
            return self.run_dry_run()
        elif self.watch:
            return self.run_watch()
        else:
            return self.run_render()

    def _get_sources(self):
        """Parse and validate sources; log the failure and re-raise on error
        """
        now = datetime.now()
        try:
            sourceService = SourceService()
            sourceFactory = SourceFactory()
            sources = sourceFactory.create_from_config(self.config)
            sourceService.validate(sources)
            self.logger.debug("Parse sources in %s." % (datetime.now() - now))
        except Exception:
            if self.traceback:
                self.logger.exception("Failed to parse sources")
            else:
                self.logger.error("Failed to parse sources")
            raise
        return sources

    def _render_template(self, sources):
        """Render the documentation template; log the failure and re-raise on error
        """
        now = datetime.now()
        try:
            templateFactory = TemplateFactory()
            template = templateFactory.create_from_config(self.config)
            template.render(sources, self.config)
            self.logger.debug("Render template in %s." % (datetime.now() - now))
        except Exception:
            if self.traceback:
                self.logger.exception("Failed to render template")
            else:
                self.logger.error("Failed to render template")
            raise

    def run_dry_run(self):
        """Parse and validate the sources, then dump them without rendering
        """
        try:
            sources = self._get_sources()
        except Exception:
            # the failure was already logged by _get_sources; without this
            # early return, printing below would raise NameError on `sources`
            return
        print(json_repr(sources))

    def run_render(self):
        """Parse the sources and render the documentation once
        """
        try:
            sources = self._get_sources()
            self._render_template(sources)
        except Exception:
            # best-effort: errors were already logged by the helpers
            pass

    def run_watch(self):
        """Build documentation from sources each time a source or template files is modified
        """
        configService = ConfigService()
        self.logger.info("Start watching")
        self._watch_refresh_source(None)

        observer = Observer()
        template_handler = CallbackHandler(self._watch_refresh_template)
        source_handler = CallbackHandler(self._watch_refresh_source)

        template_path = os.path.dirname(configService.get_template_from_config(self.config))
        observer.add_handler(template_path, template_handler)

        if (self.config["input"]["locations"] is not None):
            for location in self.config["input"]["locations"]:
                observer.add_handler(location, source_handler)

        observer.start()
        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            observer.stop()
        observer.join()

    def _watch_refresh_source(self, event):
        """Refresh sources then templates
        """
        self.logger.info("Sources changed...")
        try:
            self.sources = self._get_sources()
            self._render_template(self.sources)
        except Exception:
            # keep watching even if a change made the sources invalid
            pass

    def _watch_refresh_template(self, event):
        """Refresh template's contents
        """
        self.logger.info("Template changed...")
        try:
            self._render_template(self.sources)
        except Exception:
            # keep watching even if the template is momentarily broken
            pass


def main():
    """Main function to run command
    """
    configParser = FileParser()
    logging.config.dictConfig(
        configParser.load_from_file(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'settings', 'logging.yml'))
    )

    ApiDoc().main()

if __name__ == '__main__':
    main()
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/command/run.py
run.py
from apidoc.object.source_raw import ResponseCode as ObjectResponseCode
from apidoc.factory.source.element import Element as ElementFactory
from apidoc.lib.util.cast import to_boolean


class ResponseCode(ElementFactory):

    """ ResponseCode Factory
    """

    # Standard HTTP reason phrases, used when no explicit message is given
    default_messages = {
        100: "Continue",
        101: "Switching Protocols",
        200: "OK",
        201: "Created",
        202: "Accepted",
        203: "Non-Authoritative Information",
        204: "No Content",
        205: "Reset Content",
        206: "Partial Content",
        300: "Multiple Choices",
        301: "Moved Permanently",
        302: "Found",
        303: "See Other",
        304: "Not Modified",
        305: "Use Proxy",
        307: "Temporary Redirect",
        400: "Bad Request",
        401: "Unauthorized",
        402: "Payment Required",
        403: "Forbidden",
        404: "Not Found",
        405: "Method Not Allowed",
        406: "Not Acceptable",
        407: "Proxy Authentication Required",
        408: "Request Time-out",
        409: "Conflict",
        410: "Gone",
        411: "Length Required",
        412: "Precondition Failed",
        413: "Request Entity Too Large",
        414: "Request-URI Too Large",
        415: "Unsupported Media Type",
        416: "Requested range not satisfiable",
        417: "Expectation Failed",
        500: "Internal Server Error",
        501: "Not Implemented",
        502: "Bad Gateway",
        503: "Service Unavailable",
        504: "Gateway Time-out",
        505: "HTTP Version not supported"
    }

    def create_from_dictionary(self, datas):
        """Return a populated object ResponseCode from dictionary datas
        """
        # "code" is the only mandatory key
        if "code" not in datas:
            raise ValueError("A response code must contain a code in \"%s\"." % repr(datas))

        response_code = ObjectResponseCode()
        self.set_common_datas(response_code, str(datas["code"]), datas)
        response_code.code = int(datas["code"])

        if "message" in datas:
            response_code.message = str(datas["message"])
        else:
            # fall back to the standard reason phrase when the code is known
            default_message = self.default_messages.get(response_code.code)
            if default_message is not None:
                response_code.message = default_message

        if "generic" in datas:
            response_code.generic = to_boolean(datas["generic"])

        return response_code
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/factory/source/responseCode.py
responseCode.py
from apidoc.object.source_dto import Root as ObjectRoot
from apidoc.object.source_raw import Category
from apidoc.object.source_raw import Object as ObjectRaw
from apidoc.object.source_dto import Version
from apidoc.object.source_dto import MethodCategory, TypeCategory
from apidoc.object.source_dto import Method, Type
from apidoc.object.source_dto import MultiVersion
from apidoc.object.source_dto import Parameter, RequestParameter, ResponseCode
from apidoc.object.source_dto import Object
from apidoc.object.source_sample import Type as TypeSample
from apidoc.object.source_sample import Method as MethodSample


class RootDto():

    """ Root Factory
    """

    def create_from_root(self, root_source):
        """Return a populated Object Root from dictionnary datas

        Walks every version of the raw source in sorted order and merges its
        methods and types into version-spanning DTO objects, then computes
        per-version change markers (new/updated/deleted/none).
        """
        root_dto = ObjectRoot()
        root_dto.configuration = root_source.configuration
        root_dto.versions = [Version(x) for x in root_source.versions.values()]

        for version in sorted(root_source.versions.values()):
            hydrator = Hydrator(version, root_source.versions, root_source.versions[version.name].types)
            for method in version.methods.values():
                hydrator.hydrate_method(root_dto, root_source, method)
            for type in version.types.values():
                hydrator.hydrate_type(root_dto, root_source, type)

        self.define_changes_status(root_dto)

        return root_dto

    def define_changes_status(self, root_dto):
        """Fill the gaps in each item's per-version changes_status.

        An item missing from a version after having existed is marked
        ``deleted`` for that version; versions before its first appearance
        are marked ``none``; its first appearance is marked ``new``.
        """
        sorted_version = sorted(root_dto.versions)

        # collect every method and type DTO across all categories
        items = []
        for category in root_dto.method_categories:
            items = items + category.methods
        for category in root_dto.type_categories:
            items = items + category.types

        for item in items:
            new = False
            for version in sorted_version:
                if version.name not in item.changes_status.keys():
                    if new:
                        # item existed in a previous version but not here
                        item.changes_status[version.name] = MultiVersion.Changes.deleted
                        new = False
                    else:
                        item.changes_status[version.name] = MultiVersion.Changes.none
                else:
                    if not new:
                        # first version where the item appears
                        item.changes_status[version.name] = MultiVersion.Changes.new
                        new = True


class Hydrator():

    """Merges one raw version's elements into the multi-version DTO tree """

    def __init__(self, version, versions, types):
        # name of the version currently being merged
        self.version_name = version.name
        # all versions, needed to find the previous one for change detection
        self.versions = versions
        self.types = types

    def hydrate_method(self, root_dto, root_source, method):
        """Merge one raw method into the DTO tree for the current version """
        # find or create the category holding this method
        categories = dict((category.name, category) for category in root_dto.method_categories)
        if method.category not in categories.keys():
            if method.category in root_source.categories.keys():
                category = MethodCategory(root_source.categories[method.category])
            else:
                category = MethodCategory(Category(method.category))
            root_dto.method_categories.append(category)
        else:
            category = categories[method.category]

        # find or create the method DTO inside its category
        methods = dict((method.name, method) for method in category.methods)
        if method.name in methods.keys():
            method_dto = methods[method.name]
            method_dto.changes_status[self.version_name] = MultiVersion.Changes.none
        else:
            method_dto = Method(method)
            category.methods.append(method_dto)
            method_dto.changes_status[self.version_name] = MultiVersion.Changes.new

        method_dto.versions.append(self.version_name)

        # position of each URI parameter inside the full uri, used for sorting
        for parameter in method.request_parameters.values():
            parameter.position = method.full_uri.find("{%s}" % parameter.name)

        request_parameters = [RequestParameter(parameter) for parameter in method.request_parameters.values()]
        request_headers = [Parameter(parameter) for parameter in method.request_headers.values()]
        response_codes = [ResponseCode(parameter) for parameter in method.response_codes]

        # each hydrate_* returns the number of differences vs. previous version
        changes = 0
        changes += self.hydrate_value(method_dto.description, method.description)
        changes += self.hydrate_value(method_dto.full_uri, method.full_uri)
        changes += self.hydrate_value(method_dto.absolute_uri, method.absolute_uri)
        changes += self.hydrate_value(method_dto.code, method.code)
        changes += self.hydrate_list(method_dto.request_headers, sorted(request_headers))
        changes += self.hydrate_list(method_dto.request_parameters, sorted(request_parameters))
        changes += self.hydrate_list(method_dto.response_codes, sorted(response_codes))
        changes += self.hydrate_object(method_dto.request_body, method.request_body)
        changes += self.hydrate_object(method_dto.response_body, method.response_body)

        # only promote "none" to "updated"; never downgrade a "new" marker
        if changes > 0 and method_dto.changes_status[self.version_name] is MultiVersion.Changes.none:
            method_dto.changes_status[self.version_name] = MultiVersion.Changes.updated

        method_dto.samples[self.version_name] = MethodSample(method)

    def hydrate_type(self, root_dto, root, type):
        """Merge one raw type into the DTO tree for the current version """
        # find or create the category holding this type
        categories = dict((category.name, category) for category in root_dto.type_categories)
        if type.category not in categories.keys():
            if type.category in root.categories.keys():
                category = TypeCategory(root.categories[type.category])
            else:
                category = TypeCategory(Category(type.category))
            root_dto.type_categories.append(category)
        else:
            category = categories[type.category]

        # find or create the type DTO inside its category
        types = dict((type.name, type) for type in category.types)
        if type.name in types.keys():
            type_dto = types[type.name]
            type_dto.changes_status[self.version_name] = MultiVersion.Changes.none
        else:
            type_dto = Type(type)
            category.types.append(type_dto)
            type_dto.changes_status[self.version_name] = MultiVersion.Changes.new

        type_dto.versions.append(self.version_name)

        changes = 0
        changes += self.hydrate_value(type_dto.description, type.description)
        changes += self.hydrate_value(type_dto.format.pretty, type.format.pretty)
        changes += self.hydrate_value(type_dto.format.advanced, type.format.advanced)
        changes += self.hydrate_object(type_dto.item, type.item)

        if changes > 0 and type_dto.changes_status[self.version_name] is MultiVersion.Changes.none:
            type_dto.changes_status[self.version_name] = MultiVersion.Changes.updated

        type_dto.samples[self.version_name] = TypeSample(type)

    def hydrate_value(self, dto_value, source_value):
        """Merge a scalar value into its list of MultiVersion entries.

        Returns the number of detected changes relative to the previous
        version (0 when the value already existed unchanged).
        """
        if source_value is None:
            return 0
        changes = 0
        find = False
        for versioned_value in dto_value:
            if versioned_value.value == source_value:
                # same value already known: just tag it with this version
                versioned_value.versions.append(self.version_name)
                find = True
                changes += 1 if self.has_changed(versioned_value) else 0
        if not find:
            dto_value.append(MultiVersion(source_value, self.version_name))
            return 1
        return changes

    def hydrate_list(self, dto_list, source_list):
        """Merge a list of values into their MultiVersion entries """
        changes = 0
        for source_value in source_list:
            find = False
            for versioned_value in dto_list:
                if versioned_value.value == source_value:
                    versioned_value.versions.append(self.version_name)
                    find = True
                    changes += 1 if self.has_changed(versioned_value) else 0
            if not find:
                dto_list.append(MultiVersion(source_value, self.version_name))
                changes += 1
        return changes

    def hydrate_object(self, dto_object, source_object):
        """Recursively merge an object schema into its MultiVersion entries.

        When the same object already exists, recursion continues into its
        children (properties/items/values); otherwise a new MultiVersion
        entry is built and appended.
        """
        if source_object is None:
            return 0

        source_dto = Object.factory(source_object)

        changes = 0
        find = None
        for versioned_value in dto_object:
            if versioned_value.value == source_dto:
                versioned_value.versions.append(self.version_name)
                find = versioned_value
                changes += 1 if self.has_changed(versioned_value) else 0

        if find is None:
            # object unseen so far: build its children then register it
            changes += 1
            if source_dto.type is ObjectRaw.Types.object:
                for (property_name, property_value) in source_object.properties.items():
                    source_dto.properties[property_name] = []
                    changes += self.hydrate_object(source_dto.properties[property_name], property_value)
                for (property_name, property_value) in source_object.pattern_properties.items():
                    source_dto.pattern_properties[property_name] = []
                    changes += self.hydrate_object(source_dto.pattern_properties[property_name], property_value)
                changes += self.hydrate_object(source_dto.additional_properties, source_object.additional_properties)
            elif source_dto.type is ObjectRaw.Types.array:
                source_dto.items = []
                changes += self.hydrate_object(source_dto.items, source_object.items)
            elif source_dto.type is ObjectRaw.Types.dynamic:
                source_dto.items = []
                changes += self.hydrate_object(source_dto.items, source_object.items)
            elif source_dto.type is ObjectRaw.Types.enum:
                source_dto.values = []
                source_dto.descriptions = []
                self.hydrate_list(source_dto.values, sorted(source_object.values))
                self.hydrate_list(source_dto.descriptions, sorted(source_object.descriptions))
            elif source_dto.type is ObjectRaw.Types.type:
                source_dto.type_object = source_object.type_object
            dto_object.append(MultiVersion(source_dto, self.version_name))
        else:
            # object already known: merge children into the existing entry
            if source_dto.type is ObjectRaw.Types.object:
                for (property_name, property_value) in source_object.properties.items():
                    if find.value.type is ObjectRaw.Types.object and property_name in find.value.properties.keys():
                        changes += self.hydrate_object(find.value.properties[property_name], property_value)
                    else:
                        find.value.properties[property_name] = []
                        changes += self.hydrate_object(find.value.properties[property_name], property_value)
                for (property_name, property_value) in source_object.pattern_properties.items():
                    if find.value.type is ObjectRaw.Types.object and property_name in find.value.pattern_properties.keys():
                        changes += self.hydrate_object(find.value.pattern_properties[property_name], property_value)
                    else:
                        find.value.pattern_properties[property_name] = []
                        changes += self.hydrate_object(find.value.pattern_properties[property_name], property_value)
                changes += self.hydrate_object(find.value.additional_properties, source_object.additional_properties)
            elif source_dto.type is ObjectRaw.Types.array:
                changes += self.hydrate_object(find.value.items, source_object.items)
            elif source_dto.type is ObjectRaw.Types.dynamic:
                changes += self.hydrate_object(find.value.items, source_object.items)
            elif source_dto.type is ObjectRaw.Types.enum:
                changes += self.hydrate_list(find.value.values, source_object.values)
                changes += self.hydrate_list(find.value.descriptions, source_object.descriptions)

        return changes

    def has_changed(self, multi_version):
        """Return True when the entry was absent from the previous version """
        previous_version = self.get_previous_version()
        if previous_version is None:
            # current version is the first one: nothing to compare against
            return False
        return previous_version.name not in multi_version.versions

    def get_previous_version(self):
        """Return the version preceding the current one (None for the first) """
        previous = None
        for version in sorted(self.versions.values()):
            if version.name == self.version_name:
                return previous
            previous = version
        raise ValueError("Unable to find current version in Version list")
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/factory/source/rootDto.py
rootDto.py
from apidoc.object.source_raw import ObjectObject, ObjectArray, ObjectNumber, ObjectInteger, ObjectString, ObjectBoolean, ObjectReference, ObjectType, ObjectNone, ObjectDynamic, ObjectConst, ObjectEnum, EnumValue, Constraint, Constraintable
from apidoc.object.source_raw import Object as ObjectRaw
from apidoc.factory.source.element import Element as ElementFactory
from apidoc.lib.util.cast import to_boolean


class Object(ElementFactory):

    """ Object Factory
    """

    def create_from_name_and_dictionary(self, name, datas):
        """Return a populated object Object from dictionary datas

        The concrete subclass is chosen from ``datas["type"]``; unknown type
        names are treated as references to user-defined types.

        Raises ValueError when a mandatory key for the detected type is
        missing or malformed.
        """
        if "type" not in datas:
            str_type = "any"
        else:
            str_type = str(datas["type"]).lower()

        # unknown type names fall back to the user-defined "type" kind
        if str_type not in ObjectRaw.Types:
            object_type = ObjectRaw.Types("type")
        else:
            object_type = ObjectRaw.Types(str_type)

        if object_type is ObjectRaw.Types.object:
            obj = ObjectObject()
            if "properties" in datas:
                obj.properties = self.create_dictionary_of_element_from_dictionary("properties", datas)
            if "patternProperties" in datas:
                obj.pattern_properties = self.create_dictionary_of_element_from_dictionary("patternProperties", datas)
            if "additionalProperties" in datas:
                if isinstance(datas["additionalProperties"], dict):
                    obj.additional_properties = self.create_from_name_and_dictionary("additionalProperties", datas["additionalProperties"])
                elif not to_boolean(datas["additionalProperties"]):
                    obj.additional_properties = None
                else:
                    # boolean "true" is not supported yet
                    # (fixed typo in the original message: "doe" -> "does")
                    raise ValueError("AdditionalProperties does not allow empty value (yet)")
        elif object_type is ObjectRaw.Types.array:
            obj = ObjectArray()
            if "items" in datas:
                obj.items = self.create_from_name_and_dictionary("items", datas["items"])
            else:
                obj.items = ObjectObject()
            if "sample_count" in datas:
                obj.sample_count = int(datas["sample_count"])
        elif object_type is ObjectRaw.Types.number:
            obj = ObjectNumber()
        elif object_type is ObjectRaw.Types.integer:
            obj = ObjectInteger()
        elif object_type is ObjectRaw.Types.string:
            obj = ObjectString()
        elif object_type is ObjectRaw.Types.boolean:
            obj = ObjectBoolean()
            if "sample" in datas:
                obj.sample = to_boolean(datas["sample"])
        elif object_type is ObjectRaw.Types.reference:
            obj = ObjectReference()
            if "reference" in datas:
                obj.reference_name = str(datas["reference"])
        elif object_type is ObjectRaw.Types.type:
            obj = ObjectType()
            # keep the original (non-lowercased) name of the referenced type
            obj.type_name = str(datas["type"])
        elif object_type is ObjectRaw.Types.none:
            obj = ObjectNone()
        elif object_type is ObjectRaw.Types.dynamic:
            obj = ObjectDynamic()
            if "items" in datas:
                obj.items = self.create_from_name_and_dictionary("items", datas["items"])
            if "sample" in datas:
                if isinstance(datas["sample"], dict):
                    obj.sample = {}
                    for k, v in datas["sample"].items():
                        obj.sample[str(k)] = str(v)
                else:
                    # fixed the invalid "\s" escape of the original message
                    raise ValueError("A dictionnary is expected for dynamic's object in \"%s\"" % name)
        elif object_type is ObjectRaw.Types.const:
            obj = ObjectConst()
            if "const_type" in datas:
                const_type = str(datas["const_type"])
                if const_type not in ObjectConst.Types:
                    # fixed typo in the original message: "unknwon" -> "unknown"
                    raise ValueError("Const type \"%s\" unknown" % const_type)
                # convert to the enum member so const_type is always a
                # ObjectConst.Types value (the original left a plain string
                # here while the default branch used the enum member)
                const_type = ObjectConst.Types(const_type)
            else:
                const_type = ObjectConst.Types.string
            obj.const_type = const_type
            if "value" not in datas:
                raise ValueError("Missing const value")
            obj.value = datas["value"]
        elif object_type is ObjectRaw.Types.enum:
            obj = ObjectEnum()
            if "values" not in datas or not isinstance(datas['values'], list):
                raise ValueError("Missing enum values")
            obj.values = [str(value) for value in datas["values"]]
            if "descriptions" in datas and isinstance(datas['descriptions'], dict):
                for (value_name, value_description) in datas["descriptions"].items():
                    value = EnumValue()
                    value.name = value_name
                    value.description = value_description
                    obj.descriptions.append(value)
            # every declared value gets an EnumValue, described or not
            descriptions = [description.name for description in obj.descriptions]
            for value_name in [x for x in obj.values if x not in descriptions]:
                value = EnumValue()
                value.name = value_name
                obj.descriptions.append(value)
        else:
            obj = ObjectRaw()

        self.set_common_datas(obj, name, datas)
        if isinstance(obj, Constraintable):
            self.set_constraints(obj, datas)

        obj.type = object_type

        if "optional" in datas:
            obj.optional = to_boolean(datas["optional"])

        return obj

    def set_constraints(self, object, datas):
        """Attach validation Constraints found in datas to the object """
        # well-known JSON-schema style options may appear at the top level...
        for option in ['maxItems', 'minItems', 'uniqueItems', 'maxLength', 'minLength', 'pattern', 'format', 'enum', 'default', 'multipleOf', 'maximum', 'exclusiveMaximum', 'minimum', 'exclusiveMinimum']:
            if option in datas:
                object.constraints[option] = Constraint(option, datas[option])

        # ...and arbitrary ones under an explicit "constraints" mapping
        if 'constraints' in datas:
            for name, constraint in datas['constraints'].items():
                object.constraints[name] = Constraint(name, constraint)
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/factory/source/object.py
object.py
import collections
import collections.abc

from apidoc.object.source_raw import Sampleable, Displayable
from apidoc.lib.util.cast import to_boolean


class Element():

    """ Populate Helper Factory
    """

    def set_common_datas(self, element, name, datas):
        """Populated common data for an element from dictionnary datas

        Sets name, and optionally description, sample and display/label
        depending on which mixins the element implements.
        """
        element.name = str(name)
        if "description" in datas:
            element.description = str(datas["description"]).strip()
        if isinstance(element, Sampleable) and element.sample is None and "sample" in datas:
            element.sample = str(datas["sample"]).strip()
        if isinstance(element, Displayable):
            if "display" in datas:
                element.display = to_boolean(datas["display"])
            if "label" in datas:
                element.label = datas["label"]
            else:
                # fall back to the element's name when no label was given
                element.label = element.name

    def create_dictionary_of_element_from_dictionary(self, property_name, datas):
        """Populate a dictionary of elements
        """
        response = {}
        # collections.abc.Iterable: the bare collections.Iterable alias was
        # removed in Python 3.10
        if property_name in datas and datas[property_name] is not None and isinstance(datas[property_name], collections.abc.Iterable):
            for key, value in datas[property_name].items():
                response[key] = self.create_from_name_and_dictionary(key, value)
        return response

    def create_list_of_element_from_dictionary(self, property_name, datas):
        """Populate a list of elements
        """
        response = []
        if property_name in datas and datas[property_name] is not None and isinstance(datas[property_name], list):
            for value in datas[property_name]:
                response.append(self.create_from_dictionary(value))
        return response

    def get_enum(self, property, enum, datas):
        """Factory enum type

        Raises ValueError when the value in datas is not a member of enum.
        """
        str_property = str(datas[property]).lower()
        if str_property not in enum:
            # fixed typo in the original message: "Unknow" -> "Unknown"
            raise ValueError("Unknown enum \"%s\" for \"%s\"." % (str_property, property))
        return enum(str_property)
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/factory/source/element.py
element.py
import os

from copy import deepcopy

from apidoc.service.validator import Validator
from apidoc.service.parser import Parser
from apidoc.service.merger import Merger
from apidoc.service.extender import Extender
from apidoc.factory.source.root import Root as RootFactory
from apidoc.factory.source.rootDto import RootDto as RootDtoFactory
from apidoc.object.source_raw import ObjectObject, Category
from apidoc.lib.util.decorator import add_property


@add_property("validator", Validator)
@add_property("parser", Parser)
@add_property("merger", Merger)
@add_property("extender", Extender)
@add_property("root_source_factory", RootFactory)
@add_property("root_dto_factory", RootDtoFactory)
class Source():

    """Create source object
    """

    # Paths (relative to the merged source dictionary) on which the
    # "extends" inheritance mechanism is applied; "?" matches any key.
    extender_paths = (
        "categories/?",
        "versions/?",
        "versions/?/methods/?",
        "versions/?/types/?",
        "versions/?/references/?",
    )

    def create_from_config(self, config):
        """Create a well populated Root DTO from the ``config`` object.

        Pipeline: load + merge + extend the raw source files, optionally
        validate them, build the raw Root object, then normalize it
        (resolve references, add categories, apply filters, drop hidden
        and unused elements, link named types) before converting to a DTO.
        """
        raw_sources = self.get_sources_from_config(config)
        sources = self.format_sources_from_config(raw_sources, config)

        if config["input"]["validate"]:
            self.validator.validate_sources(sources)

        root = self.root_source_factory.create_from_dictionary(sources)

        # Order matters: references must be inlined before categories are
        # collected and before hidden/unused elements are pruned; types are
        # linked last, on the surviving elements only.
        self.replace_references(root)
        self.add_missing_categories(root)
        self.hide_filtered_elements(root, config["filter"])
        self.remove_hidden_elements(root)
        self.remove_unused_types(root)
        self.replace_types(root)

        return self.root_dto_factory.create_from_root(root)

    def format_sources_from_config(self, raw_sources, config):
        """Merge the raw source dicts, inject config arguments and apply "extends"."""
        merged_source = self.merger.merge_sources(raw_sources)
        merged_source = self.inject_arguments_in_sources(merged_source, config["input"]["arguments"])
        return self.extender.extends(merged_source, paths=self.extender_paths)

    def get_sources_from_config(self, config):
        """Load a set of source's file defined in the config
        """
        sources = []
        if (config["input"]["locations"] is not None):
            for location in config["input"]["locations"]:
                # A location may be a directory (load every file in it) or a file.
                if os.path.isdir(location):
                    sources.extend(self.parser.load_all_from_directory(location))
                else:
                    sources.append(self.parser.load_from_file(location))
        return sources

    def inject_arguments_in_sources(self, sources, arguments):
        """Replace ``${argument}`` placeholders in sources by the configured values."""
        if arguments is not None:
            for (argument, value) in arguments.items():
                sources = self.replace_argument(sources, argument, value)
        return sources

    def replace_argument(self, element, argument, value):
        """Recursively replace one ``${argument}`` placeholder by ``value``.

        Walks lists and dicts; only string leaves are rewritten.
        """
        if isinstance(element, list):
            return [self.replace_argument(x, argument, value) for x in element]
        elif isinstance(element, dict):
            return dict((x, self.replace_argument(y, argument, value)) for (x, y) in element.items())
        elif isinstance(element, str):
            return element.replace("${%s}" % argument, value)
        else:
            return element

    def hide_filtered_elements(self, root, config_filter):
        """Flag versions/categories excluded by the config filter as not displayed."""
        if (config_filter["versions"]["includes"] is not None):
            for version in (version for version in root.versions.values() if version.name not in config_filter["versions"]["includes"]):
                version.display = False
        if (config_filter["versions"]["excludes"] is not None):
            for version in (version for version in root.versions.values() if version.name in config_filter["versions"]["excludes"]):
                version.display = False
        if (config_filter["categories"]["includes"] is not None):
            for category in (category for category in root.categories.values() if category.name not in config_filter["categories"]["includes"]):
                category.display = False
        if (config_filter["categories"]["excludes"] is not None):
            for category in (category for category in root.categories.values() if category.name in config_filter["categories"]["excludes"]):
                category.display = False

    def remove_unused_types(self, root):
        # Keep only the types actually referenced somewhere in the root.
        used_types = self.get_used_types(root)
        for version in root.versions.values():
            version.types = dict((type_name, type_value) for type_name, type_value in version.types.items() if type_name in used_types)

    def remove_hidden_elements(self, root):
        """Remove versions and methods flagged as not to display."""
        root.versions = dict((x, y) for x, y in root.versions.items() if y.display)
        hidden_categories = [category.name for category in root.categories.values() if not category.display]
        for version in root.versions.values():
            # A method is dropped when itself hidden or when its category is hidden.
            version.methods = dict((x, y) for x, y in version.methods.items() if y.display and y.category not in hidden_categories)

    def add_missing_categories(self, root):
        """Create a Category object for every category name referenced by a
        method or a type but not declared in the sources."""
        categories = [method.category for version in root.versions.values() for method in version.methods.values() if method.category not in root.categories.keys()] + \
            [type.category for version in root.versions.values() for type in version.types.values() if type.category not in root.categories.keys()]
        for category_name in categories:
            root.categories[category_name] = Category(category_name)

    def replace_references(self, root):
        """Inline "reference" objects in method bodies and type items."""
        for version in root.versions.values():
            for method in version.methods.values():
                method.request_body = self.replace_references_in_object(method.request_body, version.references)
                method.response_body = self.replace_references_in_object(method.response_body, version.references)
            for type in version.types.values():
                type.item = self.replace_references_in_object(type.item, version.references)

    def replace_types(self, root):
        """Link "type" objects (and typed parameters/headers) to their Type definition."""
        for version in root.versions.values():
            for method in version.methods.values():
                method.request_body = self.replace_types_in_object(method.request_body, version.types)
                method.response_body = self.replace_types_in_object(method.response_body, version.types)
                for parameter in method.request_parameters.values():
                    self.replace_types_in_parameter(parameter, version.types)
                for parameter in method.request_headers.values():
                    self.replace_types_in_parameter(parameter, version.types)
            for type in version.types.values():
                type.item = self.replace_types_in_object(type.item, version.types)

    def replace_references_in_object(self, object, references):
        """Recursively replace reference objects by a copy of their target.

        References may point to other references; arrays/dynamics recurse on
        ``items``, objects on their (pattern/additional) properties.
        """
        if object is None:
            return object

        if object.type is ObjectObject.Types.reference:
            object = self.get_reference(object, references)
            self.replace_references_in_object(object, references)
        elif object.type is ObjectObject.Types.array:
            object.items = self.replace_references_in_object(object.items, references)
        elif object.type is ObjectObject.Types.dynamic:
            object.items = self.replace_references_in_object(object.items, references)
        elif object.type is ObjectObject.Types.object:
            for (property_name, property_value) in object.properties.items():
                object.properties[property_name] = self.replace_references_in_object(property_value, references)
            for (property_name, property_value) in object.pattern_properties.items():
                object.pattern_properties[property_name] = self.replace_references_in_object(property_value, references)
            if object.additional_properties:
                object.additional_properties = self.replace_references_in_object(object.additional_properties, references)

        return object

    def replace_types_in_object(self, object, types):
        """Recursively attach the Type definition to every typed object.

        Raises ValueError when a referenced type name is not defined.
        """
        if object is None:
            return object

        if object.type is ObjectObject.Types.type:
            if object.type_name not in types.keys():
                raise ValueError("Type \"%s\" unknow" % object.type_name)
            object.type_object = types[object.type_name]
        elif object.type is ObjectObject.Types.array:
            object.items = self.replace_types_in_object(object.items, types)
        elif object.type is ObjectObject.Types.dynamic:
            object.items = self.replace_types_in_object(object.items, types)
        elif object.type is ObjectObject.Types.object:
            for (property_name, property_value) in object.properties.items():
                object.properties[property_name] = self.replace_types_in_object(property_value, types)
            for (property_name, property_value) in object.pattern_properties.items():
                object.pattern_properties[property_name] = self.replace_types_in_object(property_value, types)
            if object.additional_properties:
                object.additional_properties = self.replace_types_in_object(object.additional_properties, types)

        return object

    def replace_types_in_parameter(self, parameter, types):
        # A parameter whose type is not a primitive Types member names a
        # user-defined type; link it.
        if parameter.type not in ObjectObject.Types:
            parameter.type_object = types[parameter.type]
        return parameter

    def get_used_types(self, root):
        """Return the (deduplicated) list of type names referenced anywhere."""
        types = []
        for version in root.versions.values():
            for method in version.methods.values():
                types += self.get_used_types_in_object(method.request_body)
                types += self.get_used_types_in_object(method.response_body)
                types += [parameter.type for parameter in method.request_parameters.values() if parameter.type not in ObjectObject.Types]
                types += [parameter.type for parameter in method.request_headers.values() if parameter.type not in ObjectObject.Types]
            for type in version.types.values():
                types += self.get_used_types_in_object(type.item)
        # dict.fromkeys deduplicates while preserving first-seen order.
        return list({}.fromkeys(types).keys())

    def get_used_types_in_object(self, object):
        """Recursively collect the type names used by an object tree."""
        types = []
        if object is None:
            return types

        if object.type is ObjectObject.Types.type:
            types += [object.type_name]
        elif object.type is ObjectObject.Types.array:
            types += self.get_used_types_in_object(object.items)
        elif object.type is ObjectObject.Types.dynamic:
            types += self.get_used_types_in_object(object.items)
        elif object.type is ObjectObject.Types.object:
            for property in object.properties.values():
                types += self.get_used_types_in_object(property)
            for property in object.pattern_properties.values():
                types += self.get_used_types_in_object(property)
            if object.additional_properties:
                types += self.get_used_types_in_object(object.additional_properties)

        return types

    def get_reference(self, object, references):
        """Return a copy of the referenced element, carrying over the
        referencing object's name, optional flag and (if set) description.

        Follows chains of references recursively.
        """
        reference = deepcopy(references[object.reference_name])
        reference.name = object.name
        reference.optional = object.optional
        if object.description is not None:
            reference.description = object.description
        if reference.type is ObjectObject.Types.reference:
            return self.get_reference(reference, references)
        return reference
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/factory/source/__init__.py
__init__.py
from copy import deepcopy

from apidoc.service.merger import Merger
from apidoc.lib.util.decorator import add_property
from apidoc.lib.util.cast import to_boolean


@add_property("merger", Merger)
class Extender():

    """Provide tool to managed extentions.

    Applies an "extends" inheritance mechanism on nested dictionaries: an
    element may name sibling elements to inherit from; inherited data is
    merged in, elements flagged "removed" are dropped, and the control tags
    are stripped afterwards.
    """

    def extends(self, datas, paths, separator="/", extends_key="extends", inherit_key="inherit", removed_key="removed"):
        """Extend a dict with configurations defined in ``paths``.

        ``paths`` are separator-joined key paths where "?" matches any key.
        Returns a deep copy of ``datas`` with extensions applied, removed
        elements deleted and control tags cleaned.
        """
        self.datas = deepcopy(datas)
        self.paths = paths
        self.separator = separator
        self.extends_key = extends_key
        self.inherit_key = inherit_key
        self.removed_key = removed_key

        for path in paths:
            self.current_path = path
            self.extend_path(self.datas, path)

        self.datas = self.delete_removed(self.datas)
        self.clean_tags(self.datas)

        return self.datas

    def extend_path(self, datas, path, keys=None):
        """Walk ``path`` inside ``datas`` and apply extensions at its end.

        ``keys`` accumulates the concrete keys matched by "?" segments.
        """
        # FIX: avoid the mutable default argument (was keys=[]).
        if keys is None:
            keys = []
        (key, dot, next_path) = path.partition(self.separator)
        if key == "":
            # End of the path: this is the element to extend.
            self.apply_extends(datas, keys)
            return

        if key == "?":
            # Wildcard: recurse into every child, remembering its key.
            for (key_element, element) in datas.items():
                self.extend_path(element, next_path, keys + [key_element])
        elif key not in datas:
            return
        else:
            self.extend_path(datas[key], next_path, keys)

    def apply_extends(self, target, keys):
        """Apply the "extends" property of ``target``, consuming each entry."""
        if not isinstance(target, (list, dict)) or self.extends_key not in target or not target[self.extends_key]:
            return

        # Normalize a single extends value to a list.
        if not isinstance(target[self.extends_key], list):
            target[self.extends_key] = [target[self.extends_key]]

        while len(target[self.extends_key]) > 0:
            extends_path = target[self.extends_key][0]
            target[self.extends_key].remove(extends_path)
            extends_path_part = extends_path.split(self.separator)
            # Resolve the (possibly relative) extends path against our own keys.
            path = self.get_location_from_keys(keys[0:len(keys) - len(extends_path_part)] + extends_path_part)
            self.merge(target, path, keys)

    def get_location_from_keys(self, keys):
        """Return a location by replacing "?" segments of the current path by ``keys``."""
        full_extends_path_buffer = []
        full_extends_path = []
        key_index = 0
        for key in self.current_path.split(self.separator):
            if key == "?":
                full_extends_path += full_extends_path_buffer + [keys[key_index]]
                full_extends_path_buffer = []
                key_index += 1
            else:
                full_extends_path_buffer.append(key)

        return self.separator.join([str(x) for x in full_extends_path])

    def get_keys_from_location(self, location):
        """Return the concrete keys matched by the "?" segments of ``location``."""
        keys = []
        splited_location = location.split(self.separator)
        key_index = 0
        for key in self.current_path.split(self.separator):
            if key == "?":
                keys.append(splited_location[key_index])
            key_index += 1
        return keys

    def merge(self, target_datas, extend_location, keys):
        """Merge the source datas (located by ``extend_location``) into ``target_datas``.

        Raises ValueError when an element extends itself.
        """
        path = self.get_location_from_keys(keys)
        if extend_location == path:
            # FIX: message was misspelled and missing a space ("Recucive inclusion in\"%s\"").
            raise ValueError("Recursive inclusion in \"%s\"" % extend_location)
        source_datas = self.get_datas(extend_location)
        source_keys = self.get_keys_from_location(extend_location)
        # Resolve the source's own extensions first so inheritance chains work.
        self.apply_extends(source_datas, source_keys)
        self.merger.merge_extends(target_datas, deepcopy(source_datas), self.inherit_key)

    def get_datas(self, extend_location):
        """Retrieve datas from location.

        Raises ValueError when a key of the location does not exist.
        """
        root = self.datas
        for key in extend_location.split(self.separator):
            if key not in root:
                raise ValueError("Unable to find the key \"%s\" in \"%s\"" % (key, extend_location))
            else:
                root = root[key]
        return root

    def delete_removed(self, datas):
        """Recursively drop sub-elements whose "removed" tag is truthy."""
        if isinstance(datas, dict):
            if self.removed_key in datas and to_boolean(datas[self.removed_key]):
                return None
            new_datas = {}
            for key in datas:
                cleaned = self.delete_removed(datas[key])
                if cleaned is not None:
                    new_datas[key] = cleaned
            return new_datas
        elif isinstance(datas, list):
            new_datas = []
            for item in datas:
                cleaned = self.delete_removed(item)
                if cleaned is not None:
                    new_datas.append(cleaned)
            return new_datas
        else:
            return datas

    def clean_tags(self, datas):
        """Recursively strip the removed/inherit/extends control tags."""
        if isinstance(datas, dict):
            if self.removed_key in datas:
                del(datas[self.removed_key])
            if self.inherit_key in datas:
                del(datas[self.inherit_key])
            if self.extends_key in datas:
                del(datas[self.extends_key])
            for key in datas:
                self.clean_tags(datas[key])
        elif isinstance(datas, list):
            for item in datas:
                self.clean_tags(item)
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/service/extender.py
extender.py
import os
import shutil
import sys
import logging


class Template():

    """Provide tool to managed templates.

    Renders the documentation through a Jinja environment (``self.env``,
    injected by the caller) and optionally copies the template's static
    resources next to the output file.
    """

    def __init__(self):
        """Class instantiation
        """
        self.input = "default.html"   # template name looked up in self.env
        self.output = "stdout"        # "stdout" or a target file path
        self.env = None               # Jinja environment, set by the caller

    def render(self, sources, config, out=sys.stdout):
        """Render the documentation as defined in config Object.

        Writes to ``out`` when self.output is "stdout", otherwise creates the
        output directory, copies the default template's resources (local or
        remote flavour) and writes the rendered document to self.output.
        """
        template = self.env.get_template(self.input)
        output = template.render(sources=sources, layout=config["output"]["layout"], config=config["output"])

        if self.output == "stdout":
            out.write(output)
        else:
            dir = os.path.dirname(self.output)
            if not os.path.exists(dir):
                os.makedirs(dir)
            if config["output"]["template"] == "default":
                # The two flavours copy the same resources except for the CSS bundle.
                if config["output"]["componants"] == "local":
                    self._copy_resources(dir, "combined.css")
                if config["output"]["componants"] == "remote":
                    self._copy_resources(dir, "combined-embedded.css")
            # FIX: use a context manager instead of open(...).write(...) which
            # left the file handle unclosed.
            with open(self.output, "w") as output_file:
                output_file.write(output)

    def _copy_resources(self, dir, css_name):
        """Copy the template's static resources (js/css/fonts) into ``dir``.

        ``css_name`` selects the CSS bundle; missing files are reported with a
        warning instead of failing the render.
        """
        for template_dir in self.env.loader.searchpath:
            files = (
                os.path.join(template_dir, "resource", "js", "combined.js"),
                os.path.join(template_dir, "resource", "css", css_name),
                os.path.join(template_dir, "resource", "font", "apidoc.eot"),
                os.path.join(template_dir, "resource", "font", "apidoc.woff"),
                os.path.join(template_dir, "resource", "font", "apidoc.ttf"),
                os.path.join(template_dir, "resource", "font", "source-code-pro.eot"),
                os.path.join(template_dir, "resource", "font", "source-code-pro.woff"),
                os.path.join(template_dir, "resource", "font", "source-code-pro.ttf"),
            )
            for file in files:
                filename = os.path.basename(file)
                dirname = os.path.basename(os.path.dirname(file))
                if not os.path.exists(os.path.join(dir, dirname)):
                    os.makedirs(os.path.join(dir, dirname))
                if os.path.exists(file):
                    shutil.copyfile(file, os.path.join(dir, dirname, filename))
                else:
                    # FIX: Logger.warn is a deprecated alias of Logger.warning.
                    logging.getLogger().warning('Missing resource file "%s". If you run apidoc in virtualenv, run "%s"' % (filename, "python setup.py resources"))
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/service/template.py
template.py
from apidoc.lib.util.cast import to_boolean


class Merger():

    """Provide tool to merge elements
    """

    def merge_extends(self, target, extends, inherit_key="inherit", inherit=False):
        """Merge extended dicts.

        Recursively copies into ``target`` every key of ``extends`` that
        ``target`` does not define; lists are concatenated. A nested dict can
        opt out of inheritance by setting ``inherit_key`` to a falsy value
        (only honoured below the top level, i.e. when ``inherit`` is True).
        Mutates ``target`` in place; raises ValueError on type mismatch.
        """
        if isinstance(target, dict):
            if inherit and inherit_key in target and not to_boolean(target[inherit_key]):
                return
            if not isinstance(extends, dict):
                raise ValueError("Unable to merge: Dictionnary expected")

            for key in extends:
                if key not in target:
                    target[str(key)] = extends[key]
                else:
                    # Both sides define the key: merge recursively, marking
                    # that we are now below the top level.
                    self.merge_extends(target[key], extends[key], inherit_key, True)
        elif isinstance(target, list):
            if not isinstance(extends, list):
                raise ValueError("Unable to merge: List expected")
            target += extends

    def merge_sources(self, datas):
        """Merge sources files.

        ``datas`` is a list of parsed source structures. Lists are
        concatenated, dicts are merged key by key (recursively via this same
        method), equal scalar counts are rejected as conflicts. Raises
        ValueError/TypeError on missing or incompatible data.
        """
        datas = [data for data in datas if data is not None]
        if len(datas) == 0:
            raise ValueError("Data missing")
        if len(datas) == 1:
            return datas[0]

        if isinstance(datas[0], list):
            if len([x for x in datas if not isinstance(x, list)]) > 0:
                raise TypeError("Unable to merge: List expected")
            base = []
            for x in datas:
                base = base + x
            return base

        if isinstance(datas[0], dict):
            if len([x for x in datas if not isinstance(x, dict)]) > 0:
                raise TypeError("Unable to merge: Dictionnary expected")
            result = {}
            for element in datas:
                for key in element:
                    if key in result:
                        result[key] = self.merge_sources([result[key], element[key]])
                    else:
                        result[key] = element[key]
            return result

        # Scalars cannot be combined: two or more remaining values conflict.
        if len([x for x in datas if isinstance(x, (dict, list))]) > 0:
            raise TypeError("Unable to merge: List not expected")
        raise ValueError("Unable to merge: Conflict")

    def merge_configs(self, config, datas):
        """Merge configs files.

        For every key of ``config``, nested dicts are merged recursively and
        scalar values are overridden by the last file that defines the key.
        Raises TypeError when any input is not a dict.
        """
        if not isinstance(config, dict) or len([x for x in datas if not isinstance(x, dict)]) > 0:
            raise TypeError("Unable to merge: Dictionnary expected")

        for key, value in config.items():
            others = [x[key] for x in datas if key in x]
            if len(others) > 0:
                if isinstance(value, dict):
                    config[key] = self.merge_configs(value, others)
                else:
                    # Last definition wins for scalar settings.
                    config[key] = others[-1]
        return config
ApiDoc
/ApiDoc-1.4.0.tar.gz/ApiDoc-1.4.0/apidoc/service/merger.py
merger.py
import base64
import json
from contextlib import contextmanager
from datetime import datetime
import random
import time
from typing import Dict, BinaryIO, Union
from hmac import HMAC
from hashlib import sha1, sha256
from urllib import parse

from requests.auth import AuthBase

from api_layer.api import BasicApi, Action, Protocol


class TencentAuth(AuthBase):
    """Request signer for Tencent Cloud APIs.

    Supports three modes, selected via context managers / default:
    - default: COS-style "q-sign-algorithm" signing (custom_auth)
    - basic(): legacy HmacSHA1/SHA256 query-string signing (signature applied
      by the caller, see TencentCloudApi.signature_request)
    - use_v3(): TC3-HMAC-SHA256 signing (v3_auth)
    """

    sign_key = ""        # derived COS signing key
    key_time = ""        # "start;end" validity window of sign_key
    expire_time = -1     # unix time at which sign_key expires
    sign_mode = None     # None | "basic" | "v3"
    sign_method = None   # digest name used by basic mode (e.g. "SHA1")
    service_name = None  # service for v3 credential scope (e.g. "scf")

    def __init__(self, config):
        self.expire_seconds = config.expire_seconds or 10
        self.secret_key = config.secret_key or ""
        self.secret_id = config.secret_id or ""
        self.mode = config.use_mode or "headers"

    def build(self):
        """(Re)build the COS signing key for the next expire window."""
        time_now = int(time.time())
        end_time_stamp = time_now + self.expire_seconds
        self.expire_time = end_time_stamp
        key_time = f"{time_now};{end_time_stamp}"
        self.sign_key = HMAC(
            self.secret_key.encode("utf8"),
            key_time.encode("utf8"),
            "sha1"
        ).hexdigest().lower()
        self.key_time = key_time

    def build_kv(self, path_url):
        """Build the canonical query-key list and key=value string from a path URL.

        Returns ("", "") when the URL has no query string.
        """
        # FIX: maxsplit must be 1 — split("?", 2) yields three parts for URLs
        # containing two "?" and then silently dropped the query.
        plist = path_url.split("?", 1)
        _base = plist[1] if len(plist) == 2 else None
        if not _base:
            return "", ""
        _param = {}
        for _signal_param in _base.split("&"):
            if "=" in _signal_param:
                # FIX: split("=", 2) crashed the 2-tuple unpack on values
                # containing "=" (e.g. base64); only split on the first "=".
                k, v = tuple(_signal_param.split("=", 1))
            else:
                k = _signal_param
                v = ""
            if k not in _param:
                _param[k] = v
            else:
                # Repeated keys accumulate into a list.
                if isinstance(_param[k], list):
                    _param[k].append(v)
                else:
                    _param[k] = [_param[k], v]
        klist = sorted(_param.keys())  # lexicographic key order
        _values = []
        for k in klist:
            _p = _param.get(k, "")
            if isinstance(_p, list):
                _p = sorted(_p)
                # FIX: was f"{i}={_p}", which rendered "value=<whole list>"
                # pairs; each repeated key must be rendered as "key=value".
                _values.append("&".join([f"{k}={i}" for i in _p]))
            else:
                _values.append(f"{k}={_p}")
        _value = "&".join(_values)
        # XXX: known flaw — lowering the joined key list can disagree with the
        # (case-sensitive) sort order used above.
        _k = ";".join(klist).lower()
        return _k, _value

    def build_header_kv(self, headers):
        """Build the canonical header-key list and header k=v string."""
        _klist = sorted(headers.keys())
        _values = []
        for k in _klist:
            _v = headers.get(k, "")
            _values.append(f"{k.lower()}={parse.quote(_v, safe=[])}")
        _value = "&".join(_values)
        _k = ";".join(_klist).lower()
        return _k, _value

    def use_signature(self, signature: Dict[str, str], r):
        """Attach the computed signature to the request, as header or query args."""
        if self.mode == "headers":
            _vs = []
            for k, v in signature.items():
                _vs.append(f"{k}={v}")
            r.headers["Authorization"] = "&".join(_vs)
        elif self.mode == "args":
            r.prepare_url(r.url, signature)

    def custom_auth(self, r):
        """Sign a prepared request with the COS "q-sign-algorithm" scheme."""
        if time.time() > self.expire_time:
            self.build()
        pk, v = self.build_kv(r.path_url)
        hk, hv = self.build_header_kv(r.headers)
        http_string = "\n".join([
            r.method.lower(),
            r.path_url.split("?")[0],
            v,
            hv,
            ""])
        signed_string = "\n".join([
            "sha1",
            self.key_time,
            sha1(http_string.encode("utf8")).hexdigest().lower(),
            ""
        ])
        signed_header = HMAC(
            self.sign_key.encode("utf8"),
            signed_string.encode("utf8"),
            "sha1"
        ).hexdigest().lower()
        signature = {
            "q-sign-algorithm": "sha1",
            "q-ak": self.secret_id,
            "q-sign-time": self.key_time,
            "q-key-time": self.key_time,
            "q-header-list": hk,
            "q-url-param-list": pk,
            "q-signature": signed_header
        }
        self.use_signature(signature, r)
        return r

    def v3_auth(self, r):
        """Sign a prepared request with the TC3-HMAC-SHA256 scheme."""
        body_dict = json.loads(r.body or "{}")
        method = r.method.upper()
        path_url = r.path_url.split("?")
        uri = path_url[0]
        query = path_url[1] if len(path_url) != 1 else ""
        # Canonical headers: lowercase names, sorted, "name:value\n" lines.
        hkv = list(zip(
            [i.lower() for i in r.headers.keys()],
            r.headers.values()))
        hkv.sort()
        cheaders = "\n".join([f"{k}:{v.lower()}" for k, v in hkv]) + "\n"
        signed_headers = ";".join([i for i, _ in hkv])
        body = b"" if r.body is None else r.body \
            if isinstance(r.body, bytes) else r.body.encode("utf8")
        hash_request_payload = sha256(body).hexdigest()
        ws = "\n".join((
            method, uri, query, cheaders, signed_headers, hash_request_payload))
        al = "TC3-HMAC-SHA256"
        rt = int(time.time())
        # Credential scope: date/service/tc3_request
        cs = datetime.utcfromtimestamp(rt).strftime("%Y-%m-%d") + "/" + \
            self.service_name.lower() + "/tc3_request"
        hcr = sha256(ws.encode("utf8")).hexdigest()
        ws2 = "\n".join((al, str(rt), cs, hcr))
        # Key derivation chain: secret -> date -> service -> tc3_request.
        sd = HMAC(
            ("TC3" + self.secret_key).encode("utf8"),
            datetime.utcfromtimestamp(rt).strftime("%Y-%m-%d").encode("utf8"),
            "SHA256").digest()
        ss = HMAC(
            sd, self.service_name.encode("utf8"), "SHA256").digest()
        ss2 = HMAC(
            ss, b"tc3_request", "SHA256").digest()
        signature = HMAC(
            ss2, ws2.encode("utf8"), "SHA256").hexdigest()
        authorization = al + " Credential=" + \
            self.secret_id + "/" + cs + ", " + \
            "SignedHeaders=" + signed_headers + \
            ", Signature=" + signature
        # Action/Version/Region may come from the query string or the JSON body.
        qs = dict([_q.split("=", 1) for _q in query.split("&") if query])
        qs.update(body_dict)
        r.headers["Authorization"] = authorization
        r.headers["X-TC-Action"] = qs.get("Action")
        r.headers["X-TC-Version"] = qs.get("Version")
        r.headers["X-TC-Region"] = qs.get("Region")
        r.headers["X-TC-Timestamp"] = str(rt)
        return r

    @contextmanager
    def basic(self, sign_method="SHA1"):
        """Context manager selecting legacy query-string signing."""
        self.sign_mode = "basic"
        self.sign_method = sign_method
        # FIX: reset in a finally block so an exception inside the context
        # cannot leave the signer stuck in "basic" mode.
        try:
            yield
        finally:
            self.sign_mode = None
            self.sign_method = None

    @contextmanager
    def use_v3(self, service_name):
        """Context manager selecting TC3-HMAC-SHA256 signing for ``service_name``."""
        self.sign_mode = "v3"
        self.service_name = service_name
        # FIX: reset in a finally block (see basic()).
        try:
            yield
        finally:
            self.sign_mode = None

    def __call__(self, r):
        if self.sign_mode == "v3":
            return self.v3_auth(r)
        elif self.sign_mode == "basic":
            # Basic mode signs via signature_request(); nothing to add here.
            return r
        else:
            return self.custom_auth(r)


class TencentCloudApi(BasicApi):
    """Tencent Cloud API bindings: COS object storage, SCF functions, CNS DNS."""

    name = "tencent_api"
    url = "https://service.cos.myqcloud.com"
    protocol = Protocol.http

    def __init__(self, config):
        self.auth = TencentAuth(config)

    @Action
    def cos_list_buckets(
            self,
            region: Union[None, str] = None
    ):
        """List buckets in the given region, or in all regions when None.

        :param region: region name
        """
        url = None
        if region is not None:
            # FIX: regional COS endpoint is cos.<region>.myqcloud.com
            # (was the typo'd domain "mycloud.com").
            url = f"https://cos.{region}.myqcloud.com"
        return {
            "url": url,
            "headers": {
                "date": datetime.now().isoformat()
            },
            "params": {}
        }

    @Action(action_type="PUT")
    def cos_put_object(
            self,
            object_key: str,
            bucket_name: str,
            app_id: str,
            region: str,
            content: BinaryIO,
            content_type: str = "text/plain"
    ):
        """Upload an object to COS.

        :param object_key: object path
        :param bucket_name: bucket name
        :param app_id: application id
        :param region: region name
        :param content: file content
        :param content_type: MIME type of the content
        """
        url = f"https://{bucket_name}-{app_id}.cos.{region}.myqcloud.com"
        return {
            "url": url,
            "path": object_key,
            "headers": {
                "content-type": content_type
            },
            "data": content
        }

    @Action(action_type="GET")
    def cos_get_object(
            self,
            object_key: str,
            bucket_name: str,
            app_id: str,
            region: str
    ):
        """Download an object from COS.

        :param object_key: object path
        :param bucket_name: bucket name
        :param app_id: application id
        :param region: region name
        """
        url = f"https://{bucket_name}-{app_id}.cos.{region}.myqcloud.com"
        return {
            "url": url,
            "path": object_key,
        }

    @Action(action_type="GET")
    def scf_put_function(
            self,
            region: str,
            handler: str,
            func_name: str,
            cos_bucket_name: str = "",
            cos_object_key: str = "",
            cos_bucket_region: str = "",
            zip_file: str = "",
            namespace: str = "",
            env_id: str = "",
            publish: str = "False",
            code: str = "",
            code_source: str = ""
    ):
        """Update the code of an SCF function.

        :param region: region the function lives in
        :param handler: entry point of the function
        :param func_name: function name
        :param cos_bucket_name: COS bucket holding the code package
        :param cos_object_key: object key of the code package
        :param cos_bucket_region: region of the COS bucket
        :param zip_file: base64-encoded zip file content
        :param namespace: SCF namespace
        :param env_id: environment id
        :param publish: "True" deploys directly; default "False"
        :param code: source code
        :param code_source: origin of the code (zip, cos, git)
        """
        url = "https://scf.tencentcloudapi.com"
        basic_dict = {
            "Action": "UpdateFunctionCode",
            "Version": "2018-04-16",
            "Region": region,
            "Handler": handler,
            "FunctionName": func_name,
        }
        extra_param_set = (
            ("CosBucketName", cos_bucket_name),
            ("CosObjectName", cos_object_key),
            ("CosBucketRegion", cos_bucket_region),
            ("ZipFile", zip_file),
            ("Namespace", namespace),
            ("EnvId", env_id),
            ("Publish", publish),
            ("Code", code),
            ("CodeSource", code_source)
        )
        for k, v in extra_param_set:
            if v:
                basic_dict[k] = v
        return {
            "url": url,
            "params": basic_dict,
            "headers": {
                "Host": "scf.tencentcloudapi.com",
                "Content-Type": "application/x-www-form-urlencoded"
            },
            "data": ""
        }

    @Action
    def dns_record_list(
            self,
            domain: str,
            offset: int = 0,
            length: int = 20,
            sub_domain: Union[None, str] = None,
            record_type: Union[None, str] = None,
            q_project_id: Union[None, int] = None
    ):
        """List DNS records of a domain."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "RecordList",
            "offset": offset,
            "domain": domain,
            "length": length
        }
        extra_param_set = (
            ("subDomain", sub_domain),
            ("recordType", record_type),
            ("qProjectId", q_project_id)
        )
        return self.dns_build_params(url, "get", basic_dict, extra_param_set)

    @Action
    def dns_record_create(
            self,
            domain: str,
            sub_domain: str,
            record_type: str,
            record_line: str,
            value: str,
            ttl: int = 600,
            mx: Union[None, int] = None
    ):
        """Create a DNS record."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "RecordCreate",
            "domain": domain,
            "subDomain": sub_domain,
            "recordType": record_type,
            "recordLine": record_line,
            "value": value
        }
        extra_param_set = (
            ("ttl", ttl),
            ("mx", mx)
        )
        return self.dns_build_params(url, "get", basic_dict, extra_param_set)

    @Action
    def dns_record_modify(
            self,
            domain: str,
            record_id: int,
            sub_domain: str,
            record_type: str,
            record_line: str,
            value: str,
            ttl: int = 600,
            mx: Union[None, int] = None
    ):
        """Modify an existing DNS record."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "RecordModify",
            "recordId": record_id,
            "domain": domain,
            "subDomain": sub_domain,
            "recordType": record_type,
            "recordLine": record_line,
            "value": value
        }
        extra_param_set = (
            ("ttl", ttl),
            ("mx", mx)
        )
        return self.dns_build_params(url, "get", basic_dict, extra_param_set)

    @Action
    def dns_record_status(
            self,
            domain: str,
            record_id: int,
            status: str
    ):
        """Enable or disable a DNS record."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "RecordStatus",
            "recordId": record_id,
            "domain": domain,
            "status": status
        }
        extra_param_set = ()
        return self.dns_build_params(url, "get", basic_dict, extra_param_set)

    @Action
    def dns_record_delete(
            self,
            domain: str,
            record_id: int
    ):
        """Delete a DNS record."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "RecordDelete",
            "domain": domain,
            "recordId": record_id
        }
        return self.dns_build_params(url, "get", basic_dict, ())

    @Action
    def dns_domain_create(
            self,
            domain: str,
            project_id: Union[None, int] = None
    ):
        """Register a domain with CNS."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "DomainCreate",
            "domain": domain
        }
        extra_param_set = (
            ("projectId", project_id),
        )
        return self.dns_build_params(url, "get", basic_dict, extra_param_set)

    @Action
    def dns_domain_status(
            self,
            domain: str,
            status: str
    ):
        """Enable or disable a domain."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "SetDomainStatus",
            "domain": domain,
            "status": status
        }
        return self.dns_build_params(url, "get", basic_dict)

    @Action
    def dns_domain_list(
            self,
            offset: int = 0,
            length: int = 20,
            q_project_id: Union[None, int] = None
    ):
        """List registered domains."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "DomainList",
            "offset": offset,
            "length": length
        }
        extra_param_set = (
            ("qProjectId", q_project_id),
        )
        return self.dns_build_params(url, "get", basic_dict, extra_param_set)

    @Action
    def dns_domain_delete(
            self,
            domain: str,
    ):
        """Delete a domain."""
        url = "cns.api.qcloud.com/v2/index.php"
        basic_dict = {
            "Action": "DomainDelete",
            "domain": domain
        }
        return self.dns_build_params(url, "get", basic_dict)

    def dns_build_params(self, url, method, basic_dict, extra_param_set=()):
        """Assemble and sign a CNS request payload.

        Optional parameters with falsy values are omitted.
        """
        url_base = "https://" + url
        for k, v in extra_param_set:
            if v:
                basic_dict[k] = v
        basic_dict = self.signature_request(
            method,
            "cns.api.qcloud.com/v2/index.php",
            basic_dict)
        return {
            "url": url_base,
            "params": basic_dict,
            "headers": {},
            "path": ""
        }

    def signature_request(self, method, url, params):
        """Sign ``params`` with the legacy Hmac<METHOD> query-string scheme.

        NOTE(review): relies on ``self.auth.sign_method`` being set, i.e. this
        must run inside ``auth.basic()`` — confirm callers honour that.
        """
        params["Timestamp"] = int(time.time())
        params["Nonce"] = random.randint(10000, 99999)
        params["SignatureMethod"] = f"Hmac{self.auth.sign_method}"
        params["SecretId"] = self.auth.secret_id
        klist = sorted(params.keys())
        plist = []
        for k in klist:
            plist.append(f"{k}={params.get(k)}")
        # Build the canonical string-to-sign.
        src_str = f"{method.upper()}{url}?{'&'.join(plist)}"
        sign_str = base64.b64encode(
            HMAC(
                self.auth.secret_key.encode("utf8"),
                src_str.encode("utf8"),
                self.auth.sign_method.lower()
            ).digest()
        )
        params["Signature"] = sign_str
        return params
ApiLayer
/ApiLayer-0.0.6.tar.gz/ApiLayer-0.0.6/src/api_layer/tencent_cloud.py
tencent_cloud.py
from copy import deepcopy
from enum import auto, Flag
from functools import partial
from typing import Any, Dict, Union, List

from flask import Flask
from requests import Session, Request


class Hooks:
    """Wrap a response-hook callable so it can later be bound to an API instance."""

    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    def bind_instance(self, instance):
        # Partially apply the owning API instance as the hook's first argument.
        self.func = partial(self.func, instance)


class ActionState(Flag):
    """Lifecycle state of an action result."""
    success = auto()
    failed = auto()
    pending = auto()


class MetaAction(type):
    def __call__(cls, *args, **kwargs):
        # Support bare decoration (@Action on a function) as well as
        # parameterized decoration (@Action(name=..., action_type=...)).
        if args and callable(args[0]):
            return super().__call__(name=args[0].__name__)(args[0])
        else:
            return super().__call__(*args, **kwargs)


class Action(metaclass=MetaAction):
    """Declarative description of one API call, used as a method decorator.

    The decorated function returns a payload dict (url/path/headers/params/...)
    which is merged into ``action_payload`` when the action is executed.
    """

    instance = None
    action_name: str = ""                 # action name (defaults to function name)
    action_type: str = "GET"              # HTTP method
    action_target: str = ""               # action target (unused here)
    action_payload: Dict[str, Any] = {
        "hooks": [],
        "url": ""
    }                                     # class-level default payload shape
    args = ()
    kwargs = {}

    def __init__(
            self,
            name: str = None,
            path: str = "",
            action_type: str = "GET"):
        self.action_name = name
        # BUG FIX: build a fresh per-instance payload instead of mutating the
        # class-level dict — the original shared one "hooks" list and "path"
        # entry across every Action in the process, so hooks registered on one
        # action leaked into all others.
        self.action_payload = {"hooks": [], "url": "", "path": path}
        self.action_type = action_type

    def __call__(self, *args, **kwargs):
        func = None
        if args:
            func = args[0]
        if self.action_name is None:
            self.action_name = func.__name__
        if callable(func):
            # Decoration: capture the payload-building function.
            self.func = func
        else:
            # Invocation: return a copy carrying the call arguments.
            new_action = deepcopy(self)
            new_action.args = args
            new_action.kwargs = kwargs
            return new_action
        return self

    def __repr__(self):
        return f"<Action {self.action_name} at {hex(id(self))}>"

    def build_payload(self):
        """Merge the decorated function's payload dict into action_payload."""
        self.action_payload.update(
            self.func(self.instance, *self.args, **self.kwargs))

    def bind_instance(self, instance):
        """Bind the owning API instance to this action and its hooks."""
        self.instance = instance
        for hook in self.action_payload.get("hooks", []):
            hook.bind_instance(instance)

    def hook(self, hook):
        """Register a response hook for this action."""
        self.action_payload["hooks"].append(hook)


class ActionResult:
    """Thin wrapper around the raw result of an executed action."""

    state: ActionState = ActionState.pending

    def __init__(self, result_basic):
        self.basic_result = result_basic

    @property
    def result(self):
        return self.basic_result


class Protocol(Flag):
    """Transport protocol an API speaks."""
    http = auto()
    tcp = auto()
    udp = auto()
    undefined = auto()
class ProtocolTool:
    """Abstract transport; subclasses register by declaring a `protocol` flag."""

    protocol = Protocol.undefined

    @classmethod
    def build(cls, protocol: Protocol):
        """Return an instance of the first subclass matching `protocol`.

        Returns None when no subclass matches.
        """
        ins = None
        for _subcls in cls.__subclasses__():
            if _subcls.protocol in protocol:
                ins = _subcls()
                break
        return ins

    def bind_config(self, config: Dict[str, Any]):
        raise NotImplementedError()


class HttpProtocolTool(ProtocolTool):
    """HTTP transport implemented on top of `requests` prepared requests."""

    protocol = Protocol.http
    basic_url = ""      # base url
    basic_auth = None   # authentication
    basic_hooks = []    # base hooks (replaced, never mutated, in bind_config)

    def bind_config(self, config: Dict[str, Any]) -> ProtocolTool:
        """Copy base url / auth / hooks from an api's config dict; returns self."""
        self.basic_url = config.get("basic_url", "")
        self.basic_auth = config.get("basic_auth", None)
        self.basic_hooks = config.get("basic_hooks", [])
        return self

    def build_request(self, method, payload):
        """Build a PreparedRequest from an action payload.

        `url`, `path` and `hooks` are popped out of the payload; everything
        else is forwarded to `requests.Request` as keyword arguments.
        """
        # Payload url wins over the configured base url.
        url = payload.pop("url", "") or self.basic_url
        req = Request(
            method=method,
            url=url + payload.pop("path", "/"),
            auth=self.basic_auth,
            hooks={
                "response": self.basic_hooks + payload.pop("hooks", [])
            },
            **payload
        )
        prepped = req.prepare()
        return prepped

    def do(self, action: Action):
        """Execute an action: build its payload, send it, return the response.

        Returns None when `action` is None (e.g. unknown action name).
        """
        if action is None:
            return None
        action.build_payload()
        _payload = action.action_payload
        _action_type = action.action_type
        prepared_req = self.build_request(_action_type, _payload)
        with Session() as s:
            r = s.send(prepared_req)
        return r


class MetaBasicApi(type):
    """Collect `Hooks` and `Action` class attributes into `hooks` / `actions`,
    and bind them to each new instance on construction."""

    def __new__(cls, cls_name, cls_bases, cls_dict):
        hooks = []
        actions = []
        for k, v in cls_dict.items():
            if isinstance(v, Hooks):
                hooks.append(v)
            elif isinstance(v, Action):
                # Fall back to the attribute name when the action is unnamed.
                actions.append((v.action_name or k, v))
        cls_dict["hooks"] = hooks
        cls_dict["actions"] = dict(actions)
        new_cls = super().__new__(cls, cls_name, cls_bases, cls_dict)
        return new_cls

    def __call__(cls, *args, **kwargs):
        # After normal construction, point every action/hook at the instance.
        ins = super().__call__(*args, **kwargs)
        for action in ins.actions.values():
            action.bind_instance(ins)
        for hook in ins.hooks:
            hook.bind_instance(ins)
        return ins


class BasicApi(metaclass=MetaBasicApi):
    """Base class for declarative apis; subclass and declare Action attributes."""

    protocol = Protocol.http
    url = ""
    auth = None
    actions = {}

    @classmethod
    def bind_flask_app(cls, app: Flask):
        """Instantiate every subclass and expose them as `app.api.<name>`.

        NOTE(review): relies on `app.app_config` and a `name` attribute on each
        subclass instance — neither is defined in this module; confirm against
        callers.
        """
        api = {}
        for SubCls in cls.__subclasses__():
            sub_instance = SubCls(app.app_config)
            api[sub_instance.name] = sub_instance
        app.api = type("Api", (), api)()

    def do_action(self, action: Union[str, Action]) -> ActionResult:
        """
        Execute an action.
        :param action: the defined action (or its name)
        :return: ActionResult wrapping the raw execution result
        """
        if isinstance(action, str):
            action: Action = self.actions.get(action, None)
        action_result = self.protocol_tools.do(action)
        return ActionResult(action_result)

    def do_actions(
            self,
            actions: List[Union[str, Action]]) -> List[ActionResult]:
        """
        Execute a batch of actions.
        :param actions: list of actions (or action names)
        :return: list of ActionResults, in input order
        """
        results = []
        for action in actions:
            print(action)
            results.append(self.do_action(action))
        return results

    @property
    def config(self):
        # `hooks` is injected by MetaBasicApi at class creation.
        return {
            "basic_auth": self.auth,
            "basic_url": self.url,
            "basic_hooks": self.hooks
        }

    @property
    def protocol_tools(self):
        # Built fresh per access: a configured transport for this api's protocol.
        return ProtocolTool.build(self.protocol).bind_config(self.config)
ApiLayer
/ApiLayer-0.0.6.tar.gz/ApiLayer-0.0.6/src/api_layer/api.py
api.py
# Simple League of Legends Wrapper in Python

It creates a small sqlite database where it saves multiple Api-Keys. Every request returns the response from the server and the Api-Key object. This makes it possible to save data like the encryptedAccountId with the right Api-Key-ID, and for further requests where you use the encryptedAccountId you provide the corresponding Api-Key-ID.

## installation

``pip install ApiLeagueOfLegends``

## usage

Save your Api-Keys to the local database

```python
from league_of_legends_api.Database.database import Database

keys = ['RGAPI-XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX',
        'RGAPI-XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX',
        'RGAPI-XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX']

db = Database()
db.save_keys_to_database(keys)
```

Use the saved keys

````python
from league_of_legends_api.Database.database import Database
from league_of_legends_api.Api.leaugue_api import SummonerV4

db = Database()
keys = db.load_keys_in()

summoner = SummonerV4(keys, region='euw1')
response, key = summoner.get_summoner_by_name("SaItySurprise")
print(response, key)
````

``{'id': str, 'accountId': str, 'puuid': str, 'name': 'SaItySurprise', 'profileIconId': 3552, 'revisionDate': 1556578547000, 'summonerLevel': 154, 'api_key_id': 4} ApiKey: key:RGAPI-XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX, id: 4``
ApiLeagueOfLegends
/ApiLeagueOfLegends-0.6.tar.gz/ApiLeagueOfLegends-0.6/README.md
README.md
import time
import requests
import json


class LeagueRequest:
    """Thin HTTP client for the Riot Games API with rate-limit bookkeeping.

    Rate-limit headers are stored on the Api-Key object passed in, so each
    key tracks its own Retry-After state.
    """

    def __init__(self, region):
        self.url = f"https://{region}.api.riotgames.com"
        self.session = requests.Session()

    def _check_response_header(self, key):
        """should check the rate limits"""
        print(f'X-Rate-Limit-Type: {key.x_rate_limit_type}')
        print(f'Next retry in : {key.retry_after}')
        # Block until the server-mandated retry window has elapsed.
        time.sleep(int(key.retry_after))

    def _request_handling(self, url, key):
        """Just the request"""
        try:
            response = self.session.get(self.url + url, headers=json.loads(key.header))
        except Exception as e:
            # Network-level failure: report and signal "no response".
            print("request exception: " + str(e))
            return None
        self._update_api_key_headers(key, response)
        return response

    def _update_api_key_headers(self, key, response):
        """Copy rate-limit headers from the response onto the Api-Key object."""
        header = response.headers
        # BUGFIX: the original condition was
        #   if 'X-Rate-Limit-Type' and 'Retry-After' in header.keys():
        # where the first operand is a truthy string literal, so only
        # 'Retry-After' was actually tested and header['X-Rate-Limit-Type']
        # could raise KeyError.  Test both keys explicitly.
        if 'X-Rate-Limit-Type' in header and 'Retry-After' in header:
            key.retry_after = header['Retry-After']
            key.x_rate_limit_type = header['X-Rate-Limit-Type']

    def send_request(self, url, key):
        """sends the request to RiotsGames"""
        response = self._request_handling(url, key)
        if response is not None:
            # with only response: it didnt work by response code like 404 or 403 or 429
            if response.status_code == 403:
                print('Update you API-Key !, message: ' + str(response.json()))
                exit()
                return None
            while response.status_code == 429:
                # Rate limited: wait as instructed by the key's headers, retry.
                self._check_response_header(key)
                response = self._request_handling(url, key)
            if response.status_code == 404:
                print(f'Status Code 404: {response.json()}')
                return None
            elif response:
                if response.status_code == 200:
                    response = response.json()
                    # Tag the payload with the key that produced it.
                    response['api_key_id'] = key.id
                    return response
        return None

    @classmethod
    def _json_request_handling(cls, url):
        """Just the request"""
        try:
            response = requests.get(url)
        except Exception as e:
            print("request exception: " + str(e))
            return None
        return response

    @classmethod
    def send_json_request(cls, url):
        """sends the request to DataDragon"""
        response = cls._json_request_handling(url)
        if response is not None:
            # with only response: it didnt work by response code like 404 or 403 or 429
            if response.status_code == 403:
                print('Update you API-Key !, message: ' + str(response.json()))
                exit()
                return None
            while response.status_code == 429:
                response = cls._json_request_handling(url)
            if response.status_code == 404:
                print(f'Status Code 404: {response.json()}')
                return None
            elif response:
                if response.status_code == 200:
                    response = response.json()
                    return response
        return None
ApiLeagueOfLegends
/ApiLeagueOfLegends-0.6.tar.gz/ApiLeagueOfLegends-0.6/src/league_of_legends_api/Tools/leaugue_request.py
leaugue_request.py
from league_of_legends_api.Tools.leaugue_request import LeagueRequest
from multiprocessing.dummy import Pool
import random


class Base:
    """Shared plumbing: one LeagueRequest client plus the pool of Api-Keys."""

    def __init__(self, api_keys, region="euw1", **kwargs):
        self.request = LeagueRequest(region)
        self.keys = api_keys

    def _get_random_key(self):
        return random.choice(self.keys)

    def _get_key_by_id(self, _id):
        # Fall back to a random key when the id is unknown.
        for key in self.keys:
            if key.id == _id:
                return key
        print(f'Key with id {_id} no valid, using random key')
        return self._get_random_key()


class ChampionMasteryV4(Base):
    """Champion mastery V4"""

    def __init__(self, api_keys, **kwargs):
        Base.__init__(self, api_keys, **kwargs)

    def get_champion_masteries_by_summoner_id(self, summoner_id, api_key_id):
        """All champion Masterypoints by summoner ID"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/champion-mastery/v4/champion-masteries/by-summoner/{summoner_id}',
                                         key), key

    def get_champion_mastery_by_summoner_id_and_champion_id(self, summoner_id, champion_id, api_key_id):
        """champion mastery points by summoner id and champion ID"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/champion-mastery/v4/champion-masteries/by-summoner/{summoner_id}/'
                                         f'by-champion/{champion_id}', key), key

    def get_summoner_mastery_score_by_summoner_id(self, summoner_id, api_key_id):
        """champion Mastery Score by Summoner ID"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/champion-mastery/v4/scores/by-summoner/{summoner_id}', key), key


class SummonerV4(Base):
    """Summoner"""

    def __init__(self, api_keys, **kwargs):
        Base.__init__(self, api_keys, **kwargs)

    def get_summoner_by_account_id(self, account_id, api_key_id):
        """Get a summoner by account ID"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/summoner/v4/summoners/by-account/{account_id}', key), key

    def get_summoner_by_name(self, summoner_name):
        """Get summoner by summoner name"""
        key = self._get_random_key()
        return self.request.send_request(f'/lol/summoner/v4/summoners/by-name/{summoner_name}', key), key

    def get_summoner_by_id(self, summoner_id, api_key_id):
        """Get summoner by summoner ID"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/summoner/v4/summoners/{summoner_id}', key), key

    def get_summoner_by_puuid(self, puuid, api_key_id):
        """Get summoner by summoner PUUID"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/summoner/v4/summoners/by-puuid/{puuid}', key), key


class ChampionV3(Base):
    """ChampionV3"""

    def __init__(self, api_keys, **kwargs):
        Base.__init__(self, api_keys, **kwargs)

    def get_champion_rotation(self):
        """Gives you the free champs to play"""
        key = self._get_random_key()
        return self.request.send_request('/lol/platform/v3/champion-rotations', key)


class LolStatusV3(Base):
    """LoL Status V3"""

    def __init__(self, api_keys, **kwargs):
        Base.__init__(self, api_keys, **kwargs)

    def get_shared_data(self):
        """Gives you the current Status of all League Services
        Requests to this API are not counted against the application Rate Limits."""
        key = self._get_random_key()
        return self.request.send_request('/lol/status/v3/shard-data', key)


class LeagueV4(Base):

    def __init__(self, api_keys, **kwargs):
        Base.__init__(self, api_keys, **kwargs)

    def get_grandmaster_leagues_by_queue_id(self, queue):
        """Returns List of Grandmaster Players by queue.
        :param queue: 'RANKED_SOLO_5x5', 'RANKED_FLEX_SR', 'RANKED_FLEX_TT'
        :return: LeagueListDTO
        """
        key = self._get_random_key()
        return self.request.send_request(f'/lol/league/v4/grandmasterleagues/by-queue/{queue}', key), key

    def get_master_leagues_by_queue_id(self, queue):
        """Returns List of Master Players by queue.
        :param queue: 'RANKED_SOLO_5x5', 'RANKED_FLEX_SR', 'RANKED_FLEX_TT'
        :return: LeagueListDTO
        """
        key = self._get_random_key()
        return self.request.send_request(f'/lol/league/v4/masterleagues/by-queue/{queue}', key), key

    def get_challenger_leagues_by_queue_id(self, queue):
        """Returns List of Challenger Players by queue.
        :param queue: 'RANKED_SOLO_5x5', 'RANKED_FLEX_SR', 'RANKED_FLEX_TT'
        :return: LeagueListDTO
        """
        key = self._get_random_key()
        return self.request.send_request(f'/lol/league/v4/challengerleagues/by-queue/{queue}', key), key

    def get_league_entries_by_summoner_id(self, summoner_id, api_key_id):
        """Returns a list of all Leagues for the Summoner ID
        :return: Set[LeagueEntryDTO]"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/league/v4/entries/by-summoner/{summoner_id}', key), key

    def get_league_entries_by_queue_tier_division(self, queue, tier, division, page=1):
        """Returns a list of all Summoners in the given league
        :param queue: 'RANKED_SOLO_5x5', 'RANKED_FLEX_SR', 'RANKED_FLEX_TT'
        :param tier: 'IRON', 'BRONZE', 'SILVER', 'GOLD', 'PLATINUM', 'DIAMOND'
        :param division: 'IV', 'III', 'II', 'I'
        :param page: 1 or higher, Starts with given page
        :return: Set[LeagueEntryDTO]
        """
        key = self._get_random_key()
        return self.request.send_request(f'/lol/league/v4/entries/{queue}/{tier}/{division}?page={page}', key), key

    def get_leagues_by_league_id(self, league_id):
        """
        Warning: Consistently looking up league ids that don't exist will result in a blacklist.
        :param league_id:
        :return: LeagueListDTO
        """
        key = self._get_random_key()
        return self.request.send_request(f'/lol/league/v4/leagues/{league_id}', key), key


class MatchV4(Base):

    def __init__(self, api_keys, **kwargs):
        Base.__init__(self, api_keys, **kwargs)

    def run_pool_request_match(self, list_of_match_ids):
        """Run Max 100 ids at the same time"""
        with Pool(100) as p:
            pm = p.imap_unordered(self.get_matches_by_match_id, list_of_match_ids)
            return [i for i in pm if i]

    def run_pool_request_timeline(self, list_of_match_ids):
        """Run Max 100 ids at the same time"""
        with Pool(100) as p:
            pm = p.imap_unordered(self.get_match_timeline_by_match_id, list_of_match_ids)
            return [i for i in pm if i]

    def get_matches_by_match_id(self, match_id):
        """"""
        key = self._get_random_key()
        return self.request.send_request(f'/lol/match/v4/matches/{match_id}', key), key

    def get_match_timeline_by_match_id(self, match_id):
        """Not all matches have timeline data"""
        key = self._get_random_key()
        return self.request.send_request(f'/lol/match/v4/timelines/by-match/{match_id}', key), key

    def get_match_list_by_account_id(self, account_id, api_key_id, **kwargs):
        """
        :param account_id:
        :param api_key_id:
        :param kwargs: champion, queue, season, endTime, beginTime, endIndex, beginIndex
        :return: MatchlistDto
        """
        # Render optional filters as a query string, e.g. '?queue=420&season=13'.
        optional = ''
        if kwargs:
            optional = '?' + '&'.join(['%s=%s' % (key, value) for (key, value) in kwargs.items()])
            print(optional)
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/match/v4/matchlists/by-account/{account_id}{optional}', key), key

    def get_matches_by_tournament_code(self, tournament_code):
        """"""
        key = self._get_random_key()
        return self.request.send_request(f'/lol/match/v4/matches/by-tournament-code/{tournament_code}/ids', key), key

    def get_match_by_match_id_tournament_code(self, match_id, tournament_code):
        """"""
        key = self._get_random_key()
        return self.request.send_request(f'/lol/match/v4/matches/{match_id}/by-tournament-code/{tournament_code}',
                                         key), key


class SpectatorV4(Base):

    def __init__(self, api_keys, **kwargs):
        Base.__init__(self, api_keys, **kwargs)

    def get_active_game_by_summoner_id(self, summoner_id, api_key_id):
        """"""
        key = self._get_key_by_id(api_key_id)
        return self.request.send_request(f'/lol/spectator/v4/active-games/by-summoner/{summoner_id}', key), key

    def get_featured_games(self):
        """"""
        key = self._get_random_key()
        return self.request.send_request('/lol/spectator/v4/featured-games', key), key


class ThirdPartyCodeV4(Base):

    def __init__(self, api_keys, **kwargs):
        # BUGFIX: the original called Base.__init__(api_keys, **kwargs) without
        # `self`, raising a TypeError on construction.
        Base.__init__(self, api_keys, **kwargs)

    def get_third_party_code_by_summoner_id(self, summoner_id, api_key_id):
        """"""
        key = self._get_key_by_id(api_key_id)
        # BUGFIX: removed the leading space in the path, which produced an
        # invalid URL ("...riotgames.com /lol/...").
        return self.request.send_request(f'/lol/platform/v4/third-party-code/by-summoner/{summoner_id}', key), key


class LeagueStaticDataDragon:
    """Static DataDragon content (no Api-Key required)."""

    @staticmethod
    def get_all_champions_static(language='de_DE', version=None):
        """All Champions if version is None it takes the newest one"""
        if version is None:
            response = LeagueRequest.send_json_request('https://ddragon.leagueoflegends.com/realms/na.json')
            version = response['n']['champion']
        return LeagueRequest.send_json_request(
            f'http://ddragon.leagueoflegends.com/cdn/{version}/data/{language}/champion.json')

    @staticmethod
    def get_all_items_static(language='de_DE', version=None):
        """All Items if version is None it takes the newest one"""
        if version is None:
            # BUGFIX: send_json_request already returns parsed json (a dict);
            # the original called .json() on it, raising AttributeError.
            response = LeagueRequest.send_json_request('https://ddragon.leagueoflegends.com/realms/na.json')
            version = response['n']['item']
        return LeagueRequest.send_json_request(
            f'http://ddragon.leagueoflegends.com/cdn/{version}/data/{language}/item.json')
ApiLeagueOfLegends
/ApiLeagueOfLegends-0.6.tar.gz/ApiLeagueOfLegends-0.6/src/league_of_legends_api/Api/leaugue_api.py
leaugue_api.py
import os
import sys
from sqlalchemy.sql import text
from typing import List
import sqlalchemy
from dotmap import DotMap


def log(msg: any) -> None:
    # All builder output goes to stderr so it interleaves with CLI logging.
    print(msg, file=sys.stderr)


log("Extended builder 2.0")  # using SQLAlchemy 2

"""
test

curl -X 'POST' \
  'http://localhost:5656/api/udfEmployeeInLocation/udfEmployeeInLocation' \
  -H 'accept: application/vnd.api+json' \
  -H 'Content-Type: application/json' \
  -d '{
  "location": "Sweden"
}'

returning this (array of strings, not json):

{'result': ["(1, 'Nikita', 'Sweden')", "(4, 'John', 'Sweden')"]}

expected this (verified for GA; alert: arrays of strings instead of objects):

{"result":[{"Id":1,"Location":"Sweden","Name":"Nikita"},{"Id":4,"Location":"Sweden","Name":"John"}]}
"""

sqlalchemy2 = True


class DotDict(dict):
    """ dot.notation access to dictionary attributes """
    # thanks: https://stackoverflow.com/questions/2352181/how-to-use-a-dot-to-access-members-of-dictionary/28463329
    __getattr__ = dict.get
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__


class TvfBuilder(object):
    """Generates api/tvf.py (models + services) for SQL Server Table Valued Functions."""

    def __init__(self, db_url, project_directory):
        self.db_url = db_url
        self.project_directory = project_directory
        self.number_of_services = 0
        self.tvf_services = []
        ''' TVFs have cols, SCFs do not '''

        # Header of the generated api/tvf.py file; class/service code is appended below.
        self.tvf_contents = """# coding: utf-8
from sqlalchemy.dialects.mysql import *
from sqlalchemy import Boolean, Column, DECIMAL, DateTime, Float, ForeignKey, Integer, LargeBinary, String, Table, Text, UniqueConstraint, text
from sqlalchemy import *
from sqlalchemy.orm import relationship
from sqlalchemy.sql.sqltypes import NullType
from sqlalchemy.ext.declarative import declarative_base
from flask_sqlalchemy import SQLAlchemy
from safrs import SAFRSAPI, jsonapi_rpc
from safrs import JABase, DB
import util

########################################################################################################################
# Classes describing database for SqlAlchemy ORM, initially created by schema introspection.
# from safrs import SAFRSBase

import safrs

Base = declarative_base()
metadata = Base.metadata

########################################################################################################################

"""

    def build_tvf_class(self, cols: List[DotDict]):
        """Append a sqlalchemy Table definition for one TVF's result columns."""
        self.tvf_services.append(cols[0].Function)
        self.tvf_contents += f't_{cols[0].Function} = Table( # define result for {cols[0].Function}\n'
        self.tvf_contents += f'\t"{cols[0].Function}", metadata,\n'
        col_count = 0
        for each_col in cols:
            self.tvf_contents += f'\tColumn("{each_col.Column}", '
            if each_col.Data_Type == "int":
                self.tvf_contents += f'Integer)'
            elif each_col.Data_Type == "nvarchar":
                self.tvf_contents += f'String({each_col.Char_Max_Length}))'
            else:  # TODO - support additional data types
                self.tvf_contents += f'String(8000))'
            col_count += 1
            # Comma between columns, close paren after the last one.
            if col_count < len(cols):
                self.tvf_contents += ",\n"
            else:
                self.tvf_contents += ")\n"
        self.tvf_contents += f'\n\n'

    def get_os_url(self, url: str) -> str:
        """ idiotic fix for windows (\ --> \\\\) """
        return url.replace('\\', '\\\\')

    def build_tvf_service(self, args: List[DotDict]):
        ''' sample service

        @staticmethod
        @jsonapi_rpc(http_methods=['POST'], valid_jsonapi=False)
        def udfEmployeeInLocation(location):
            """
            description: expose TVF - udfEmployeeInLocation
            args:
                location : value
            """
            sql_query = DB.text("SELECT * FROM udfEmployeeInLocation(:location)")
            use_mapping_rows = False
            if use_mapping_rows:
                mapping_rows = []
                with DB.engine.begin() as connection:
                    for dict_row in connection.execute(sql_query, dict(location=location)):
                        mapping_rows.append(dict_row._data)
                response = {"result": mapping_rows}
                return response
            with DB.engine.begin() as connection:
                query_result = connection.execute(sql_query, dict(location=location)).all()
                rows = util.rows_to_dict(query_result)
                return {"result": rows}
        '''
        # Only functions discovered by build_tvf_class (i.e. TVFs) get a service;
        # scalar functions show up in the args query too, and are skipped here.
        if args[0].ObjectName not in self.tvf_services:
            log(f'.. Skipping Scalar Value Function: {args[0].ObjectName}')
        else:
            self.tvf_contents += f'class {args[0].ObjectName}(JABase):\n'
            self.tvf_contents += f'\t"""\n\t\tdescription: define service for {args[0].ObjectName}\n\t"""\n\n'
            self.tvf_contents += f'\t_s_type = "{args[0].ObjectName}"\n\n'
            self.tvf_contents += f"\t@staticmethod\n"
            self.tvf_contents += f"\t@jsonapi_rpc(http_methods=['POST'], valid_jsonapi=False)\n"
            # def udfEmployeeInLocationWithName(location, Name):
            self.tvf_contents += f"\tdef {args[0].ObjectName}("
            arg_number = 0
            # ParameterName is None for TVFs that take no arguments.
            has_args = args[0].ParameterName is not None
            if has_args:
                for each_arg in args:
                    # [1:] strips the leading '@' from the SQL Server parameter name.
                    self.tvf_contents += each_arg.ParameterName[1:]
                    arg_number += 1
                    if arg_number < len(args):
                        self.tvf_contents += ", "
            self.tvf_contents += "):\n"
            self.tvf_contents += f'\t\t"""\n'
            self.tvf_contents += f"\t\tdescription: expose TVF - {args[0].ObjectName}\n"
            self.tvf_contents += f"\t\targs:\n"
            if has_args:
                for each_arg in args:
                    self.tvf_contents += f'\t\t\t{each_arg.ParameterName[1:]} : value\n'
            self.tvf_contents += f'\t\t"""\n'
            # sql_query = DB.text("SELECT * FROM udfEmployeeInLocationWithName(:location, :Name)")
            self.tvf_contents += f'\t\tsql_query = DB.text("SELECT * FROM {args[0].ObjectName}('  # :arg)")\n'
            arg_number = 0
            if has_args:
                for each_arg in args:
                    self.tvf_contents += ":" + each_arg.ParameterName[1:]
                    arg_number += 1
                    if arg_number < len(args):
                        self.tvf_contents += ", "
            self.tvf_contents += ')")\n'
            # query_result = connection.execute(sql_query, dict(location=location)).all()
            self.tvf_contents += f"\t\twith DB.engine.begin() as connection:\n"
            self.tvf_contents += f'\t\t\tquery_result = connection.execute(sql_query, dict('
            arg_number = 0
            if has_args:
                for each_arg in args:
                    self.tvf_contents += each_arg.ParameterName[1:] + "=" + each_arg.ParameterName[1:]
                    arg_number += 1
                    if arg_number < len(args):
                        self.tvf_contents += ", "
            self.tvf_contents += ")).all()\n"
            self.tvf_contents += "\t\t\trows = util.rows_to_dict(query_result)\n"
            self.tvf_contents += '\t\t\tresponse = {"result": rows}\n'
            self.tvf_contents += f'\t\treturn response\n'
            self.tvf_contents += f'\n\n'

    def write_tvf_file(self):
        """ write tvf_contents -> api/tvf.py """
        file_name = self.get_os_url(self.project_directory + '/api/tvf.py')
        tvf_file = open(file_name, 'w')
        tvf_file.write(self.tvf_contents)
        tvf_file.close()

    def append_expose_services_file(self):
        """ append import to -> append_expose_services_file """
        import_statement = f'\n\n from api import tvf\n'
        import_statement += f' tvf.expose_tvfs(api)\n'
        file_name = self.get_os_url(self.project_directory + '/api/customize_api.py')
        expose_services_file = open(file_name, 'a')
        expose_services_file.write(import_statement)
        expose_services_file.close()

    def run(self):
        """ call by ApiLogicServer CLI -- scan db_url schema for TVFs, create api/tvf.py

        for each TVF:
            class t_<TVF_Name> -- the model
            class <TVF_Name> -- the service
        """
        print(f'extended_builder.extended_builder("{self.db_url}", "{self.project_directory}"')

        # Column metadata for every table-valued function in the database.
        cols_sql = """
        SELECT TABLE_CATALOG AS [Database], TABLE_SCHEMA AS [Schema],
            TABLE_NAME AS [Function], COLUMN_NAME AS [Column],
            DATA_TYPE AS [Data_Type], CHARACTER_MAXIMUM_LENGTH AS [Char_Max_Length]
        FROM INFORMATION_SCHEMA.ROUTINE_COLUMNS
        WHERE TABLE_NAME IN (SELECT ROUTINE_NAME
            FROM INFORMATION_SCHEMA.ROUTINES
            WHERE ROUTINE_TYPE = 'FUNCTION' AND DATA_TYPE = 'TABLE')
        ORDER BY TABLE_NAME, COLUMN_NAME;
        """

        engine = sqlalchemy.create_engine(self.db_url, echo=False)  # sqlalchemy sqls...

        cols = []
        current_table_name = ""
        with engine.connect() as connection:  # first, get all the TVF cols & build class
            result = connection.execute(text(cols_sql))
            for row in result:
                # row eg: ('SampleDB', 'dbo', 'fn_Data_u_CDM_BusinessProcess_yyyy', 'Document', 'char', 10)
                # print(f'TVF cols - fields: {row._fields}')
                # print(f'TVF cols - values: {row}')
                log(f'col row: {row}, database: {row.Database}')
                function_name = row.Function
                # Rows arrive ordered by function name; flush cols at each name change.
                if function_name != current_table_name:
                    if len(cols) > 0:
                        self.number_of_services += 1
                        self.build_tvf_class(cols)
                    current_table_name = function_name
                    cols = []
                cols.append(row)
            if sqlalchemy2:
                connection.commit()
                connection.close()
                print("\n\n now process args")
            else:
                engine.dispose()  # fixed some no-result errors
        # Flush the trailing group (the last function has no name change after it).
        if len(cols) > 0:
            self.number_of_services += 1
            self.build_tvf_class(cols)

        # eg, udfEmployeeInLocationWithName
        # Parameter metadata for every TVF (used to emit the service signatures).
        args_sql = """
        SELECT SCHEMA_NAME(SCHEMA_ID) AS [Schema]
            ,SO.name AS [ObjectName]
            ,SO.Type_Desc AS [ObjectType (UDF/SP)]
            ,P.parameter_id AS [ParameterID]
            ,P.name AS [ParameterName]
            ,TYPE_NAME(P.user_type_id) AS [ParameterDataType]
            ,P.max_length AS [ParameterMaxBytes]
            ,P.is_output AS [IsOutPutParameter]
        FROM sys.objects AS SO
        LEFT OUTER JOIN sys.parameters AS P ON SO.OBJECT_ID = P.OBJECT_ID
        WHERE SO.Type_Desc = 'SQL_INLINE_TABLE_VALUED_FUNCTION' OR SO.Type_Desc = 'SQL_TABLE_VALUED_FUNCTION'
        ORDER BY [Schema], SO.name, P.parameter_id
        """

        args = []
        current_object_name = ""
        with engine.connect() as connection:  # next, get all the TVF args
            result = connection.execute(text(args_sql))
            for row in result:
                # print(f'TVF args - fields: {row._fields}')
                # print(f'TVF args - values: {row}')
                log(f'arg row: {row})')  # , database: {row.Database}')
                object_name = row.ObjectName
                # Same group/flush pattern as the cols loop above.
                if object_name != current_object_name:
                    if len(args) > 0:
                        self.build_tvf_service(args)
                    current_object_name = object_name
                    args = []
                args.append(row)
            # connection.close()
        if len(args) > 0:
            self.build_tvf_service(args)

        # Emit expose_tvfs(api), run on import by the generated project.
        self.tvf_contents += f'def expose_tvfs(api):\n'
        for each_service in self.tvf_services:
            self.tvf_contents += f'\tapi.expose_object({each_service})\n'
        self.tvf_contents += f'\n# {self.number_of_services} services created.\n'

        self.write_tvf_file()
        self.append_expose_services_file()


""" args

db_url - use this to open the target database, e.g. for meta data
project_directory - the created project... create / alter files here
"""


def extended_builder(db_url: str, project_directory: str):
    """
    Illustrate Extended Builder -- CLI calls EB to create / update project files.

    See: https://apilogicserver.github.io/Docs/Project-Builders/

    Expose TVFs (Sql Server Table Valued Functions) as apis

    Scan db_url schema for TVFs, create api/tvf.py:

    * Create api/tvf.py --
        - for each TVF found in db_url:
            - class t_<TVF_Name> -- the model
            - class <TVF_Name> -- the service
        - at end, add endpoints to safrs api
        - executed on import

    * Update api/customize.api to import tvf

    Example

    APILogicServer run --project_name='~/dev/servers/sqlserver-types' \\
    \b
    --extended_builder='*' \\
    \b
    --db_url='mssql+pyodbc://sa:Posey3861@localhost:1433/SampleDB?driver=ODBC+Driver+17+for+SQL+Server?trusted_connection=no'

    Args:
        db_url (str): SQLAlchemy db uri
        project_directory (str): project location
    """
    log(f'extended_builder.extended_builder("{db_url}", "{project_directory}"')
    tvf_builder = TvfBuilder(db_url, project_directory)
    tvf_builder.run()
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/extended_builder.py
extended_builder.py
__version__ = "09.02.18"

# Human-readable changelog shown by the CLI.
recent_changes = \
    f'\n\nRecent Changes:\n' +\
    "\t08/22/2023 - 09.02.18: Devops container/compose, Multi-arch dockers, add-auth with db_url, auth docker dbs, meta api \n"\
    "\t07/04/2023 - 09.01.00: SQLAlchemy 2 typed-relns/attrs, Docker: Python 3.11.4 & odbc18 \n"\
    "\t06/24/2023 - 09.00.01: PyMysql \n"\
    "\t06/22/2023 - 09.00.00: Optimistic Locking, safrs 310, SQLAlchemy 2.0.15 \n"\
    "\t05/01/2023 - 08.03.06: allocation sample \n"\
    "\t04/26/2023 - 08.03.00: virt attrs (Issue 56), safrs 3.0.2, readme updates, LogicBank 1.8.4 \n"\
    "\t04/13/2023 - 08.02.00: integratedConsole, logic logging (66), table relns fix (65) \n"\
    "\t04/06/2023 - 08.01.24: create_image, bugfix for excluded table relationships \n"\
    "\t02/15/2023 - 08.00.01: Declarative Authorization and Authentication, Werkzeug==2.2.3 \n"\
    "\t01/10/2023 - 07.00.04: Portable projects, server_proxy \n"\
    "\t01/06/2023 - 07.00.00: Multi-db, sqlite test dbs, tests run, security prototype, env config \n"\
    "\t12/21/2022 - 06.05.00: Devops, env db uri, api endpoint names, git-push-new-project \n"\
    "\t11/22/2022 - 06.03.06: Image, Chkbox, Dialects, run.sh, SQL/Server url change, stop endpoint, Chinook Sqlite \n"\
    "\t10/02/2022 - 06.02.00: Option infer_primary_key, Oct1 SRA (issue 49), cleanup db/api setup, += postgres dvr \n"\
    "\t09/15/2022 - 06.01.00: Multi-app Projects \n"\
    "\t08/28/2022 - 06.00.01: Admin App show_when & cascade add. Simplify Codespaces swagger url & use default config \n"\
    "\t06/12/2022 - 05.02.22: No pyodbc by default, model customizations simplified, better logging \n"\
    "\t05/04/2022 - 05.02.03: alembic for database migrations, admin-merge.yaml \n"\
    "\t04/27/2022 - 05.01.02: copy_children, with support for nesting (children and grandchildren, etc.) \n"\
    "\t03/27/2022 - 05.00.06: Introducing Behave test framework, LogicBank bugfix \n"\
    "\t12/26/2021 - 04.00.05: Introducing the admin app, with Readme Tutorial \n"\
    "\t11/13/2021 - 03.50.01: rebuild-from-database/model, improved relationship support, port conflict msg \n"\
    "\t09/15/2021 - 03.00.09: auto-create .devcontainer for vscode, configure network, python & debug \n"

from contextlib import closing

import yaml

temp_created_project = "temp_created_project"   # see copy_if_mounted

import socket
import subprocess
from os.path import abspath
from os.path import realpath
from pathlib import Path
from shutil import copyfile
import shutil
import importlib.util
from flask import Flask
import logging, logging.config
import datetime
from typing import NewType
import sys
import os
import platform
import importlib


def is_docker() -> bool:
    """ running docker?  dir exists: /home/api_logic_server """
    path = '/home/api_logic_server'
    path_result = os.path.isdir(path)  # this *should* exist only on docker
    env_result = "DOCKER" == os.getenv('APILOGICSERVER_RUNNING')
    # Sanity check: the directory heuristic and the env var must agree.
    assert path_result == env_result
    return path_result


def get_api_logic_server_dir() -> str:
    """
    :return: ApiLogicServer dir, eg, /Users/val/dev/ApiLogicServer
    """
    running_at = Path(__file__)
    python_path = running_at.parent.absolute()
    return str(python_path)


current_path = os.path.abspath(os.path.dirname(__file__))

# Configure logging from the packaged logging.yml, then honor APILOGICSERVER_DEBUG.
with open(f'{get_api_logic_server_dir()}/logging.yml','rt') as f:
    config=yaml.safe_load(f.read())
    f.close()
logging.config.dictConfig(config)
log = logging.getLogger(__name__)
debug_value = os.getenv('APILOGICSERVER_DEBUG')
if debug_value is not None:
    debug_value = debug_value.upper()
    if debug_value.startswith("F") or debug_value.startswith("N"):
        log.setLevel(logging.INFO)
    else:
        log.setLevel(logging.DEBUG)
logging.getLogger('create_from_model.api_logic_server_utils').setLevel(logging.DEBUG)
logging.getLogger('sqlacodegen_wrapper.sqlacodegen.sqlacodegen.codegen').setLevel(logging.DEBUG)
logging.getLogger('api_logic_server_cli.sqlacodegen_wrapper.sqlacodegen_wrapper').setLevel(logging.DEBUG)
logging.getLogger('create_from_model.model_creation_services').setLevel(logging.DEBUG)

log.debug("Patch to enable import of outer directories")
sys.path.append(get_api_logic_server_dir())  # e.g, on Docker: export PATH="/home/api_logic_server/api_logic_server_cli"
api_logic_server_path = os.path.dirname(get_api_logic_server_dir())  # e.g: export PATH="/home/api_logic_server"
sys.path.append(api_logic_server_path)

from create_from_model.model_creation_services import ModelCreationServices
import sqlacodegen_wrapper.sqlacodegen_wrapper as expose_existing_callable
import create_from_model.api_logic_server_utils as create_utils
import api_logic_server_cli.create_from_model.uri_info as uri_info
from api_logic_server_cli.cli_args_project import Project
from api_logic_server_cli.cli_args_base import OptLocking

api_logic_server_info_file_name = get_api_logic_server_dir() + "/api_logic_server_info.yaml"

api_logic_server_info_file_dict = {}  # last-run (debug, etc) info
""" contains last-run info, debug switches to show args, etc """

if Path(api_logic_server_info_file_name).is_file():
    api_logic_server_info_file = open(api_logic_server_info_file_name)
    api_logic_server_info_file_dict = yaml.load(api_logic_server_info_file, Loader=yaml.FullLoader)
    api_logic_server_info_file.close()

last_created_project_name = api_logic_server_info_file_dict.get("last_created_project_name","")
default_db = "default = nw.sqlite, ? for help"
default_project_name = "ApiLogicProject"
os_cwd = os.getcwd()
default_bind_key_url_separator = "-"  # admin app fails with "/" or ":" (json issues?)
if is_docker(): default_project_name = "/localhost/ApiLogicProject" # MetaData = NewType('MetaData', object) MetaDataTable = NewType('MetaDataTable', object) def create_app(config_filename=None, host="localhost"): import safrs app = Flask("API Logic Server") import api_logic_server_cli.config as app_logic_server_config app.config.from_object(app_logic_server_config.Config) db = safrs.DB db.init_app(app) return app def delete_dir(dir_path, msg): """ :param dir_path: delete this folder :return: """ use_shutil_debug = True if use_shutil_debug: # credit: https://linuxize.com/post/python-delete-files-and-directories/ # and https://stackoverflow.com/questions/1213706/what-user-do-python-scripts-run-as-in-windows import errno, os, stat, shutil def handleRemoveReadonly(func, path, exc): excvalue = exc[1] if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777 func(path) else: raise if msg != "": log.debug(f'{msg} Delete dir: {dir_path}') use_callback = False if use_callback: shutil.rmtree(dir_path, ignore_errors=False, onerror=handleRemoveReadonly) else: try: shutil.rmtree(dir_path) except OSError as e: if "No such file" in e.strerror: pass else: log.debug("Error: %s : %s" % (dir_path, e.strerror)) else: # https://stackoverflow.com/questions/22948189/how-to-solve-the-directory-is-not-empty-error-when-running-rmdir-command-in-a try: remove_project = create_utils.run_command(f'del /f /s /q {dir_path} 1>nul') except: pass try: remove_project = create_utils.run_command(f'rmdir /s /q {dir_path}') # no prompt, no complaints if non-exists except: pass def recursive_overwrite(src, dest, ignore=None): """ copyTree, with overwrite thanks: https://stackoverflow.com/questions/12683834/how-to-copy-directory-recursively-in-python-and-overwrite-all """ if os.path.isdir(src): if not os.path.isdir(dest): os.makedirs(dest) files = os.listdir(src) if ignore is not None: ignored = ignore(src, files) else: ignored = 
def create_nw_tutorial(project_name, api_logic_server_dir_str):
    """ copy tutorial from docs, and link to it from readme.md

    1. prototype/nw/readme.md is the short preamble to "go see tutorial"
    2. append the standard readme

    Alert: 2 copies of the Tutorial:
    * ~/dev/ApiLogicServer/api_logic_server_cli/prototypes/nw/Tutorial.md
    * ~/dev/Org-ApiLogicServer/Docs/docs/Tutorial.md
    * cli version is master -->
    * cp api_logic_server_cli/project_prototype_nw/Tutorial.md ../Org-ApiLogicServer/Docs/docs/Tutorial.md
    """
    project_readme_file_path = project_name + '/readme.md'  # brief 'go read tutorial' - add std readme
    standard_readme_file_path = str(Path(api_logic_server_dir_str).\
                                    joinpath('prototypes/base').joinpath("readme.md"))
    with open(project_readme_file_path, 'a') as project_readme_file:
        with open(standard_readme_file_path) as standard_readme_file:
            project_readme_file.write(standard_readme_file.read())


def create_project_with_nw_samples(project, msg: str) -> str:
    """
    clone prototype to project directory, copy sqlite db, and remove git folder

    update config.py - SQLALCHEMY_DATABASE_URI

    if nw/nw+, inject sample logic/declare_logic and api/customize_api.

    nw, allocation etc databases are resolved in api_logic_server_utils.get_abs_db_url()

    :param project a ProjectRun
    :param msg log.debuged, such as Create Project:
    :return: return_abs_db_url (e.g., reflects sqlite copy to project/database dir)
    """
    import tempfile
    cloned_from = project.from_git
    tmpdirname = ""
    with tempfile.TemporaryDirectory() as tmpdirname:  # tmp save area for merge_into_prototype
        if project.merge_into_prototype:
            pass  # keep existing dir - its contents are saved/restored below
        else:
            remove_project_debug = True
            if remove_project_debug and project.project_name != ".":
                delete_dir(realpath(project.project_directory), "1.")

        from_dir = project.from_git
        api_logic_server_dir_str = str(get_api_logic_server_dir())  # fixme not req'd
        if project.from_git.startswith("https://"):
            # bug fix: was project.from_gitfrom_git (AttributeError at runtime on the git-clone path)
            cmd = f'git clone --quiet {project.from_git} {project.project_directory}'
            result = create_utils.run_command(cmd, msg=msg)  # "2. Create Project"
            delete_dir(f'{project.project_directory}/.git', "3.")
        else:
            if from_dir == "":
                from_dir = (Path(api_logic_server_dir_str)).\
                    joinpath('prototypes/base')  # /Users/val/dev/ApiLogicServer/project_prototype
            log.debug(f'{msg} {os.path.realpath(project.project_directory)}')
            log.debug(f'.. ..Clone from {from_dir} ')
            cloned_from = from_dir
            try:
                if project.merge_into_prototype:  # create project over current (e.g., docker, learning center)
                    # preserve files like Tech_Bits.md
                    recursive_overwrite(project.project_directory, str(tmpdirname))  # save, restore @ end
                    delete_dir(str(Path(str(tmpdirname)) / ".devcontainer"), "")  # except, do NOT restore these
                    delete_dir(str(Path(str(tmpdirname)) / "api"), "")
                    delete_dir(str(Path(str(tmpdirname)) / "database"), "")
                    delete_dir(str(Path(str(tmpdirname)) / "logic"), "")
                    delete_dir(str(Path(str(tmpdirname)) / "security"), "")
                    delete_dir(str(Path(str(tmpdirname)) / "test"), "")
                    delete_dir(str(Path(str(tmpdirname)) / "ui"), "")
                    if os.path.exists(str(Path(str(tmpdirname)) / "api_logic_server_run.py")):
                        os.remove(str(Path(str(tmpdirname)) / "api_logic_server_run.py"))
                    delete_dir(realpath(project.project_directory), "")
                    recursive_overwrite(from_dir, project.project_directory)  # ApiLogic Proto -> current (new) project
                else:
                    shutil.copytree(from_dir, project.project_directory)  # normal path (fails if project_directory not empty)
            except OSError as e:
                raise Exception(f'\n==>Error - unable to copy to {project.project_directory} -- see log below'
                                f'\n\n{str(e)}\n\n'
                                f'Suggestions:\n'
                                f'.. Verify the --project_name argument\n'
                                f'.. If you are using Docker, verify the -v argument\n\n')

        if project.nw_db_status in ["nw", "nw+"]:
            log.debug(".. ..Copy in nw customizations: logic, custom api, readme, tests, admin app")
            nw_dir = (Path(api_logic_server_dir_str)).\
                joinpath('prototypes/nw')  # /Users/val/dev/ApiLogicServer/api_logic_server_cli/project_prototype
            recursive_overwrite(nw_dir, project.project_directory)
            create_nw_tutorial(project.project_directory, api_logic_server_dir_str)

        if project.nw_db_status in ["nw-"]:
            log.debug(".. ..Copy in nw- customizations: readme, perform_customizations")
            nw_dir = (Path(api_logic_server_dir_str)).\
                joinpath('prototypes/nw_no_cust')  # /Users/val/dev/ApiLogicServer/project_prototype_nw_no_cust
            recursive_overwrite(nw_dir, project.project_directory)

        if project.db_url in ["allocation"]:
            log.debug(".. ..Copy in allocation customizations: readme, logic, tests")
            nw_dir = (Path(api_logic_server_dir_str)).\
                joinpath('prototypes/allocation')  # /Users/val/dev/ApiLogicServer/project_prototype_allocation
            recursive_overwrite(nw_dir, project.project_directory)

        if project.db_url == "mysql+pymysql://root:p@localhost:3306/classicmodels":
            log.debug(".. ..Copy in classicmodels customizations")
            proto_dir = (Path(api_logic_server_dir_str)).\
                joinpath('prototypes/classicmodels')
            recursive_overwrite(proto_dir, project.project_directory)

        if project.db_url == "postgresql://postgres:p@localhost/postgres":
            log.debug(".. ..Copy in postgres customizations")
            proto_dir = (Path(api_logic_server_dir_str)).\
                joinpath('prototypes/postgres')
            recursive_overwrite(proto_dir, project.project_directory)

        # stamp readme / config with creation metadata
        create_utils.replace_string_in_file(search_for="creation-date",
                                            replace_with=str(datetime.datetime.now().strftime("%B %d, %Y %H:%M:%S")),
                                            in_file=f'{project.project_directory}/readme.md')
        create_utils.replace_string_in_file(search_for="api_logic_server_version",
                                            replace_with=__version__,
                                            in_file=f'{project.project_directory}/readme.md')
        create_utils.replace_string_in_file(search_for="api_logic_server_template",
                                            replace_with=f'{from_dir}',
                                            in_file=f'{project.project_directory}/readme.md')
        create_utils.replace_string_in_file(search_for="api_logic_server_project_directory",
                                            replace_with=f'{project.project_directory}',
                                            in_file=f'{project.project_directory}/readme.md')
        create_utils.replace_string_in_file(search_for="api_logic_server_api_name",
                                            replace_with=f'{project.api_name}',
                                            in_file=f'{project.project_directory}/readme.md')
        create_utils.replace_string_in_file(search_for="replace_opt_locking",
                                            replace_with=f'{project.opt_locking}',
                                            in_file=f'{project.project_directory}/config.py')
        create_utils.replace_string_in_file(search_for="replace_opt_locking_attr",
                                            replace_with=f'{project.opt_locking_attr}',
                                            in_file=f'{project.project_directory}/api/system/opt_locking/opt_locking.py')

        do_fix_docker_for_vscode_dockerfile = False  # not required - multi-arch docker
        if do_fix_docker_for_vscode_dockerfile:
            # print(f'\n> Created for platform.machine(): {platform.machine()}\n')
            if platform.machine() in ('arm64', 'aarch64'):  # in ("i386", "AMD64", "x86_64")
                log.debug(f'\n>> .. arm - {platform.machine()}\n')
                create_utils.replace_string_in_file(search_for="apilogicserver/api_logic_server",
                                                    replace_with=f'apilogicserver/api_logic_server_local',
                                                    in_file=f'{project.project_directory}/.devcontainer/For_VSCode.dockerfile')

        return_abs_db_url = project.abs_db_url
        copy_sqlite = True
        if copy_sqlite == False or "sqlite" not in project.abs_db_url:
            # non-sqlite: just point the project config at the db url
            db_uri = get_windows_path_with_slashes(project.abs_db_url)
            create_utils.replace_string_in_file(search_for="replace_db_url",
                                                replace_with=db_uri,
                                                in_file=f'{project.project_directory}/config.py')
            create_utils.replace_string_in_file(search_for="replace_db_url",
                                                replace_with=db_uri,
                                                in_file=f'{project.project_directory}/database/alembic.ini')
            create_utils.replace_string_in_file(search_for="replace_db_url",
                                                replace_with=db_uri,
                                                in_file=f'{project.project_directory}/database/db_debug.py')
        else:
            """ sqlite - copy the db (relative fails, since cli-dir != project-dir) """
            # strip sqlite://// from sqlite:////Users/val/dev/ApiLogicServer/api_logic_server_cli/database/nw-gold.sqlite
            db_loc = project.abs_db_url.replace("sqlite:///", "")
            target_db_loc_actual = str(project.project_directory_path.joinpath('database/db.sqlite'))
            copyfile(db_loc, target_db_loc_actual)

            config_url = str(project.api_logic_server_dir_path)
            # build this: SQLALCHEMY_DATABASE_URI = sqlite:///{str(project_abs_dir.joinpath('database/db.sqlite'))}
            # into this:  SQLALCHEMY_DATABASE_URI = f"replace_db_url"
            replace_db_url_value = "sqlite:///{str(project_abs_dir.joinpath('database/db.sqlite'))}"

            if os.name == "nt":  # windows
                target_db_loc_actual = get_windows_path_with_slashes(target_db_loc_actual)
            # set this in config.py: SQLALCHEMY_DATABASE_URI = "replace_db_url"
            return_abs_db_url = f'sqlite:///{target_db_loc_actual}'
            create_utils.replace_string_in_file(search_for="replace_db_url",
                                                replace_with=replace_db_url_value,
                                                in_file=f'{project.project_directory}/config.py')
            create_utils.replace_string_in_file(search_for="replace_db_url",
                                                replace_with=return_abs_db_url,
                                                in_file=f'{project.project_directory}/database/alembic.ini')
            create_utils.replace_string_in_file(search_for="replace_db_url",
                                                replace_with=return_abs_db_url,
                                                in_file=f'{project.project_directory}/database/db_debug.py')
            log.debug(f'.. ..Sqlite database setup {target_db_loc_actual}...')
            log.debug(f'.. .. ..From {db_loc}')
            log.debug(f'.. .. ..db_uri set to: {return_abs_db_url} in <project>/config.py')

        if project.merge_into_prototype:
            recursive_overwrite(str(tmpdirname), project.project_directory)  # restore saved files
            # delete_dir(realpath(Path(str(tmpdirname))), "")
            # os.removedirs(Path(str(tmpdirname)))
            # tmpdirname.cleanup()
        return return_abs_db_url


def get_windows_path_with_slashes(url: str) -> str:
    """ idiotic fix for windows (\ --> \\\\)

    https://stackoverflow.com/questions/1347791/unicode-error-unicodeescape-codec-cant-decode-bytes-cannot-open-text-file"""
    return url.replace('\\', '\\\\')


def resolve_home(name: str) -> str:
    """
    :param name: a file name, eg, ~/Desktop/a.b
    :return: /users/you/Desktop/a.b

    This just removes the ~, the path may still be relative to run location
    """
    result = name
    if result.startswith("~"):
        result = str(Path.home()) + result[1:]
    return result


def fix_database_models(project_directory: str, db_types: str, nw_db_status: str, is_tutorial: bool=False):
    """ Alters models.py

    * Injects <db_types file> into database/models.py, fix nw cascade delete, jsonapi_attr
    * Provides for column alias examples (Category.CategoryName, etc)
    * Cascade Delete for OrderDetails

    Args:
        project_directory (str): /Users/val/dev/Org-ApiLogicServer/API_Fiddle/1. Instant_Creation
        db_types (str): _description_
        nw_db_status (str): whether this is nw, nw- or nw+ (or none of the above)
        is_tutorial (bool, optional): creating tutorial or api_fiddle. Defaults to False.
    """
    models_file_name = f'{project_directory}/database/models.py'
    if db_types is not None and db_types != "":
        log.debug(f'.. .. ..Injecting file {db_types} into database/models.py')
        with open(db_types, 'r') as file:
            db_types_data = file.read()
        create_utils.insert_lines_at(lines=db_types_data,
                                     at="(typically via --db_types)", file_name=models_file_name)
    if nw_db_status in ["nw", "nw+"] or (is_tutorial and nw_db_status == "nw-"):  # no manual fixups for nw-
        log.debug(f'.. .. ..Setting cascade delete and column alias for sample database database/models.py')
        create_utils.replace_string_in_file(in_file=models_file_name,
            search_for='OrderDetailList : Mapped[List["OrderDetail"]] = relationship(back_populates="Order")',
            replace_with='OrderDetailList : Mapped[List["OrderDetail"]] = relationship(cascade="all, delete", back_populates="Order") # manual fix')
        create_utils.replace_string_in_file(in_file=models_file_name,
            search_for="ShipPostalCode = Column(String(8000))",
            replace_with="ShipZip = Column('ShipPostalCode', String(8000)) # manual fix - alias")
        create_utils.replace_string_in_file(in_file=models_file_name,
            search_for="CategoryName_ColumnName = Column(String(8000))",
            replace_with="CategoryName = Column('CategoryName_ColumnName', String(8000)) # manual fix - alias")
    """
    if not "include_exclude" in project_directory and False:  #
        log.debug(f'.. .. ..And Employee Virtual Attributes')
        nw_virtuals_attrs_file_name = Path(get_api_logic_server_dir()).\
            joinpath('fragments/nw_virtual_attrs.py')
        with open(nw_virtuals_attrs_file_name, 'r') as file:
            nw_virtual_attrs = file.read()
        nw_virtuals_attrs = nw_virtual_attrs[8:]  # first line was for IDE no errors
        create_utils.insert_lines_at(lines=nw_virtuals_attrs,
            at="OrderList = relationship('Order', cascade_backrefs=True, backref='Employee')",
            file_name=models_file_name, after=True)
    """
def final_project_fixup(msg, project) -> str:
    """
    * fix ports/hosts,
    * inject project names/dates,
    * update info file

    Args:
        msg (_type_): progress message, e.g. "7. Final project fixup"
        project (_type_): the Project being created

    Returns:
        str: _description_
    """
    log.debug(msg)  # "7. Final project fixup"

    if project.command.startswith("rebuild"):
        pass  # rebuild keeps existing run/config files untouched
    else:
        log.debug(f' b.  Update api_logic_server_run.py with '
                  f'project_name={project.project_name} and api_name, host, port')
        update_api_logic_server_run(project)
        fix_host_and_ports(" c.  Fixing api/expose_services - port, host", project)
        fix_build_docker_image(" d.  Fixing devops/docker-image/build_image.sh - project name", project)

    # record the last-created project in the ApiLogicServer info file
    api_logic_server_info_file_dict["last_created_project_name"] = project.project_directory  # project_name - twiddle
    api_logic_server_info_file_dict["last_created_date"] = str(datetime.datetime.now().strftime("%B %d, %Y %H:%M:%S"))
    api_logic_server_info_file_dict["last_created_version"] = __version__
    with open(api_logic_server_info_file_name, 'w') as api_logic_server_info_file_file:
        yaml.dump(api_logic_server_info_file_dict, api_logic_server_info_file_file, default_flow_style=False)
    return


def fix_database_models__import_customize_models(project_directory: str, msg: str):
    """ Append "from database import customize_models" to database/models.py """
    models_file_name = f'{project_directory}/database/models.py'
    log.debug(msg)
    # fix: use a context manager so the handle is closed even if the write raises
    with open(models_file_name, 'a') as models_file:
        models_file.write("\n\nfrom database import customize_models\n")


def update_api_logic_server_run(project):
    """
    Updates project_name, ApiLogicServer hello, project_dir in config.py

    Note project_directory is from user, and may be relative (and same as project_name)
    """
    api_logic_server_run_py = f'{project.project_directory}/api_logic_server_run.py'
    config_py = f'{project.project_directory}/config.py'
    create_utils.replace_string_in_file(search_for="\"api_logic_server_project_name\"",  # fix logic_bank_utils.add_python_path
                                        replace_with='"' + os.path.basename(project.project_name) + '"',
                                        in_file=api_logic_server_run_py)
    create_utils.replace_string_in_file(search_for="ApiLogicServer hello",
                                        replace_with="ApiLogicServer generated at:" +
                                        str(datetime.datetime.now().strftime("%B %d, %Y %H:%M:%S")),
                                        in_file=api_logic_server_run_py)
    project_directory_fix = project.project_directory_actual
    if os.name == "nt":  # windows - double the backslashes so the literal survives
        project_directory_fix = get_windows_path_with_slashes(str(project.project_directory_actual))
    create_utils.replace_string_in_file(search_for="\"api_logic_server_project_dir\"",  # for logging project location
                                        replace_with='"' + project_directory_fix + '"',
                                        in_file=api_logic_server_run_py)
    create_utils.replace_string_in_file(search_for="api_logic_server_api_name",  # last node of server url
                                        replace_with=project.api_name,
                                        in_file=api_logic_server_run_py)
    create_utils.replace_string_in_file(search_for="api_logic_server_host",
                                        replace_with=project.host,
                                        in_file=config_py)
    create_utils.replace_string_in_file(search_for="api_logic_server_swagger_host",
                                        replace_with=project.swagger_host,
                                        in_file=config_py)
    replace_port = f', port="{project.port}"' if project.port else ""  # TODO: consider reverse proxy (currently unused)
    create_utils.replace_string_in_file(search_for="api_logic_server_version",
                                        replace_with=__version__,
                                        in_file=api_logic_server_run_py)
    create_utils.replace_string_in_file(search_for="api_logic_server_created_on",
                                        replace_with=str(datetime.datetime.now().strftime("%B %d, %Y %H:%M:%S")),
                                        in_file=api_logic_server_run_py)
    create_utils.replace_string_in_file(search_for="api_logic_server_port",  # server port
                                        replace_with=project.port,
                                        in_file=config_py)
    create_utils.replace_string_in_file(search_for="api_logic_server_port",  # server port
                                        replace_with=project.port,
                                        in_file=api_logic_server_run_py)
    create_utils.replace_string_in_file(search_for="api_logic_server_host",
                                        replace_with=project.host,
                                        in_file=api_logic_server_run_py)
def fix_host_and_ports(msg, project):
    """ c. Fixing api/expose_services - port, host """
    log.debug(msg)  # c. Fixing api/expose_services - port, host
    replace_port = f':{project.port}' if project.port else ""
    # replace_with = host + replace_port
    in_file = f'{project.project_directory}/api/customize_api.py'
    create_utils.replace_string_in_file(search_for="api_logic_server_host",
                                        replace_with=project.host,
                                        in_file=in_file)
    create_utils.replace_string_in_file(search_for="api_logic_server_port",
                                        replace_with=replace_port,
                                        in_file=in_file)
    log.debug(f' d.  Updated customize_api_py with port={project.port} and host={project.host}')
    full_path = project.project_directory_actual
    create_utils.replace_string_in_file(search_for="python_anywhere_path",
                                        replace_with=full_path,
                                        in_file=f'{project.project_directory}/devops/python-anywhere/python_anywhere_wsgi.py')
    log.debug(f' e.  Updated python_anywhere_wsgi.py with {full_path}')


def fix_build_docker_image(msg, project: Project):
    """ d. Fixing devops/docker-image/build_image.sh - project name

    Replaces the "apilogicserver_project_name_lower" placeholder with the
    project's lower-cased last node in each devops build / compose file.

    Args:
        msg: progress message to log
        project: the Project being created
    """
    log.debug(msg)  # d. Fixing devops/docker-image/build_image.sh - project name
    project_name_lower = project.project_name_last_node.lower()  # hoisted: same value for every file

    # DRY: the original was 8 copy-pasted calls, only the last two guarded by is_file();
    # guard uniformly so a missing file is skipped rather than raising
    devops_files = [
        'devops/docker-image/build_image.sh',
        'devops/docker-image/run_image.sh',
        'devops/docker-compose-dev-local-nginx/docker-compose-dev-local-nginx.yml',
        'devops/docker-compose-dev-local/docker-compose-dev-local.yml',
        'devops/docker-compose-dev-azure/docker-compose-dev-azure.yml',
        'devops/docker-compose-dev-azure/azure-deploy.sh',
        'devops/docker-compose-dev-azure-nginx/azure-deploy.sh',
        'devops/docker-compose-dev-azure-nginx/docker-compose-dev-azure-nginx.yml',
    ]
    for each_file in devops_files:
        in_file = f'{project.project_directory}/{each_file}'
        if Path(in_file).is_file():
            create_utils.replace_string_in_file(search_for="apilogicserver_project_name_lower",
                                                replace_with=project_name_lower,
                                                in_file=in_file)


def start_open_with(open_with: str, project_name: str):
    """ Creation complete.  Opening {open_with} at {project_name} """
    log.debug(f'\nCreation complete - Opening {open_with} at {project_name}')
    log.debug(".. See the readme for install / run instructions")
    create_utils.run_command(f'{open_with} {project_name}', None, "no-msg")


def invoke_extended_builder(builder_path, db_url, project_directory):
    """ Load the user-supplied extended_builder module from a file path and run its extended_builder(). """
    # spec = importlib.util.spec_from_file_location("module.name", "/path/to/file.py")
    spec = importlib.util.spec_from_file_location("module.name", builder_path)
    extended_builder = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(extended_builder)  # runs "bare" module code (e.g., initialization)
    extended_builder.extended_builder(db_url, project_directory)  # extended_builder.MyClass()


def invoke_creators(model_creation_services: ModelCreationServices):
    """ MAJOR: uses model_creation_services (resource_list, model iterator functions) to create api, apps """
    creator_path = abspath(f'{abspath(get_api_logic_server_dir())}/create_from_model')
    log.debug(" b.  Create api/expose_api_models.py from models")
    # log.debug(f'---> cwd: {model_creation_services.os_cwd}')
    spec = importlib.util.spec_from_file_location("module.name", f'{creator_path}/api_expose_api_models_creator.py')
    creator = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(creator)  # runs "bare" module code (e.g., initialization)
    creator.create(model_creation_services)  # invoke create function

    if model_creation_services.project.admin_app:
        log.debug(" c.  Create ui/admin/admin.yaml from models")
        spec = importlib.util.spec_from_file_location("module.name", f'{creator_path}/ui_admin_creator.py')
        creator = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(creator)
        creator.create(model_creation_services)
    else:
        pass  # log.debug(".. .. ..ui/admin_app creation declined")

    # model_creation_services.close_app()  # this may no longer be required
class ProjectRun(Project):
    """ Main Class - instantiate / create_project to run CLI functions """

    def __init__(self, command: str, project_name: str, db_url: str, api_name: str="api",
                 host: str='localhost', port: str='5656', swagger_host: str="localhost",
                 not_exposed: str="ProductDetails_V",
                 from_git: str="", db_types: str=None, open_with: str="", run: bool=False,
                 use_model: str="", admin_app: bool=True, flask_appbuilder: bool=False,
                 favorites: str="name description", non_favorites: str="id", react_admin: bool=True,
                 extended_builder: str="", include_tables: str="", multi_api: bool=False,
                 infer_primary_key: bool=False, bind_key_url_separator: str=default_bind_key_url_separator,
                 bind_key: str="", execute: bool=True, opt_locking: str=OptLocking.OPTIONAL.value,
                 opt_locking_attr: str="S_CheckSum", id_column_alias: str="Id"):
        # capture every CLI option on self; execute=False lets callers set more state before running
        super(ProjectRun, self).__init__()
        self.project_name = project_name
        self.db_url = db_url
        self.bind_key = bind_key
        self.api_name = api_name
        self.host = host
        self.port = port
        self.swagger_host = swagger_host
        self.not_exposed = not_exposed
        self.from_git = from_git
        self.db_types = db_types
        self.open_with = open_with
        self.run = run
        self.use_model = use_model
        self.admin_app = admin_app
        self.flask_appbuilder = flask_appbuilder
        self.favorites = favorites
        self.non_favorites = non_favorites
        self.react_admin = react_admin
        self.extended_builder = extended_builder
        self.include_tables = include_tables
        self.multi_api = multi_api
        self.infer_primary_key = infer_primary_key
        self.bind_key_url_separator = bind_key_url_separator
        self.command = command
        self.opt_locking = opt_locking
        self.opt_locking_attr = opt_locking_attr
        self.id_column_alias = id_column_alias

        if execute:
            self.create_project()

    def print_options(self):
        """ Creating ApiLogicProject with options: (or uri helo) """
        if self.db_url == "?":
            # "?" means the user wants db_url help, not a project
            uri_info.print_uri_info()
            exit(0)

        print_options = True
        if print_options:
            creating_or_updating = "Creating"
            if self.command.startswith("add_"):
                creating_or_updating = "Updating"
            log.debug(f'\n\n{creating_or_updating} ApiLogicProject with options:')
            log.debug(f'  --db_url={self.db_url}')
            log.debug(f'  --bind_key={self.bind_key}')
            log.debug(f'  --project_name={self.project_name}   (pwd: {self.os_cwd})')
            log.debug(f'  --api_name={self.api_name}')
            log.debug(f'  --admin_app={self.admin_app}')
            log.debug(f'  --react_admin={self.react_admin}')
            log.debug(f'  --flask_appbuilder={self.flask_appbuilder}')
            log.debug(f'  --id_column_alias={self.id_column_alias}')
            log.debug(f'  --from_git={self.from_git}')
            #  log.debug(f'  --db_types={self.db_types}')
            log.debug(f'  --run={self.run}')
            log.debug(f'  --host={self.host}')
            log.debug(f'  --port={self.port}')
            log.debug(f'  --swagger_host={self.swagger_host}')
            log.debug(f'  --not_exposed={self.not_exposed}')
            log.debug(f'  --open_with={self.open_with}')
            log.debug(f'  --use_model={self.use_model}')
            log.debug(f'  --favorites={self.favorites}')
            log.debug(f'  --non_favorites={self.non_favorites}')
            log.debug(f'  --extended_builder={self.extended_builder}')
            log.debug(f'  --include_tables={self.include_tables}')
            log.debug(f'  --multi_api={self.multi_api}')
            log.debug(f'  --infer_primary_key={self.infer_primary_key}')
            log.debug(f'  --opt_locking={self.opt_locking}')
            log.debug(f'  --opt_locking_attr={self.opt_locking_attr}')
    def update_config_and_copy_sqlite_db(self, msg: str) -> str:
        """
        1. If sqlite, copy db to database folder

        2. Add project.db_url to config

        3. Update database/multi_db.py - bind & expose APIs

        Parameters:

        :arg: msg log.debug this, e.g., ".. ..Adding Database [{self.bind_key}] to existing project"
        :arg: project project setting object
        """
        log.debug(msg)
        bind_key_upper = self.bind_key.upper()  # configs insist on all caps
        return_abs_db_url = self.abs_db_url
        config_uri_value = "'" + return_abs_db_url + "'"

        # **************************
        # sqlite? copy to database/
        # **************************

        if "sqlite" in self.abs_db_url:
            """ sqlite - copy the db (relative fails, since cli-dir != project-dir) """
            log.debug(f'.. .. ..Copying sqlite database to: database/{self.bind_key}_db.sqlite')
            db_loc = self.abs_db_url.replace("sqlite:///", "")
            target_db_loc_actual = str(self.project_directory_path.joinpath(f'database/{self.bind_key}_db.sqlite'))
            copyfile(db_loc, target_db_loc_actual)

            if os.name == "nt":  # windows
                # 'C:\\\\Users\\\\val\\\\dev\\\\servers\\\\api_logic_server\\\\database\\\\db.sqlite'
                target_db_loc_actual = get_windows_path_with_slashes(target_db_loc_actual)
            return_abs_db_url = f'sqlite:///{target_db_loc_actual}'

            # build this: SQLALCHEMY_DATABASE_URI_AUTHENTICATION = f'sqlite:///{str(project_abs_dir.joinpath("database/authentication_db.sqlite"))}'
            # into this:  {CONFIG_URI} = '{config_uri_value}'
            file_name = f'"database/{self.bind_key}_db.sqlite"'
            config_uri_value = "f'sqlite:///{str(project_abs_dir.joinpath(" + file_name + "))}'"
            log.debug(f'.. .. ..From {db_loc}')

        # **********************
        # add url to config
        # **********************

        # db_uri = config_uri  # return_abs_db_url
        if os.name == "nt":  # windows
            # 'C:\\\\Users\\\\val\\\\dev\\\\servers\\\\api_logic_server\\\\database\\\\db.sqlite'
            target_db_loc_actual = get_windows_path_with_slashes(self.abs_db_url)
        CONFIG_URI = f'SQLALCHEMY_DATABASE_URI_{bind_key_upper}'
        # template injected into the project's config.py (not executed here)
        config_insert = f"""

{CONFIG_URI} = {config_uri_value}
app_logger.info(f'config.py - {CONFIG_URI}: <CONFIG_URI_VALUE>\\n')

# as desired, use env variable: export SQLALCHEMY_DATABASE_URI='sqlite:////Users/val/dev/servers/docker_api_logic_project/database/db.sqliteXX'
if os.getenv('{CONFIG_URI}'):
    {CONFIG_URI} = os.getenv('{CONFIG_URI}')  # type: ignore # type: str
    app_logger.debug(f'.. overridden from env variable: {CONFIG_URI}')
"""
        # late-substitute the literal {CONFIG_URI} braces the f-string above cannot express
        config_insert = config_insert.replace("<CONFIG_URI_VALUE>", "{" + f'{CONFIG_URI}' + "}")
        config_file = f'{self.project_directory}/config.py'
        config_built = create_utils.does_file_contain(search_for=CONFIG_URI, in_file=config_file)
        if not config_built:
            create_utils.insert_lines_at(lines=config_insert,
                                         at="# End Multi-Database URLs (from ApiLogicServer add-db...)",
                                         file_name=f'{self.project_directory}/config.py')
            log.debug(f'.. ..Updating config.py file with {CONFIG_URI}...')
        else:
            log.debug(f'.. ..Not updating config.py file with {CONFIG_URI}... (already present)')

        # **************************
        # update multi_db.py
        # **************************

        # NB: must do all binds in 1 call (not 1 call / db): https://www.youtube.com/watch?v=SB5BfYYpXjE
        bind_insert_urls = """
app_logger.debug(f"\\n<project.bind_key> Config complete - database/<project.bind_key>_models.py"
    + f'\\n -- with bind: <project.bind_key>'
    + f'\\n -- len(database.<project.bind_key>_models.<project.bind_key>.metadata.tables) tables loaded')
"""
        # not f-string since it contains {}
        expose_apis = """
    <project.bind_key>_expose_api_models.expose_models(safrs_api, method_decorators= method_decorators)
"""
        imports = """
from api import <project.bind_key>_expose_api_models
from database import <project.bind_key>_models
"""
        flask_app_config__bind_update = \
            f"\t\t'{self.bind_key}': flask_app.config['SQLALCHEMY_DATABASE_URI_{bind_key_upper}']\n"
        expose_apis = expose_apis.replace('<project.bind_key>', f'{self.bind_key}')
        imports = imports.replace('<project.bind_key>', f'{self.bind_key}')
        imports = imports.replace('<bind_key_upper>', f'{bind_key_upper}')
        binds_databases_file = f'{self.project_directory}/database/multi_db.py'
        binds_built = create_utils.does_file_contain( \
            search_for=bind_key_upper, in_file=binds_databases_file)
        some_configs_built = create_utils.does_file_contain( \
            search_for='flask_app.config[', in_file=binds_databases_file)
        if some_configs_built:
            flask_app_config__bind_update = ', ' + flask_app_config__bind_update
        if not binds_built:
            create_utils.insert_lines_at(lines=flask_app_config__bind_update,
                                         at="# make multiple databases available",
                                         file_name=binds_databases_file)
            create_utils.insert_lines_at(lines=expose_apis,
                                         at="# Begin Expose APIs", after=True,
                                         file_name=binds_databases_file)
            create_utils.insert_lines_at(lines=imports,
                                         at="# additional per-database imports", after=True,
                                         file_name=binds_databases_file)
            log.debug(f'.. ..Updated database/multi_db.py with {CONFIG_URI}...')
        else:
            log.debug(f'.. ..Not updating database/multi_db.py with {CONFIG_URI} (already present)')
        return return_abs_db_url

    def add_auth(self, msg: str, is_nw: bool = False):
        """_summary_
        1. add-db --db_url= [ auth | db_url ]
        2. add user.login endpoint
        3. Set SECURITY_ENABLED in config.py
        4. Adding Sample authorization to security/declare_security.py, or user

        Args:
            msg (str): eg: ApiLogicProject customizable project created.  Adding Security:")
            is_nw (bool): is northwind, which means we add the nw security logic
        """
        database_path = self.project_directory_path.joinpath("database")
        log.debug("\n\n==================================================================")
        if msg != "":
            log.info(msg + f" to project: {str(database_path.parent)}")
            log.info(" 1. ApiLogicServer add-db --db_url=auth --bind_key=authentication")
        log.debug("==================================================================5\n")
        # save/restore run state - create_project below is reused only for model creation services
        save_run = self.run
        save_command = self.command
        save_db_url = self.db_url
        self.command = "add_db"
        self.bind_key = "authentication"
        is_northwind = is_nw or self.nw_db_status == "nw"  # nw_db_status altered in create_project
        if is_northwind:  # is_nw or self.nw_db_status == "nw":
            self.db_url = "auth"  # shorthand for api_logic_server_cli/database/auth...
        self.run = False
        self.create_project()  # not creating project, but using model creation svcs
        self.run = save_run
        self.command = save_command
        self.db_url = save_db_url

        log.debug("\n==================================================================")
        if msg != "":
            log.info(" 2. Add User.Login endpoint")
        log.debug("==================================================================\n")
        login_endpoint_filename = f'{self.api_logic_server_dir_path.joinpath("templates/login_endpoint.txt")}'
        auth_models_file_name = f'{self.project_directory_path.joinpath("database/authentication_models.py")}'
        with open(login_endpoint_filename, 'r') as file:
            login_endpoint_data = file.read()
        create_utils.insert_lines_at(lines=login_endpoint_data,
                                     at='UserRoleList : Mapped[List["UserRole"]] = relationship(back_populates="user")',
                                     after=True,
                                     file_name=auth_models_file_name)
        login_endpoint_filename = f'{self.api_logic_server_dir_path.joinpath("templates/login_endpoint_imports.txt")}'
        auth_models_file_name = f'{self.project_directory_path.joinpath("database/authentication_models.py")}'
        with open(login_endpoint_filename, 'r') as file:
            login_endpoint_data = file.read()
        create_utils.insert_lines_at(lines=login_endpoint_data,
                                     at="import declarative_base",
                                     after=True,
                                     file_name=auth_models_file_name)

        log.debug("\n==================================================================")
        if msg != "":
            log.info(" 3. Set SECURITY_ENABLED in config.py")
        log.debug("==================================================================\n")
        create_utils.replace_string_in_file(search_for="SECURITY_ENABLED = False #",
                                            replace_with='SECURITY_ENABLED = True #',
                                            in_file=f'{self.project_directory}/config.py')

        if is_northwind:  # is_nw or self.nw_db_status == "nw":
            log.debug("\n==================================================================")
            if msg != "":
                # NOTE(review): doubled "if msg" guard reproduced from original - looks redundant, confirm before simplifying
                if msg != "":
                    log.info(" 4. Adding Sample authorization to security/declare_security.py")
            log.debug("==================================================================\n\n")
            nw_declare_security_py_path = self.api_logic_server_dir_path.\
                joinpath('prototypes/nw/security/declare_security.py')
            declare_security_py_path = self.project_directory_path.joinpath('security/declare_security.py')
            shutil.copyfile(nw_declare_security_py_path, declare_security_py_path)
        else:
            log.debug("\n==================================================================")
            if msg != "":
                log.info(" 4. TODO: Declare authorization in security/declare_security.py")
            log.debug("==================================================================\n\n")
    def add_nw_customizations(self, do_show_messages: bool = True, do_security: bool = True):
        """_summary_
        1. add-sqlite-security
        2. deep copy project_prototype_nw (adds logic)

        Args:
            do_show_messages (bool): log the "Explore key customization files" summary
            do_security (bool): also run add_auth (northwind security)
        """
        log.debug("\n\n==================================================================")
        nw_messages = ""
        if do_security:
            if do_show_messages:
                nw_messages = "Add northwind customizations - enabling security"
            self.add_auth(is_nw=True, msg=nw_messages)
        nw_path = (self.api_logic_server_dir_path).\
            joinpath('prototypes/nw')  # PosixPath('/Users/val/dev/ApiLogicServer/ApiLogicServer-dev/org_git/ApiLogicServer-src/api_logic_server_cli/prototypes/nw')
        recursive_overwrite(nw_path, self.project_directory)  # '/Users/val/dev/ApiLogicServer/ApiLogicServer-dev/org_git/tutorial/1. Instant_Creation'
        create_nw_tutorial(self.project_directory, str(self.api_logic_server_dir_path))
        if do_show_messages:
            log.info("\nExplore key customization files:")
            log.info(f'..api/customize_api.py')
            log.info(f'..database/customize_models.py')
            log.info(f'..logic/declare_logic.py')
            log.info(f'..security/declare_security.py\n')
            if self.is_tutorial == False:
                log.info(".. complete\n")

    def tutorial(self, msg: str="", create: str='tutorial'):
        """
        Creates (overwrites) Tutorial (`api_logic_server_cli/project_tutorial`)

        Contains 3 projects: basic_app, ApiLogicProject, ApiLogicProject_Logic

        example:
        \b
            cd ApiLogicProject  # any empty folder, perhaps where ApiLogicServer is installed
        \b
        Args:
            msg (str): eg: ApiLogicProject customizable project created.  Adding Security:")
            create: 'LearningCenter', or 'tutorial'
        """
        log.info(f'\n{msg} {create}')
        target_project = self.project_name  # eg, /Users/val/dev/Org-ApiLogicServer
        target_project_path = Path(target_project)
        self.project_directory_path = Path(self.project_name)
        self.project_directory_actual = self.project_directory_path
        # if not self.project_directory_path.exists():
        #     os.mkdir(self.project_directory_path, mode = 0o777)
        log.info(f"\nCreating {create}")
        workspace_name = 'prototypes/tutorial' if create == "tutorial" else "prototypes/fiddle"
        shutil.copytree(dirs_exist_ok=True,
                        src=self.api_logic_server_dir_path.joinpath(workspace_name),
                        dst=target_project_path.joinpath(create))  # project named from arg create
        self.command = "create"
        self.project_name = str(target_project_path.joinpath(f"{create}/1. Instant_Creation"))
        self.db_url = "nw-"  # shorthand for sample db, no cust
        save_run = self.run
        self.run = False
        self.is_tutorial = True
        log.info(f"\nCreating ApiLogicProject")
        self.create_project()
        log.info(f"\nCreating Customized\n")
        no_cust = self.project_name  # 1. Instant_Creation
        with_cust = str(target_project_path.joinpath(f"{create}/2. Customized"))
        self.project_directory = with_cust
        shutil.copytree(dirs_exist_ok=True, src=no_cust, dst=with_cust)
        self.project_name = with_cust
        self.command = "add-cust"
        self.add_nw_customizations(do_show_messages=False, do_security=False)
        self.run = save_run
        # remove logic below
        log.info(f"\nCreating Logic\n")
        no_cust = self.project_name
        with_cust = str(target_project_path.joinpath(f"{create}/3. 
Logic")) shutil.copytree(dirs_exist_ok=True, src=no_cust, dst=with_cust) self.project_name = with_cust self.command = "add-cust" self.add_nw_customizations(do_show_messages=False) self.run = save_run if create != "tutorial": # remove projects 1 and 2 shutil.rmtree(str(target_project_path.joinpath(f"{create}/1. Instant_Creation"))) shutil.rmtree(str(target_project_path.joinpath(f"{create}/2. Customized"))) if os.path.isdir(target_project_path.joinpath(f"{create}/2. Learn JSON_API using API Logic Server")): shutil.rmtree(str(target_project_path.joinpath(f"{create}/2. Learn JSON_API using API Logic Server"))) shutil.move(src = str(target_project_path.joinpath(f"{create}/3. Logic")), dst = str(target_project_path.joinpath(f"{create}/2. Learn JSON_API using API Logic Server"))) else: # remove logic and database customizations from "2. Customized" (win requires: ignore_errors=True) shutil.rmtree(str(target_project_path.joinpath(f"{create}/2. Customized/logic")), ignore_errors=True) shutil.rmtree(str(target_project_path.joinpath(f"{create}/2. Customized/database")), ignore_errors=True) shutil.copytree(dirs_exist_ok=True, src=str(target_project_path.joinpath(f"{create}/1. Instant_Creation/logic")), dst=str(target_project_path.joinpath(f"{create}/2. Customized/logic"))) shutil.copytree(dirs_exist_ok=True, src=str(target_project_path.joinpath(f"{create}/1. Instant_Creation/database")), dst=str(target_project_path.joinpath(f"{create}/2. Customized/database"))) create_utils.replace_string_in_file(search_for="SECURITY_ENABLED = True", replace_with='SECURITY_ENABLED = False', in_file=str(target_project_path.joinpath(f"{create}/2. Customized/config.py"))) shutil.copyfile(src=self.api_logic_server_dir_path.joinpath('templates/admin.yaml'), dst=str(target_project_path.joinpath(f"{create}/2. Customized/ui/admin/admin.yaml"))) log.info(f"Tutorial project successfully created. 
Next steps:\n") log.info(f' Open the tutorial project in your VSCode\n') if is_docker() == False: log.info(f' Establish your Python environment - see https://apilogicserver.github.io/Docs/Project-Env/') docker_info = """ cd tutorial python3 -m venv venv # may require python -m venv venv source venv/bin/activate # windows venv\Scripts\\activate python3 -m pip install -r requirements.txt""" log.info(f'{docker_info}\n') def create_project(self): """ Creates logic-enabled Python safrs api/admin project, options for execution main driver :returns: none """ # SQLALCHEMY_DATABASE_URI = "sqlite:///" + path.join(basedir, "database/db.sqlite")+ '?check_same_thread=False' self.print_options() log.debug(f"\nApiLogicServer {__version__} Creation Log:") self.abs_db_url, self.nw_db_status, self.model_file_name = create_utils.get_abs_db_url("0. Using Sample DB", self) if self.extended_builder == "*": self.extended_builder = abspath(f'{self.api_logic_server_dir_path}/extended_builder.py') log.debug(f'0. Using default extended_builder: {self.extended_builder}') self.project_directory, self.api_name, self.merge_into_prototype = \ create_utils.get_project_directory_and_api_name(self) self.project_directory_actual = os.path.abspath(self.project_directory) # make path absolute, not relative (no /../) self.project_directory_path = Path(self.project_directory_actual) self.project_name_last_node = Path(self.project_directory_path).name # for prototype, project_name='.' if self.command.startswith("rebuild") or self.command == "add_db": log.debug("1. Not Deleting Existing Project") log.debug("2. Using Existing Project") if self.command == "add_db": self.abs_db_url = self.update_config_and_copy_sqlite_db( f".. ..Adding Database [{self.bind_key}] to existing project") else: # normal path - clone, [overlay nw] self.abs_db_url = create_project_with_nw_samples(self, "2. Create Project:") log.debug(f'3. 
Create/verify database/{self.model_file_name}, then use that to create api/ and ui/ models') model_creation_services = ModelCreationServices(project = self, # Create database/models.py from db project_directory=self.project_directory) fix_database_models(self.project_directory, self.db_types, self.nw_db_status, self.is_tutorial) invoke_creators(model_creation_services) # MAJOR! creates api/expose_api_models, ui/admin & basic_web_app if self.extended_builder is not None and self.extended_builder != "": log.debug(f'4. Invoke extended_builder: {self.extended_builder}, ({self.db_url}, {self.project_directory})') invoke_extended_builder(self.extended_builder, self.abs_db_url, self.project_directory) final_project_fixup("4. Final project fixup", self) if self.open_with != "": # open project with open_with (vscode, charm, atom) -- NOT for docker!! start_open_with(open_with=self.open_with, project_name=self.project_name) if self.nw_db_status in ["nw", "nw+"] and self.command != "add_db": self.add_auth("\nApiLogicProject customizable project created. Adding Security:") if self.command.startswith("add_"): pass # keep silent for add-db, add-auth... elif self.is_tutorial: log.debug(f"\nTutorial created. Next steps:\n") log.debug(f' Establish your Python environment - see https://apilogicserver.github.io/Docs/IDE-Execute/#execute-prebuilt-launch-configurations\n') else: disp_url = self.db_url if disp_url == "": disp_url = "nw" log.info(f"\n\nCustomizable project {self.project_name} created from database {disp_url}. Next steps:\n") if self.multi_api: log.debug(f'Server already running. 
To Access: Configuration > Load > //localhost:5656/{self.api_name}') else: log.info("\nRun API Logic Server:") if os.getenv('CODESPACES'): # log.debug(f' Add port 5656, with Public visibility') - automated in .devcontainer.json log.info(f' Execute using Launch Configuration "ApiLogicServer"') else: log.info(f' cd {self.project_name}; python api_logic_server_run.py') if self.command.startswith("add_"): pass # keep silent for add-db, add-auth... elif self.is_tutorial: log.debug(f" Proceed as described in the readme\n") else: if (is_docker()): if os.getenv('CODESPACES'): log.info(f'\nCustomize right here, in Browser/VSCode - just as you would locally') log.info(f'Save customized project to GitHub') else: log.info(f'\nCustomize Docker project using IDE on local machine:') docker_project_name = self.project_name if self.project_name.startswith('/localhost/'): docker_project_name = self.project_name[11:] else: docker_project_name = f'<local machine directory for: {self.project_name}>' log.info(f' exit # exit the Docker container ') log.info(f' code {docker_project_name} # e.g., open VSCode on created project') else: log.info(f'\nCustomize using your IDE:') log.info(f' code {self.project_name} # e.g., open VSCode on created project') log.info(f' Establish your Python environment - see https://apilogicserver.github.io/Docs/IDE-Execute/#execute-prebuilt-launch-configurations\n') if self.run: # synchronous run of server - does not return run_file = os.path.abspath(f'{resolve_home(self.project_name)}/api_logic_server_run.py') run_file = '"' + run_file + '"' # spaces in file names - with windows run_args = "" if self.command == "create-and-run": run_args = "--create_and_run=True" create_utils.run_command(f'python {run_file} {run_args}', msg="\nStarting created API Logic Project") def check_ports(): try: rtn_hostname = socket.gethostname() rtn_local_ip = socket.gethostbyname(rtn_hostname) except: rtn_local_ip = f"cannot get local ip from {rtn_hostname}" 
log.debug(f"{rtn_local_ip}") port_check = False if port_check or is_docker(): s = socket.socket() # Create a socket object host = socket.gethostname() # Get local machine name port = 5656 # Reserve a port for your service. port_is_available = True try: s.bind((host, port)) # Bind to the port except: port_is_available = False if not port_is_available: msg = "\nWarning - port 5656 does not appear to be available\n" \ " Version 3.30 has changed port assignments to avoid port conflicts\n" \ " For example, docker start:\n" \ " docker run -it --name api_logic_server --rm -p 5656:5656 -p 5002:5002 -v ${PWD}:/localhost apilogicserver/api_logic_server \n" \ "Ports are sometimes freed slowly, you may need to re-issue this command.\n\n" log.warning(msg) # sys.exit(msg) s.close() return rtn_hostname, rtn_local_ip def key_module_map(): """ not called - just index of key code - use this for hover, goto etc ctl-l (^l) for last edit Also, CmdP: Comments: Toggle Editor Commenting """ import create_from_model.ui_admin_creator as ui_admin_creator import create_from_model.api_expose_api_models_creator as api_expose_api_models_creator import sqlacodegen_wrapper.sqlacodegen_wrapper as sqlacodegen_wrapper ProjectRun.create_project() # main driver, calls... create_utils.get_abs_db_url() # nw set here, dbname, db abbrevs create_project_with_nw_samples() # clone project, overlay nw model_creation_services = ModelCreationServices() # creates resource_list (python db model); ctor calls... def and_the_ctor_calls(): sqlacodegen_wrapper.create_models_py({}) # creates models.py via *sqlacodegen* sqlacodegen_wrapper.CodeGenerator.render_class() # sqlacodegen - creates models_py as string model_creation_services.create_resource_list() # creates resource_list via *dynamic import* of models.py invoke_creators(model_creation_services) # creates api & ui, via create_from_model... 
api_expose_api_models_creator.create() # creates api/expose_api_models.py, key input to SAFRS ui_admin_creator.create() # creates ui/admin/admin.yaml from resource_list ProjectRun.update_config_and_copy_sqlite_db() # adds db (model, binds, api, app) to curr project ProjectRun.add_auth() # add_db(auth), adds nw declare_security, upd config ProjectRun.tutorial() # creates basic, nw, nw + cust
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/api_logic_server.py
api_logic_server.py
import api_logic_server_cli.create_from_model.uri_info as uri_info
from api_logic_server_cli.cli_args_base import CliArgsBase
from os.path import abspath
from pathlib import Path
import os


class Project(CliArgsBase):  # extend user-visible args with internal values
    """Internal project state: user-visible CLI args plus derived values."""

    def __init__(self):
        super(Project, self).__init__()
        self.os_cwd = os.getcwd()
        self.abs_db_url = None
        self.nw_db_status = None            # '', nw, nw+, nw-
        self.project_directory = None       # string - may have relative /../
        self.project_directory_actual = None  # string - no relative /../
        self.project_directory_path = None  # Path(project_directory_actual)
        self.merge_into_prototype = None    # used by codespaces (create project over current) - project_name = ./
        self.model_gen_bind_msg = False     # sqlacodegen/codegen msg printed
        self.model_file_name = "models.py"  # name of models file being processed
        self.default_db = "default = nw.sqlite, ? for help"  # FIX: was `self. default_db` (stray space)
        self.default_project_name = "ApiLogicProject"
        self.default_fab_host = "localhost"
        self.default_bind_key_url_separator = "-"  # admin
        self.is_tutorial = False
        self.project_name_last_node = "TBD"
        running_at = Path(__file__)
        self.api_logic_server_dir_path = running_at.parent.absolute()  # no abspath(f'{abspath(get_api_logic_server_dir())}'))
        self.is_codespaces = os.getenv('CODESPACES')

    def print_options(self):
        """ Creating ApiLogicServer with options: (or uri help) """
        if self.db_url == "?":  # can only test interactively, not from launch
            uri_info.print_uri_info()
            exit(0)
        print_options = True
        if print_options:
            print(f'\n\nCreating ApiLogicServer with options:')
            print(f'  --db_url={self.db_url}')
            print(f'  --bind_key={self.bind_key}')
            print(f'  --bind_url_separator={self.bind_key_url_separator}')
            print(f'  --project_name={self.project_name}   (pwd: {self.os_cwd})')
            print(f'  --api_name={self.api_name}')
            print(f'  --admin_app={self.admin_app}')
            print(f'  --react_admin={self.react_admin}')
            print(f'  --flask_appbuilder={self.flask_appbuilder}')
            print(f'  --from_git={self.from_git}')
            # print(f'  --db_types={self.db_types}')
            print(f'  --run={self.run}')
            print(f'  --host={self.host}')
            print(f'  --port={self.port}')
            print(f'  --swagger_host={self.swagger_host}')
            print(f'  --not_exposed={self.not_exposed}')
            print(f'  --open_with={self.open_with}')
            print(f'  --use_model={self.use_model}')
            print(f'  --favorites={self.favorites}')
            print(f'  --non_favorites={self.non_favorites}')
            print(f'  --extended_builder={self.extended_builder}')
            print(f'  --multi_api={self.multi_api}')
            print(f'  --infer_primary_key={self.infer_primary_key}')
            print(f'  --opt_locking={self.opt_locking}')
            # FIX: label was `--opt_locking=` for the attr value (copy-paste defect)
            print(f'  --opt_locking_attr={self.opt_locking_attr}')
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/cli_args_project.py
cli_args_project.py
from contextlib import closing import yaml temp_created_project = "temp_created_project" # see copy_if_mounted import socket import subprocess from os.path import abspath from os.path import realpath from pathlib import Path from shutil import copyfile import shutil import importlib.util from flask import Flask import logging import datetime from typing import NewType import sys import os import importlib import click def get_api_logic_server_dir() -> str: """ :return: ApiLogicServer dir, eg, /Users/val/dev/ApiLogicServer """ running_at = Path(__file__) python_path = running_at.parent.absolute() return str(python_path) # print("sys.path.append(get_api_logic_server_dir())\n",get_api_logic_server_dir()) sys.path.append(get_api_logic_server_dir()) # e.g, on Docker: export PATH="/home/api_logic_server/api_logic_server_cli" api_logic_server_path = os.path.dirname(get_api_logic_server_dir()) # e.g: export PATH="/home/api_logic_server" sys.path.append(api_logic_server_path) from create_from_model.model_creation_services import ModelCreationServices import create_from_model.api_logic_server_utils as create_utils import api_logic_server_cli.create_from_model.uri_info as uri_info import api_logic_server_cli.api_logic_server as PR ''' ProjectRun (main class) ''' from api_logic_server_cli.cli_args_base import OptLocking api_logic_server_info_file_name = get_api_logic_server_dir() + "/api_logic_server_info.yaml" api_logic_server_info_file_dict = {} # last-run (debug, etc) info """ contains last-run info, debug switches to show args, etc """ if Path(api_logic_server_info_file_name).is_file(): api_logic_server_info_file = open(api_logic_server_info_file_name) api_logic_server_info_file_dict = yaml.load(api_logic_server_info_file, Loader=yaml.FullLoader) api_logic_server_info_file.close() last_created_project_name = api_logic_server_info_file_dict.get("last_created_project_name","") default_db = "default = nw.sqlite, ? 
for help" default_project_name = "ApiLogicProject" default_fab_host = "localhost" os_cwd = os.getcwd() default_bind_key_url_separator = "-" # admin app fails with "/" or ":" (json issues?) if os.path.exists('/home/api_logic_server'): # docker? default_project_name = "/localhost/ApiLogicProject" default_fab_host = "0.0.0.0" def is_docker() -> bool: """ running docker? dir exists: /home/api_logic_server """ path = '/home/api_logic_server' path_result = os.path.isdir(path) # this *should* exist only on docker env_result = "DOCKER" == os.getenv('APILOGICSERVER_RUNNING') assert path_result == env_result return path_result ''' exploring no-args, not a clue from click_default_group import DefaultGroup @click.group(cls=DefaultGroup, default='no_args', default_if_no_args=True) def main(): """ wonder if this can just do something """ click.echo("group execution (never happens)") # @click.pass_context @main.command() @click.option('--config', default=None) def no_args(config): print("no args!!") @click.group() @click.pass_context @main.command("mainZ") def mainZ(ctx): """ Creates [and runs] logic-enabled Python database API Logic Projects. \b Doc: https://apilogicserver.github.io/Docs \b Examples: \b ApiLogicServer tutorial # *** start here *** ApiLogicServer create-and-run --db_url= project_name= # defaults to Northwind ApiLogicServer create # prompts for project, db Then, customize created API Logic Project in your IDE """ print("Never executed") ''' @click.group() @click.pass_context def main(ctx): """ Creates [and runs] logic-enabled Python database API Logic Projects. 
\b Doc: https://apilogicserver.github.io/Docs \b Examples: \b ApiLogicServer tutorial # <=== Start Here ApiLogicServer create-and-run --db_url= project_name= # defaults to Northwind ApiLogicServer create # prompts for project name, db url Then, customize created API Logic Project in your IDE """ # click.echo("main - called iff commands supplied") @main.command("about") @click.pass_context def about(ctx): """ Recent Changes, system information. """ print(f'\tInstalled at {abspath(__file__)}\n') print(f'\thttps://apilogicserver.github.io/Docs/Tutorial/\n') def print_at(label: str, value: str): tab_to = 30 - len(label) spaces = ' ' * tab_to print(f'{label}: {spaces}{value}') print("\nPYTHONPATH..") for p in sys.path: print(".." + p) print("") print("api_logic_server_info...") for key, value in api_logic_server_info_file_dict.items(): print_at(f' {key}', value) print("") try: hostname = socket.gethostname() local_ip = socket.gethostbyname(hostname) except: local_ip = f"cannot get local ip from {hostname}" log.debug(f"{local_ip}") print_at('ApiLogicServer version', PR.__version__) print_at('ip (gethostbyname)', local_ip) print_at('on hostname', hostname) print_at('cwd', os. getcwd()) print_at("Python version", create_utils.run_command(f'python --version', msg="no-msg")) print_at("Docker", is_docker()) click.echo( click.style(PR.recent_changes) ) @main.command("welcome") @click.pass_context def welcome(ctx): """ Just print version and exit. """ @main.command("tutorial") @click.option('--create', default='tutorial', help="tutorial or fiddle") @click.pass_context def tutorial(ctx, create): """ Creates (updates) Tutorial. 
Contains 3 projects: basic_flask, ApiLogicProject, ApiLogicProjectNoCustomizations example: cd ApiLogicProject # any empty folder, perhaps where ApiLogicServer is installed ApiLogicServer tutorial """ project_name=os.getcwd() if project_name == get_api_logic_server_dir(): # for ApiLogicServer dev (from |> Run and Debug ) project_name = str(Path(project_name).parent.parent) # .joinpath("Org-ApiLogicServer")) else: project_name = str(Path(project_name)) project = PR.ProjectRun(command="tutorial", project_name=project_name, db_url="", execute=False ) project.tutorial(msg="Creating:", create=create) log.info("") @main.command("create") @click.option('--project_name', default=f'{default_project_name}', prompt="Project to create", help="Create new directory here") # option text shown on create --help @click.option('--db_url', default=f'{default_db}', prompt="SQLAlchemy Database URI", help="SQLAlchemy Database URL - see above\n") @click.option('--api_name', default=f'api', help="Last node of API Logic Server url\n") @click.option('--opt_locking', default=OptLocking.OPTIONAL.value, help="Optimistic Locking [ignore, optional, required]") @click.option('--opt_locking_attr', default="S_CheckSum", help="Attribute Name for Optimistic Locking CheckSum (unused)") @click.option('--id_column_alias', default="Id", help="Attribute Name for db cols named 'id'") @click.option('--from_git', default="", help="Template clone-from project (or directory)") @click.option('--run', is_flag=True, default=False, help="Run created project") @click.option('--open_with', default='', help="Open created project (eg, charm, atom)") @click.option('--not_exposed', default="ProductDetails_V", help="Tables not written to api/expose_api_models") @click.option('--admin_app/--no_admin_app', default=True, is_flag=True, help="Creates ui/react app (yaml model)") @click.option('--multi_api/--no_multi_api', default=False, is_flag=True, help="Create multiple APIs") 
@click.option('--flask_appbuilder/--no_flask_appbuilder', default=False, is_flag=True, help="Creates ui/basic_web_app") @click.option('--react_admin/--no_react_admin', default=False, is_flag=True, help="Creates ui/react_admin app") @click.option('--favorites', default="name description", help="Columns named like this displayed first") @click.option('--non_favorites', default="id", help="Columns named like this displayed last") @click.option('--use_model', default="", help="See ApiLogicServer/wiki/Troubleshooting") @click.option('--host', default=f'localhost', help="Server hostname (default is localhost)") @click.option('--port', default=f'5656', help="Port (default 5656, or leave empty)") @click.option('--swagger_host', default=f'localhost', help="Swagger hostname (default is localhost)") @click.option('--extended_builder', default=f'', help="your_code.py for additional build automation") @click.option('--include_tables', default=f'', help="yml for include: exclude:") @click.option('--infer_primary_key/--no_infer_primary_key', default=False, is_flag=True, help="Infer primary_key for unique cols") @click.pass_context def create(ctx, project_name: str, db_url: str, not_exposed: str, api_name: str, from_git: str, # db_types: str, open_with: str, run: click.BOOL, admin_app: click.BOOL, flask_appbuilder: click.BOOL, react_admin: click.BOOL, use_model: str, host: str, port: str, swagger_host: str, favorites: str, non_favorites: str, extended_builder: str, include_tables: str, multi_api: click.BOOL, opt_locking: str, opt_locking_attr: str, infer_primary_key: click.BOOL, id_column_alias: str): """ Creates new customizable project (overwrites). 
""" global command db_types = "" PR.ProjectRun(command="create", project_name=project_name, db_url=db_url, api_name=api_name, not_exposed=not_exposed, run=run, use_model=use_model, from_git=from_git, db_types=db_types, flask_appbuilder=flask_appbuilder, host=host, port=port, swagger_host=swagger_host, react_admin=react_admin, admin_app=admin_app, favorites=favorites, non_favorites=non_favorites, open_with=open_with, extended_builder=extended_builder, include_tables=include_tables, multi_api=multi_api, infer_primary_key=infer_primary_key, opt_locking=opt_locking, opt_locking_attr=opt_locking_attr, id_column_alias=id_column_alias) @main.command("create-and-run") @click.option('--project_name', default=f'{default_project_name}', prompt="Project to create", help="Create new directory here") @click.option('--db_url', default=f'{default_db}', prompt="SQLAlchemy Database URI", help="SQLAlchemy Database URL - see above\n") @click.option('--api_name', default=f'api', help="Last node of API Logic Server url\n") @click.option('--opt_locking', default=OptLocking.OPTIONAL.value, help="Optimistic Locking [ignore, optional, required]") @click.option('--opt_locking_attr', default="S_CheckSum", help="Attribute Name for Optimistic Locking CheckSum (unused)") @click.option('--id_column_alias', default="Id", help="Attribute Name for db cols named 'id'") @click.option('--from_git', default="", help="Template clone-from project (or directory)") @click.option('--run', is_flag=True, default=True, help="Run created project") @click.option('--open_with', default='', help="Open created project (eg, charm, atom)") @click.option('--not_exposed', default="ProductDetails_V", help="Tables not written to api/expose_api_models") @click.option('--admin_app/--no_admin_app', default=True, is_flag=True, help="Creates ui/react app (yaml model)") @click.option('--flask_appbuilder/--no_flask_appbuilder', default=False, is_flag=True, help="Creates ui/basic_web_app") 
@click.option('--react_admin/--no_react_admin', default=False, is_flag=True, help="Creates ui/react_admin app") @click.option('--multi_api/--no_multi_api', default=False, is_flag=True, help="Create multiple APIs") @click.option('--favorites', default="name description", help="Columns named like this displayed first") @click.option('--non_favorites', default="id", help="Columns named like this displayed last") @click.option('--use_model', default="", help="See ApiLogicServer/wiki/Troubleshooting") @click.option('--host', default=f'localhost', help="Server hostname (default is localhost)") @click.option('--port', default=f'5656', help="Port (default 5656, or leave empty)") @click.option('--swagger_host', default=f'localhost', help="Swagger hostname (default is localhost)") @click.option('--extended_builder', default=f'', help="your_code.py for additional build automation") @click.option('--include_tables', default=f'', help="yml for include: exclude:") @click.option('--infer_primary_key/--no_infer_primary_key', default=False, is_flag=True, help="Infer primary_key for unique cols") @click.pass_context def create_and_run(ctx, project_name: str, db_url: str, not_exposed: str, api_name: str, from_git: str, # db_types: str, open_with: str, run: click.BOOL, admin_app: click.BOOL, flask_appbuilder: click.BOOL, react_admin: click.BOOL, use_model: str, host: str, port: str, swagger_host: str, favorites: str, non_favorites: str, extended_builder: str, include_tables: str, multi_api: click.BOOL, opt_locking: str, opt_locking_attr: str, id_column_alias: str, infer_primary_key: click.BOOL): """ Creates new project and runs it (overwrites). 
""" global command # TODO drop this global db_types = "" PR.ProjectRun(command="create-and-run", project_name=project_name, db_url=db_url, api_name=api_name, not_exposed=not_exposed, run=run, use_model=use_model, from_git=from_git, db_types=db_types, flask_appbuilder=flask_appbuilder, host=host, port=port, swagger_host=swagger_host, react_admin=react_admin, admin_app=admin_app, favorites=favorites, non_favorites=non_favorites, open_with=open_with, extended_builder=extended_builder, include_tables=include_tables, multi_api=multi_api, infer_primary_key=infer_primary_key, opt_locking=opt_locking, opt_locking_attr=opt_locking_attr, id_column_alias=id_column_alias) @main.command("rebuild-from-database") @click.option('--project_name', default=f'{default_project_name}', prompt="Project to create", help="Create new directory here") @click.option('--db_url', default=f'{default_db}', prompt="SQLAlchemy Database URI", help="SQLAlchemy Database URL - see above\n") @click.option('--api_name', default=f'api', help="Last node of API Logic Server url\n") @click.option('--id_column_alias', default="Id", help="Attribute Name for db cols named 'id'") @click.option('--from_git', default="", help="Template clone-from project (or directory)") @click.option('--run', is_flag=True, default=False, help="Run created project") @click.option('--open_with', default='', help="Open created project (eg, charm, atom)") @click.option('--not_exposed', default="ProductDetails_V", help="Tables not written to api/expose_api_models") @click.option('--admin_app/--no_admin_app', default=True, is_flag=True, help="Creates ui/react app (yaml model)") @click.option('--flask_appbuilder/--no_flask_appbuilder', default=False, is_flag=True, help="Creates ui/basic_web_app") @click.option('--react_admin/--no_react_admin', default=False, is_flag=True, help="Creates ui/react_admin app") @click.option('--favorites', default="name description", help="Columns named like this displayed first") 
@click.option('--non_favorites', default="id", help="Columns named like this displayed last") @click.option('--use_model', default="", help="See ApiLogicServer/wiki/Troubleshooting") @click.option('--host', default=f'localhost', help="Server hostname (default is localhost)") @click.option('--port', default=f'5656', help="Port (default 5656, or leave empty)") @click.option('--swagger_host', default=f'localhost', help="Swagger hostname (default is localhost)") @click.option('--extended_builder', default=f'', help="your_code.py for additional build automation") @click.option('--infer_primary_key/--no_infer_primary_key', default=False, is_flag=True, help="Infer primary_key for unique cols") @click.pass_context def rebuild_from_database(ctx, project_name: str, db_url: str, api_name: str, not_exposed: str, from_git: str, # db_types: str, open_with: str, run: click.BOOL, admin_app: click.BOOL, flask_appbuilder: click.BOOL, react_admin: click.BOOL, use_model: str, host: str, port: str, swagger_host: str, favorites: str, non_favorites: str, extended_builder: str, infer_primary_key: click.BOOL, id_column_alias: str): """ Updates database, api, and ui from changed db. 
\b ex \b ApiLogicServer rebuild-from-database --project_name=~/dev/servers/ApiLogicProject --db_url=nw """ db_types = "" PR.ProjectRun(command="rebuild-from-database", project_name=project_name, db_url=db_url, api_name=api_name, not_exposed=not_exposed, run=run, use_model=use_model, from_git=from_git, db_types=db_types, flask_appbuilder=flask_appbuilder, host=host, port=port, swagger_host=swagger_host, react_admin=react_admin, admin_app=admin_app, favorites=favorites, non_favorites=non_favorites, open_with=open_with, extended_builder=extended_builder, multi_api=False, infer_primary_key=infer_primary_key, id_column_alias=id_column_alias) @main.command("add-db") @click.option('--db_url', default=f'todo', prompt="Database url", help="Connect new database here") # TODO @click.option('--bind_key', default=f'Alt', prompt="Bind key", help="Add new bind key here") # TODO @click.option('--bind_key_url_separator', default=default_bind_key_url_separator, help="bindkey / class name url separator") @click.option('--project_name', default=f'', help="Project location") @click.option('--api_name', default="api", help="api prefix name") @click.pass_context # Kat def add_db(ctx, db_url: str, bind_key: str, bind_key_url_separator: str, api_name: str, project_name: str): """ Adds db (model, binds, api, app) to curr project. 
example: cd existing_project ApiLogicServer add-db --db-url="todo" --bind-key="Todo" """ if project_name == "": project_name=os.getcwd() if project_name == get_api_logic_server_dir(): # for ApiLogicServer dev (from |> Run and Debug ) project_name = str( Path(project_name).parent.parent.joinpath("servers").joinpath("ApiLogicProject") ) if db_url == "auth": bind_key = "authentication" PR.ProjectRun(command="add_db", project_name=project_name, api_name=api_name, db_url=db_url, bind_key=bind_key, bind_key_url_separator=bind_key_url_separator ) print("DB Added") @main.command("add-auth") @click.option('--bind_key_url_separator', default=default_bind_key_url_separator, help="bindkey / class name url separator") @click.option('--project_name', default=f'', help="Project location") @click.option('--db_url', default=f'auth', prompt="SQLAlchemy Database URI", help="SQLAlchemy Database URL - see above\n") @click.option('--api_name', default="api", help="api prefix name") @click.pass_context def add_auth_cmd(ctx, bind_key_url_separator: str, db_url: str, project_name: str, api_name: str): """ Adds authorization/authentication to curr project. example: cd existing_project ApiLogicServer add-auth project_name=. 
""" if project_name == "": project_name=os.getcwd() if project_name == get_api_logic_server_dir(): # for ApiLogicServer dev (from |> Run and Debug ) project_name = str( Path(project_name).parent.parent.joinpath("servers").joinpath("ApiLogicProject") ) bind_key = "authentication" project = PR.ProjectRun(command="add_security", project_name=project_name, api_name=api_name, db_url=db_url, bind_key=bind_key, bind_key_url_separator=bind_key_url_separator, execute=False ) project.project_directory, project.api_name, project.merge_into_prototype = \ create_utils.get_project_directory_and_api_name(project) project.project_directory_actual = os.path.abspath(project.project_directory) # make path absolute, not relative (no /../) project.project_directory_path = Path(project.project_directory_actual) models_py_path = project.project_directory_path.joinpath('database/models.py') project.abs_db_url, project.nw_db_status, project.model_file_name = create_utils.get_abs_db_url("0. Using Sample DB", project) if not models_py_path.exists(): log.info(f'... Error - does not appear to be a project: {str(project.project_directory_path)}') log.info(f'... Typical usage - cd into project, use --project_name=. \n') exit (1) is_nw = False if create_utils.does_file_contain(search_for="CategoryTableNameTest", in_file=models_py_path): is_nw = True project.add_auth(msg="Adding Security", is_nw=is_nw) log.info("") @main.command("add-cust") @click.option('--bind_key_url_separator', default=default_bind_key_url_separator, help="bindkey / class name url separator") @click.option('--project_name', default=f'', help="Project location") @click.option('--api_name', default="api", help="api prefix name") @click.pass_context def add_cust(ctx, bind_key_url_separator: str, api_name: str, project_name: str): """ Adds customizations to northwind project. 
example: cd existing_project ApiLogicServer add-cust """ if project_name == "": project_name=os.getcwd() if project_name == get_api_logic_server_dir(): # for ApiLogicServer dev (from |> Run and Debug ) project_name = str( Path(project_name).parent.parent.joinpath("servers").joinpath("NW_NoCust") ) db_url = "auth" bind_key = "authentication" project = PR.ProjectRun(command="add_cust", project_name=project_name, api_name=api_name, db_url=db_url, execute=False ) project.project_directory, project.api_name, project.merge_into_prototype = \ create_utils.get_project_directory_and_api_name(project) project.project_directory_actual = os.path.abspath(project.project_directory) # make path absolute, not relative (no /../) project.project_directory_path = Path(project.project_directory_actual) models_py_path = project.project_directory_path.joinpath('database/models.py') project.abs_db_url, project.nw_db_status, project.model_file_name = create_utils.get_abs_db_url("0. Using Sample DB", project) is_nw = False if create_utils.does_file_contain(search_for="CategoryTableNameTest", in_file=models_py_path): is_nw = True else: raise Exception("Customizations are northwind-specific - this does not appear to be a northwind database") project.add_nw_customizations() @main.command("rebuild-from-model") @click.option('--project_name', default=f'{default_project_name}', prompt="Project to create", help="Create new directory here") @click.option('--db_url', default=f'{default_db}', prompt="SQLAlchemy Database URI", help="SQLAlchemy Database URL - see above\n") @click.option('--api_name', default=f'api', help="Last node of API Logic Server url\n") @click.option('--from_git', default="", help="Template clone-from project (or directory)") @click.option('--run', is_flag=True, default=False, help="Run created project") @click.option('--open_with', default='', help="Open created project (eg, charm, atom)") @click.option('--not_exposed', default="ProductDetails_V", help="Tables not written to 
api/expose_api_models") @click.option('--admin_app/--no_admin_app', default=True, is_flag=True, help="Creates ui/react app (yaml model)") @click.option('--flask_appbuilder/--no_flask_appbuilder', default=False, is_flag=True, help="Creates ui/basic_web_app") @click.option('--react_admin/--no_react_admin', default=False, is_flag=True, help="Creates ui/react_admin app") @click.option('--favorites', default="name description", help="Columns named like this displayed first") @click.option('--non_favorites', default="id", help="Columns named like this displayed last") @click.option('--use_model', default="", help="See ApiLogicServer/wiki/Troubleshooting") @click.option('--host', default=f'localhost', help="Server hostname (default is localhost)") @click.option('--port', default=f'5656', help="Port (default 5656, or leave empty)") @click.option('--swagger_host', default=f'localhost', help="Swagger hostname (default is localhost)") @click.option('--extended_builder', default=f'', help="your_code.py for additional build automation") @click.option('--infer_primary_key/--no_infer_primary_key', default=False, is_flag=True, help="Infer primary_key for unique cols") @click.pass_context # Kat def rebuild_from_model(ctx, project_name: str, db_url: str, api_name: str, not_exposed: str, from_git: str, # db_types: str, open_with: str, run: click.BOOL, admin_app: click.BOOL, flask_appbuilder: click.BOOL, react_admin: click.BOOL, use_model: str, host: str, port: str, swagger_host: str, favorites: str, non_favorites: str, extended_builder: str, infer_primary_key: click.BOOL): """ Updates database, api, and ui from changed models. 
""" db_types = "" PR.ProjectRun(command="rebuild-from-model", project_name=project_name, db_url=db_url, api_name=api_name, not_exposed=not_exposed, run=run, use_model=use_model, from_git=from_git, db_types=db_types, flask_appbuilder=flask_appbuilder, host=host, port=port, swagger_host=swagger_host, react_admin=react_admin, admin_app=admin_app, favorites=favorites, non_favorites=non_favorites, open_with=open_with, extended_builder=extended_builder, multi_api=False, infer_primary_key=infer_primary_key) @main.command("run") @click.option('--project_name', default=f'{last_created_project_name}', prompt="Project to run", help="Project to run") @click.option('--host', default=f'localhost', help="Server hostname (default is localhost)") @click.option('--port', default=f'5656', help="Port (default 5656, or leave empty)") @click.option('--swagger_host', default=f'localhost', help="Swagger hostname (default is localhost)") @click.pass_context def run_api(ctx, project_name: str, host: str="localhost", port: str="5656", swagger_host: str="localhost"): """ Runs existing project. 
\b Example \b ApiLogicServer run --project_name=/localhost/ApiLogicProject ApiLogicServer run --project_name= # runs last-created project """ global command command = "run-api" proj_dir = project_name if proj_dir == "": proj_dir = last_created_project_name else: proj_dir = os.path.abspath(f'{create_utils.resolve_home(project_name)}') run_file = f'{proj_dir}/api_logic_server_run.py ' # alert: sending args makes it hang: {host} {port} {swagger_host} create_utils.run_command(f'python {run_file}', msg="Run Created ApiLogicServer Project", new_line=True) print("run complete") @main.command("create-ui") @click.option('--use_model', default="models.py", help="See ApiLogicServer/wiki/Troubleshooting") @click.option('--favorites', default="name description", help="Columns named like this displayed first") @click.option('--non_favorites', default="id", help="Columns named like this displayed last") @click.pass_context def create_ui(ctx, use_model: str, favorites: str, non_favorites: str, ): """ Creates models.yaml from models.py (internal). \b Example \b ApiLogicServer create-ui --use_model=~/dev/ApiLogicServer/tests/models-nw-plus.py """ global command command = "create-ui" admin_out = resolve_home(use_model.replace("py","yaml")) project_directory, ignore = os.path.split(resolve_home(use_model)) print(f'1. Loading existing model: {use_model}') model_creation_services = ModelCreationServices( # fills in rsource_list for ui_admin_creator use_model=use_model, favorite_names=favorites, non_favorite_names=non_favorites, project_directory=project_directory, command=command, version=PR.__version__) print(f'2. Creating yaml from model') creator_path = abspath(f'{abspath(get_api_logic_server_dir())}/create_from_model') spec = importlib.util.spec_from_file_location("module.name", f'{creator_path}/ui_admin_creator.py') creator = importlib.util.module_from_spec(spec) spec.loader.exec_module(creator) admin_yaml_dump = creator.create(model_creation_services) print(f'3. 
Writing yaml: {admin_out}') with open(admin_out, 'w') as yaml_file: yaml_file.write(admin_yaml_dump) @main.command("examples") @click.pass_context def examples(ctx): """ Example commands, including SQLAlchemy URIs. """ uri_info.print_uri_info() log = logging.getLogger(__name__) def print_args(args, msg): print(msg) for each_arg in args: print(f' {each_arg}') print(" ") def check_ports(): try: rtn_hostname = socket.gethostname() rtn_local_ip = socket.gethostbyname(rtn_hostname) except: rtn_local_ip = f"cannot get local ip from {rtn_hostname}" print(f"{rtn_local_ip}") port_check = False if port_check or is_docker(): s = socket.socket() # Create a socket object host = socket.gethostname() # Get local machine name port = 5656 # Reserve a port for your service. port_is_available = True try: s.bind((host, port)) # Bind to the port except: port_is_available = False if not port_is_available: msg = "\nWarning - port 5656 does not appear to be available\n" \ " Version 3.30 has changed port assignments to avoid port conflicts\n" \ " For example, docker start:\n" \ " docker run -it --name api_logic_server --rm -p 5656:5656 -p 5002:5002 -v ${PWD}:/localhost apilogicserver/api_logic_server \n" \ "Ports are sometimes freed slowly, you may need to re-issue this command.\n\n" log.warning(msg) # sys.exit(msg) s.close() return rtn_hostname, rtn_local_ip def start(): # target of setup.py sys.stdout.write("\nWelcome to API Logic Server " + PR.__version__ + "\n\n") hostname, local_ip = check_ports() # = socket.gethostname() # sys.stdout.write(" SQLAlchemy Database URI help: https://docs.sqlalchemy.org/en/14/core/engines.html\n") main(obj={}) command = "not set" if __name__ == '__main__': # debugger & python command line start here # eg: python api_logic_server_cli/cli.py create --project_name=~/Desktop/test_project # unix: python api_logic_server_cli/cli.py create --project_name=/home/ApiLogicProject print(f'\nWelcome to API Logic Server, {PR.__version__}\n') # at {local_ip} ') 
hostname, local_ip = check_ports() commands = sys.argv if len(sys.argv) > 1 and sys.argv[1] not in ["version", "sys-info", "welcome"] and \ "show-args" in api_logic_server_info_file_dict: print_args(commands, f'\nCommand Line Arguments:') main()
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/cli.py
cli.py
import logging import shutil import sys import os import datetime from pathlib import Path from typing import NewType from shutil import copyfile import create_from_model.model_creation_services as create_from_model log = logging.getLogger(__file__) handler = logging.StreamHandler(sys.stderr) formatter = logging.Formatter('%(message)s') # lead tag - '%(name)s: %(message)s') handler.setFormatter(formatter) log.addHandler(handler) log.propagate = True # log.setLevel(logging.DEBUG) # MetaData = NewType('MetaData', object) MetaDataTable = NewType('MetaDataTable', object) __version__ = "0.0" def create_expose_api_models(model_creation_services: create_from_model.ModelCreationServices): """ create strings for ui/basic_web_app/views.py and api/expose_api_models.py """ result_apis = '' ''' result_apis += '"""' result_apis += ("\nApiLogicServer Generate From Model " + model_creation_services.version + "\n\n" # + "From: " + sys.argv[0] + "\n\n" + "Using Python: " + sys.version + "\n\n" + "At: " + str(datetime.datetime.date()) + "\n\n" + '"""\n\n') ''' port_replace = model_creation_services.project.port if model_creation_services.project.port else "None" result_apis += \ f'\n\ndef expose_models(api, method_decorators = []): \n' # result_apis += ' my_host = HOST\n' # result_apis += ' if HOST == "0.0.0.0":\n' # result_apis += ' my_host = "localhost" # override default HOST for pc"\n' result_apis += ' """\n' result_apis += ' Declare API - on existing SAFRSAPI to expose each model - API automation \n' result_apis += ' - Including get (filtering, pagination, related data access) \n' result_apis += ' - And post/patch/update (including logic enforcement) \n\n' result_apis += ' Invoked at server startup (api_logic_server_run) \n\n' result_apis += ' You typically do not customize this file \n' result_apis += ' - See https://apilogicserver.github.io/Docs/Tutorial/#customize-and-debug \n' result_apis += ' """\n' sys.path.append(model_creation_services.project.os_cwd) for 
each_resource_name in model_creation_services.resource_list: # log.debug("process_each_table: " + each_resource_name) if "TRANSFERFUNDx" in each_resource_name: log.debug("special table") # debug stop here if model_creation_services.project.not_exposed is not None and each_resource_name + " " in model_creation_services.project.not_exposed: # result_apis += "# not_exposed: api.expose_object(models.{resource_name})" continue if "ProductDetails_V" in each_resource_name: log.debug("special table") # should not occur (--noviews) if each_resource_name.startswith("Ab"): # result_apis += "# skip admin table: " + resource_name + "\n" continue elif 'sqlite_sequence' in each_resource_name: # result_apis += "# skip sqlite_sequence table: " + resource_name + "\n" continue else: models_file = 'models' if model_creation_services.project.bind_key != "": models_file = model_creation_services.project.bind_key + "_" + models_file result_apis += f' api.expose_object(database.{models_file}.{each_resource_name}, method_decorators= method_decorators)\n' result_apis += f' return api\n' # self.session.close() expose_api_models_path = Path(model_creation_services.project_directory).joinpath('api/expose_api_models.py') if model_creation_services.project.command.startswith("rebuild"): expose_api_models_path = Path(model_creation_services.project_directory).\ joinpath('api/expose_api_models_created.py') log.debug(f'.. .. ..Rebuild - new api at api/expose_api_models_created (merge/replace expose_api_models as nec)') src = model_creation_services.project.api_logic_server_dir_path src = src.joinpath("prototypes/base/api/expose_api_models.py") assert src.is_file() shutil.copyfile(src, expose_api_models_path) expose_api_models_file = open(expose_api_models_path, 'a') expose_api_models_file.write(result_apis) expose_api_models_file.close() else: # normal path... 
if model_creation_services.project.bind_key != "": expose_api_models_path = Path(model_creation_services.project_directory).\ joinpath(f'api/{model_creation_services.project.bind_key}_expose_api_models.py') src = model_creation_services.project.api_logic_server_dir_path.\ joinpath('prototypes/base/api/expose_api_models.py') dest = expose_api_models_path copyfile(src, dest) expose_api_models_file = open(expose_api_models_path, 'a') expose_api_models_file.write(result_apis) expose_api_models_file.close() return def create(model_creation_services: create_from_model.ModelCreationServices): """ called by ApiLogicServer CLI -- creates api/expose_api_models.py, key input to SAFRS """ create_expose_api_models(model_creation_services)
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/api_expose_api_models_creator.py
api_expose_api_models_creator.py
import ast import io import logging import shutil import traceback from os.path import abspath import importlib.util import sys import os from typing import NewType, Type import sqlalchemy import sqlalchemy.ext from sqlalchemy import MetaData import inspect import importlib from flask import Flask from typing import List, Dict from pathlib import Path from shutil import copyfile from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY from api_logic_server_cli.sqlacodegen_wrapper import sqlacodegen_wrapper from api_logic_server_cli.create_from_model.meta_model import Resource, ResourceAttribute, ResourceRelationship log = logging.getLogger(__name__) # MetaData = NewType('MetaData', object) MetaDataTable = NewType('MetaDataTable', object) class ModelCreationServices(object): """ Model creation and shared services (favorite attributes, etc) Create database/models.py, services for api/ui creation. Key logic is `__init__` (for singleton) calls `create_models`. Much later, create_from_model creators (api, ui) then call helpers * get get resources, favorite attributes, etc etc Note this is about Logical Objects (classes), not tables * Ignore old to-be-deleted code regarding tables and columns """ from api_logic_server_cli.cli_args_project import Project result_views = "" result_apis = "" """ array of substrings used to find favorite column name command line option to override per language, db conventions eg, name in English nom in French """ non_favorite_names = "id" _indent = " " num_pages_generated = 0 num_related = 0 def __init__(self, project: Project, project_directory: str = "~/Desktop/my_project", copy_to_project_directory: str = "", my_children_list: dict = None, my_parents_list: dict = None, version: str = "0.0.0"): """ Called from main driver (create_project) to open db, build resource_list """ self.project = project self.project_directory = None if project_directory: self.project_directory = self.get_windows_path_with_slashes(project_directory) 
self.copy_to_project_directory = "" if copy_to_project_directory != "": self.copy_to_project_directory = self.get_windows_path_with_slashes(copy_to_project_directory) """ self.api_logic_server_dir = api_logic_server_dir self.abs_db_url = abs_db_url # actual (not relative, reflects nw copy, etc) self.os_cwd = os_cwd self.nw_db_status = nw_db_status self.command = command """ self.resource_list : Dict[str, Resource] = dict() self.resource_list_complete = False self.version = version self.my_children_list = my_children_list """ for key table name, value is list of (parent-role-name, child-role-name, relationship) ApiLogicServer """ self.my_parents_list = my_parents_list """ for key table name, value is list of (parent-role-name, child-role-name, relationship) ApiLogicServer """ self.table_to_class_map = {} """ keys are table[.column], values are class / attribute """ self.metadata = None self.engine = None self.session = None self.connection = None self.app = None self.opt_locking = "" """ optimistic locking virtuals (jsonattrs) appended to each class """ ################################################################# # Introspect data mdel (sqlacodegen) & create database/models.py # create resource_list ################################################################# model_file_name, msg = sqlacodegen_wrapper.create_models_py( model_creation_services = self, abs_db_url= self.project.abs_db_url, project_directory = project_directory) self.create_resource_list(model_file_name, msg) # whether created or used, build resource_list @staticmethod def get_windows_path_with_slashes(url: str) -> str: """ idiotic fix for windows (\ --> \\\\) https://stackoverflow.com/questions/1347791/unicode-error-unicodeescape-codec-cant-decode-bytes-cannot-open-text-file""" """ old code full_path = os.path.abspath(url) result = full_path.replace('\\', '\\\\') if os.name == "nt": # windows result = full_path.replace('/', '\\') log.debug(f'*** DEBUG - how about url_path={url_path}') """ 
url_path = Path(url) result = str(url_path) return result def recursive_overwrite(self, src, dest, ignore=None): """ copyTree, with overwrite """ if os.path.isdir(src): if not os.path.isdir(dest): os.makedirs(dest) files = os.listdir(src) if ignore is not None: ignored = ignore(src, files) else: ignored = set() for f in files: if f not in ignored: self.recursive_overwrite(os.path.join(src, f), os.path.join(dest, f), ignore) else: shutil.copyfile(src, dest) @staticmethod def fix_win_path(path: str) -> str: result = path if os.name == "nt": result = path.replace('/', '\\') return result @staticmethod def create_app_zzz(config_filename=None, host="localhost"): import safrs app = Flask("API Logic Server") import api_logic_server_cli.config as app_logic_server_config app.config.from_object(app_logic_server_config.Config) db = safrs.DB db.init_app(app) return app def list_columns(self, a_table_def: MetaDataTable) -> str: """ Example: list_columns = ["InvoiceLineId", "Track.Name", "Invoice.InvoiceId", "UnitPrice", "Quantity"] Parameters a_table_def TableModelInstance Returns list_columns = [...] 
- favorites / joins first, not too many """ return self.gen_columns(a_table_def, "list_columns = [", 2, 5, 0) def get_list_columns(self, a_table_def: MetaDataTable) -> set: gen_string = self.list_columns(a_table_def) gen_string = gen_string[2 + gen_string.find("="):] columns = ast.literal_eval(gen_string) return columns def show_columns(self, a_table_def: MetaDataTable): return self.gen_columns(a_table_def, "show_columns = [", 99, 999, 999) def show_attributes(self, resource: Resource): return self.gen_attributes(resource, "show_columns = [", 99, 999, 999) def get_show_columns(self, a_table_def: MetaDataTable) -> set: gen_string = self.show_columns(a_table_def) gen_string = gen_string[2 + gen_string.find("="):] columns = ast.literal_eval(gen_string) return columns def get_show_attributes(self, resource: Resource) -> set: gen_string = self.show_attributes(resource) gen_string = gen_string[2 + gen_string.find("="):] attributes = ast.literal_eval(gen_string) return attributes def get_attributes(self, resource: Resource) -> list: """ bypass all joins, ids at end - just the raw attributes """ result_set = list() for each_attribute in resource.attributes: result_set.append(each_attribute.name) return result_set def edit_columns(self, a_table_def: MetaDataTable): return self.gen_columns(a_table_def, "edit_columns = [", 99, 999, 999) def get_edit_columns(self, a_table_def: MetaDataTable) -> set: gen_string = self.edit_columns(a_table_def) gen_string = gen_string[2 + gen_string.find("="):] columns = ast.literal_eval(gen_string) return columns def add_columns(self, a_table_def: MetaDataTable): return self.gen_columns(a_table_def, "add_columns = [", 99, 999, 999) def get_add_columns(self, a_table_def: MetaDataTable) -> set: gen_string = self.add_columns(a_table_def) gen_string = gen_string[2 + gen_string.find("="):] columns = ast.literal_eval(gen_string) return columns def query_columns(self, a_table_def: MetaDataTable): return self.gen_columns(a_table_def, "query_columns = 
[", 99, 999, 999) def get_query_columns(self, a_table_def: MetaDataTable) -> set: gen_string = self.query_columns(a_table_def) gen_string = gen_string[2 + gen_string.find("="):] columns = ast.literal_eval(gen_string) return columns def gen_attributes(self, a_resource: Resource, a_view_type: str, a_max_joins: int, a_max_columns: int, a_max_id_columns: int): """ Generates statements like: list_columns =["Id", "Product.ProductName", ... "Id"] This is *not* simply a list of columms: 1. favorite column first, 2. then join (parent) columns, with predictive joins 3. and id fields at the end. Parameters argument1 a_table_def - TableModelInstance argument2 a_view_type - str like "list_columns = [" argument3 a_max_joins - int max joins (list is smaller) argument4 a_max_columns - int how many columns (") argument5 a_id_columns - int how many "id" columns (") Returns string like list_columns =["Name", "Parent.Name", ... "Id"] """ result = a_view_type attributes = a_resource.attributes id_attribute_names = set() processed_attribute_names = set() result += "" if a_resource.name == "OrderDetail": result += "\n" # just for debug stop favorite_attribute_name = self.favorite_attribute_name(a_resource) # FIXME old code, not called column_count = 1 result += '"' + favorite_attribute_name + '"' # todo hmm: emp territory processed_attribute_names.add(favorite_attribute_name) predictive_joins = self.predictive_join_attributes(a_resource) if "list" in a_view_type or "show" in a_view_type: # alert - prevent fab key errors! 
for each_parent_attribute in predictive_joins: column_count += 1 if column_count > 1: result += ", " result += '"' + each_parent_attribute + '"' if column_count > a_max_joins: break for each_column in attributes: if each_column.name in processed_attribute_names: continue if self.is_non_favorite_name(each_column.name.lower()): id_attribute_names.add(each_column.name) continue # ids are boring - do at end column_count += 1 if column_count > a_max_columns: break if column_count > 1: result += ", " result += '"' + each_column.name + '"' for each_id_column_name in id_attribute_names: column_count += 1 if column_count > a_max_id_columns: break if column_count > 1: result += ", " result += '"' + each_id_column_name + '"' result += "]\n" return result def gen_columns(self, a_table_def: MetaDataTable, a_view_type: str, a_max_joins: int, a_max_columns: int, a_max_id_columns: int): """ Generates statements like: list_columns =["Id", "Product.ProductName", ... "Id"] This is *not* simply a list of columms: 1. favorite column first, 2. then join (parent) columns, with predictive joins 3. and id fields at the end. Parameters argument1 a_table_def - TableModelInstance argument2 a_view_type - str like "list_columns = [" argument3 a_max_joins - int max joins (list is smaller) argument4 a_max_columns - int how many columns (") argument5 a_id_columns - int how many "id" columns (") Returns string like list_columns =["Name", "Parent.Name", ... "Id"] """ result = a_view_type columns = a_table_def.columns id_column_names = set() processed_column_names = set() result += "" if a_table_def.name == "OrderDetail": result += "\n" # just for debug stop favorite_column_name = self.favorite_column_name(a_table_def) column_count = 1 result += '"' + favorite_column_name + '"' # todo hmm: emp territory processed_column_names.add(favorite_column_name) predictive_joins = self.predictive_join_columns(a_table_def) if "list" in a_view_type or "show" in a_view_type: # alert - prevent fab key errors! 
        # (tail of a list-columns builder whose `def` is above this view)
        # Emit predictive-join columns first, then favorite columns, then
        # "boring" id columns last; each group is capped by its own max.
        for each_join_column in predictive_joins:
            column_count += 1
            if column_count > 1:
                result += ", "
            result += '"' + each_join_column + '"'
            if column_count > a_max_joins:
                break
        for each_column in columns:
            if each_column.name in processed_column_names:
                continue
            if self.is_non_favorite_name(each_column.name.lower()):
                id_column_names.add(each_column.name)
                continue  # ids are boring - do at end
            column_count += 1
            if column_count > a_max_columns:
                break
            if column_count > 1:
                result += ", "
            result += '"' + each_column.name + '"'
        for each_id_column_name in id_column_names:
            column_count += 1
            if column_count > a_max_id_columns:
                break
            if column_count > 1:
                result += ", "
            result += '"' + each_id_column_name + '"'
        result += "]\n"
        return result

    def predictive_join_attributes(self, a_resource: Resource) -> list:
        """
        Generates set of predictive join column name:

            (Parent1.FavoriteColumn, Parent2.FavoriteColumn, ...)

        Parameters
            argument1 a_resource - Resource whose parents are scanned

        Returns
            list of attr names (such Product.ProductName for OrderDetail)
        """
        result = list()
        if a_resource.name == "Order":  # for debug
            debug_str = "predictive_joins for: " + a_resource.name
        for each_parent in a_resource.parents:
            # look up the parent Resource, then qualify its favorite attribute
            # with this child's role name (e.g., "Product.ProductName")
            each_parent_resource = self.resource_list[each_parent.parent_resource]
            favorite_attribute_name = self.favorite_attribute_name(each_parent_resource)
            parent_ref_attr_name = each_parent.parent_role_name + "." + favorite_attribute_name
            result.append(parent_ref_attr_name)
        return result

    def predictive_join_columns(self, a_table_def: MetaDataTable) -> list:
        """
        Generates set of predictive join column name:

            (Parent1.FavoriteColumn, Parent2.FavoriteColumn, ...)

        Parameters
            argument1 a_table_def - TableModelInstance

        Returns
            list of col names (such Product.ProductName for OrderDetail)
        """
        result = list()
        foreign_keys = a_table_def.foreign_key_constraints
        if a_table_def.name == "Order":  # for debug
            debug_str = "predictive_joins for: " + a_table_def.name
        for each_foreign_key in foreign_keys:
            """ remove old code
            each_parent_name = each_foreign_key.referred_table.name + "." + each_foreign_key.column_keys[0]
            loc_dot = each_parent_name.index(".")
            each_parent_name = each_parent_name[0:loc_dot]
            """
            each_parent_name = each_foreign_key.referred_table.name
            # todo: improve multi-field key support
            parent_getter = each_parent_name
            if parent_getter[-1] == "s":  # plural parent table names have singular lower case accessors
                class_name = self.get_class_for_table(each_parent_name)  # eg, Product
                parent_getter = class_name[0].lower() + class_name[1:]
            each_parent = a_table_def.metadata.tables[each_parent_name]
            favorite_column_name = self.favorite_column_name(each_parent)
            parent_ref_attr_name = parent_getter + "." + favorite_column_name
            if parent_ref_attr_name in result:
                # duplicate parent reference (two FKs to the same parent) -
                # disambiguate with a "1" suffix on the accessor name
                parent_ref_attr_name = parent_getter + "1." + favorite_column_name
            result.append(parent_ref_attr_name)
        return result

    def is_non_favorite_name(self, a_name: str) -> bool:
        """
        Whether a_name is non-favorite (==> display at end, e.g., 'Id')

        Parameters
            argument1 a_name - str (lower case expected)

        Returns
            bool
        """
        # NB: substring match, so "id" also flags e.g. "video"
        for each_non_favorite_name in self._non_favorite_names_list:
            if each_non_favorite_name in a_name:
                return True
        return False

    def find_child_list(self, a_table_def: MetaDataTable) -> list:
        """
        Returns list of models w/ fKey to a_table_def

        Not super efficient
            pass entire table list for each table
            ok until very large schemas

        Parameters
            argument1 a_table_def - TableModelInstance

        Returns
            list of models w/ fKey to each_table
        """
        child_list = []
        all_tables = a_table_def.metadata.tables
        for each_possible_child_tuple in all_tables.items():
            each_possible_child = each_possible_child_tuple[1]
            parents = each_possible_child.foreign_keys
            if (a_table_def.name == "Customer" and
                    each_possible_child.name == "Order"):  # debug aid only
                debug_str = a_table_def
            for each_parent in parents:
                # target_fullname is "Table.column" - keep only the table part
                each_parent_name = each_parent.target_fullname
                loc_dot = each_parent_name.index(".")
                each_parent_name = each_parent_name[0:loc_dot]
                if each_parent_name == a_table_def.name:
                    child_list.append(each_possible_child)
        return child_list

    def model_name(self, a_class_name: str):  # override as req'd
        """
        returns "ModelView" - default suffix for view corresponding to model

        intended for subclass override, for custom views

        Parameters
            argument1 a_class_name - str

        Returns
            view model_name for a_class_name, defaulted to "ModelView"
        """
        return "ModelView"

    def favorite_column_name(self, a_table_def: MetaDataTable) -> str:
        """
        returns string of first column that is...
            named <favorite_name> (default to "name"), else
            containing <favorite_name>, else
            (or first column)

        Parameters
            argument1 a_table_def - TableModelInstance

        Returns
            string of column name that is favorite (e.g., first in list)
        """
        favorite_names = self._favorite_names_list
        for each_favorite_name in favorite_names:
            columns = a_table_def.columns
            # pass 1: exact (case-insensitive) match
            for each_column in columns:
                col_name = each_column.name.lower()
                if col_name == each_favorite_name:
                    return each_column.name
            # pass 2: substring match
            for each_column in columns:
                col_name = each_column.name.lower()
                if each_favorite_name in col_name:
                    return each_column.name
        # NOTE(review): reconstructed indentation places this fallback outside
        # the favorites loop (try every favorite before defaulting) - confirm
        for each_column in columns:  # no favorites, just return 1st
            return each_column.name

    def favorite_attribute_name(self, resource: Resource) -> str:
        """
        returns string of first attribute that is...
            named <favorite_name> (default to "name"), else
            containing <favorite_name>, else
            (or first attribute)

        Parameters
            argument1 resource - Resource

        Returns
            string of attribute name that is favorite (e.g., first in list)
        """
        # delegation to the Resource; the dead code below is the prior inline version
        return resource.get_favorite_attribute()

        """
        favorite_names = self.project.favorites  # FIXME not _favorite_names_list
        for each_favorite_name in favorite_names:
            attributes = resource.attributes
            for each_attribute in attributes:
                attribute_name = each_attribute.name.lower()
                if attribute_name == each_favorite_name:
                    return each_attribute.name
            for each_attribute in attributes:
                attribute_name = each_attribute.name.lower()
                if each_favorite_name in attribute_name:
                    return each_attribute.name
        for each_attribute in resource.attributes:  # no favorites, just return 1st
            return each_attribute.name
        """

    def add_table_to_class_map(self, orm_class) -> str:
        """ given class, find table (hide your eyes), add table/class to table_to_class_map

        orm_class is an (name, class) tuple; the table name is scraped out of
        the string form of the class query (hence "hide your eyes")
        """
        orm_class_info = orm_class[1]
        query = str(orm_class_info.query)[7:]   # strip leading 'SELECT ' text
        table_name = query.split('.')[0]
        table_name = table_name.strip('\"')
        self.table_to_class_map_update(table_name=table_name, class_name=orm_class[0])
        return table_name

    def table_to_class_map_update(self, table_name: str, class_name: str):
        # single point of update for the table_name -> class_name map
        self.table_to_class_map.update({table_name: class_name})

    def get_class_for_table(self, table_name) -> str:
        """ given table_name, return its class_name from table_to_class_map

        Returns None (and quietly skips) when table_name is unmapped, e.g. a view
        """
        if table_name in self.table_to_class_map:
            return self.table_to_class_map[table_name]
        else:
            debug_str = "skipping view: " + table_name
            return None

    def find_meta_data(self, cwd: str, log_info: bool = False) -> MetaData:
        # metadata was captured during create_resource_list (resource_class.metadata)
        return self.metadata

    def resolve_home(self, name: str) -> str:
        """
        :param name: a file name, eg, ~/Desktop/a.b
        :return: /users/you/Desktop/a.b

        This just removes the ~, the path may still be relative to run location
        """
        result = name
        if result.startswith("~"):
            result = str(Path.home()) + result[1:]
        return result

    def close_app(self):
        """ may not be necessary - once had to open app to load class """
        if self.app:
            self.app.teardown_appcontext(None)
        if self.engine:
            self.engine.dispose()

    #############################
    # get meta data
    #############################

    def create_resource_list(self, models_file, msg):
        """
        Creates self.resource_list (ie, ModelCreationServices.resource_list)

        1. Dynamic import of models.py
        2. Use Safrs metadata to create ModelCreationServices.resource_list

        self.resource_list later used to drive create_from_model modules - API, UI

        :param models_file name of file for output
        :param msg e.g. .. .. ..Create resource_list - dynamic import database/models.py, inspect
        """
        """ old code
        # project_abs_path = abspath(self.project_directory)
        # project_abs_path = str(Path(self.project_directory).absolute())
        """
        project_path = self.project_directory
        debug_dynamic_loader = False
        if debug_dynamic_loader:
            log.debug(f'\n\n ### INSTALL cwd = {os.getcwd()}')
            log.debug(f'\n*** DEBUG/import - self.project_directory={self.project_directory}')
            log.debug(f'*** DEBUG/import - project_abs_path={project_path}')
        model_imported = False
        # create-ui projects keep models at the project root; ALS projects in /database
        path_to_add = project_path if self.project.command == "create-ui" else \
            project_path + "/database"  # for Api Logic Server projects
        sys.path.insert(0, self.project_directory)  # e.g., /Users/val/dev/servers/install/ApiLogicServer
        sys.path.insert(0, path_to_add)             # e.g., /Users/val/dev/servers/install/ApiLogicServer/database
        log.debug(msg + " in <project>/database")  # + path_to_add)
        # sys.path.insert( 0, '/Users/val/dev/servers/install/ApiLogicServer/ApiLogicProject/database')
        # sys.path.insert( 0, '/Users/val/dev/servers/install/ApiLogicServer/ApiLogicProject')  # AH HA!!
        # sys.path.insert( 0, 'ApiLogicProject')  # or, AH HA!!
        # log.debug(f'*** DEBUG - sys.path={sys.path}')
        try:
            # credit: https://www.blog.pythonlibrary.org/2016/05/27/python-201-an-intro-to-importlib/
            models_name = 'models'
            if self.project.bind_key is not None and self.project.bind_key != "":
                models_name = self.project.bind_key + "_" + models_name  # e.g., auth_models
            importlib.import_module(models_name)
            model_imported = True
        except:  # NOTE(review): bare except is deliberate best-effort - do not narrow without care
            log.debug(f'\n===> ERROR - Dynamic model import failed in {path_to_add} - project run will fail')
            traceback.print_exc()
            pass  # try to continue to enable manual fixup
        orm_class = None  # NOTE(review): never reassigned - the success log below appears unreachable
        if not model_imported:
            log.debug('.. .. ..Creation proceeding to enable manual database/models.py fixup')
            log.debug('.. .. .. See https://apilogicserver.github.io/Docs/Troubleshooting/')
        else:
            try:
                resource_list: Dict[str, Resource] = dict()
                """ will be assigned to ModelCreationServices.resource_list """
                cls_members = inspect.getmembers(sys.modules[models_name], inspect.isclass)
                for each_cls_member in cls_members:
                    each_class_def_str = str(each_cls_member)
                    # such as ('Category', <class 'models.Category'>)
                    # filter to classes declared in models_name, excluding SAFRS "Ab..." bases
                    if (f"'{models_name}." in str(each_class_def_str)
                            and "Ab" not in str(each_class_def_str)):
                        resource_name = each_cls_member[0]
                        resource_class = each_cls_member[1]
                        table_name = resource_class._s_collection_name
                        if table_name == "Todo":
                            debug_str = "Excellent breakpoint"
                        resource = Resource(name=resource_name, model_creation_services=self)
                        self.metadata = resource_class.metadata
                        self.table_to_class_map.update({table_name: resource_name})  # required for ui_basic_web_app
                        # resource may already exist, created as a parent stub below
                        if resource_name not in resource_list:
                            resource_list[resource_name] = resource
                        resource = resource_list[resource_name]
                        resource.table_name = table_name
                        resource_data = {"type": resource_class._s_type}  # todo what's this?
                        resource_data = {"type": resource_name}
                        # attributes: ResourceAttribute registers itself on resource
                        for each_attribute in resource_class._s_columns:
                            attr_type = str(each_attribute.type)
                            resource_attribute = ResourceAttribute(each_attribute=each_attribute,
                                                                   resource=resource)
                        # relationships: only many-to-one (this child's parents) are processed here
                        for rel_name, rel in resource_class._s_relationships.items():
                            # relation = {}
                            # relation["direction"] = "toone" if rel.direction == MANYTOONE else "tomany"
                            if rel.direction == ONETOMANY:  # process only parents of this child
                                debug_str = "onetomany"
                            else:  # many to one (parent for this child) - version <= 3.50.43
                                debug_rel = False
                                if debug_rel:
                                    debug_rel_str = f'Debug resource_class._s_relationships {resource_name}: ' \
                                                    f'parent_role_name (aka rel_name): {rel_name}, ' \
                                                    f'child_role_name (aka rel.back_populates): {rel.back_populates}'
                                    log.debug(debug_rel_str)
                                parent_role_name = rel_name
                                child_role_name = rel.back_populates
                                do_patch_self_reln = False  # roles backward for self-relns, but addressed in codegen
                                if do_patch_self_reln and resource_name == rel.mapper.class_._s_class_name:
                                    parent_role_name = rel.back_populates
                                    child_role_name = rel_name
                                relationship = ResourceRelationship(parent_role_name=parent_role_name,
                                                                    child_role_name=child_role_name)
                                for each_fkey in rel._calculated_foreign_keys:
                                    pair = ("?", each_fkey.description)  # parent key name unknown here
                                    relationship.parent_child_key_pairs.append(pair)
                                resource.parents.append(relationship)
                                relationship.child_resource = resource_name
                                parent_resource_name = str(rel.target.name)
                                parent_resource_name = rel.mapper.class_._s_class_name  # overrides table name
                                relationship.parent_resource = parent_resource_name
                                # create a stub parent if not yet seen; filled in when its class is visited
                                if parent_resource_name not in resource_list:
                                    parent_resource = Resource(name=parent_resource_name,
                                                               model_creation_services=self)
                                    resource_list[parent_resource_name] = parent_resource
                                parent_resource = resource_list[parent_resource_name]
                                parent_resource.children.append(relationship)
                            pass
                        pass
                debug_str = f'setting resource_list: {str(resource_list)}'
                self.resource_list = resource_list
                # model loaded - excellent breakpoint location
                if orm_class is not None:  # NOTE(review): orm_class is always None - dead branch?
                    log.debug(f'.. .. ..Dynamic model import successful '
                              f'({len(self.table_to_class_map)} classes'
                              f') -'
                              f' getting metadata from {str(orm_class)}')
            except:  # NOTE(review): bare except - logs and continues by design
                log.debug("\n===> ERROR - Unable to introspect model classes")
                traceback.print_exc()
                pass
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/model_creation_services.py
model_creation_services.py
import subprocess, os, sys
from pathlib import Path
from os.path import abspath
from api_logic_server_cli.cli_args_project import Project
import logging

log = logging.getLogger(__name__)


def resolve_home(name: str) -> str:
    """
    :param name: a file name, eg, ~/Desktop/a.b
    :return: /users/you/Desktop/a.b

    This just removes the ~, the path may still be relative to run location
    """
    result = name
    if result.startswith("~"):
        result = str(Path.home()) + result[1:]
    return result


def get_project_directory_and_api_name(project):
    """
    user-supplied project_name, less the tilde (which might be in project_name); typically relative to cwd.

    :param project_name: a file name, eg, ~/Desktop/a.b
    :param api_name: defaults to 'api'
    :param multi_api: cli arg - e.g., set by alsdock

    :return:
        rtn_project_directory -- /users/you/Desktop/a.b (removes the ~)
        rtn_api_name -- api_name, or last node of project_name if multi_api or api_name is "."
        rtn_merge_into_prototype -- preserve contents of current (".", "./") *prototype* project
    """
    rtn_project_directory = project.project_name    # eg, '../../servers/ApiLogicProject'
    rtn_api_name = project.api_name                 # typically api
    rtn_merge_into_prototype = False
    if rtn_project_directory.startswith("~"):
        rtn_project_directory = str(Path.home()) + rtn_project_directory[1:]
    if rtn_project_directory == '.' or rtn_project_directory == './':
        rtn_project_directory = project.os_cwd
        rtn_merge_into_prototype = True
        msg = ''
        if rtn_project_directory == get_api_logic_server_dir():
            # dev convenience: don't merge into the installed ApiLogicServer dir itself
            rtn_project_directory = str(Path(get_api_logic_server_dir()) / 'ApiLogicProject')
            msg = ' <dev>'
        log.debug(f'1. Merge into project prototype / current project: {rtn_project_directory}{msg}')
    project_path = Path(rtn_project_directory)
    project_path_last_node = project_path.parts[-1]
    if project.multi_api or project.api_name == ".":
        rtn_api_name = project_path_last_node
    return rtn_project_directory, \
        rtn_api_name, \
        rtn_merge_into_prototype


def get_abs_db_url(msg, project: Project):
    """ non-relative db location - we work with this

    handle db_url abbreviations (nw, nw-, todo, allocation, etc)

    but NB: we copy sqlite db to <project>/database - see create_project_with_nw_samples (below)

    also: compute physical nw db name (usually nw-gold) to be used for copy

    returns abs_db_url, nw_db_status - the real url (e.g., for nw), and whether it's really nw,
    and model_file_name
    """
    rtn_nw_db_status = ""  # presume not northwind
    rtn_abs_db_url = project.db_url
    # SQL/Server urls make VScode fail due to '?', so unfortunate work-around... (better: internalConsole)
    if rtn_abs_db_url.startswith('{install}'):
        install_db = str(Path(get_api_logic_server_dir()).joinpath('database'))
        rtn_abs_db_url = rtn_abs_db_url.replace('{install}', install_db)
    if rtn_abs_db_url.startswith('SqlServer-arm'):
        pass
    """ per this: https://stackoverflow.com/questions/69950871/sqlalchemy-and-sqlite3-error-if-database-file-does-not-exist
        I would like to set URL like this to avoid creating empty db, but it fails
        SQLALCHEMY_DATABASE_URI = 'sqlite:///file:/Users/val/dev/servers/ApiLogicProject/database/db.sqlite'  # ?mode=ro&uri=true'
        the file: syntax fails, though "current versions" should work:
        https://docs.sqlalchemy.org/en/14/dialects/sqlite.html#uri-connections
    """
    # abbreviation -> physical db url mapping
    if project.db_url in [project.default_db, "", "nw", "sqlite:///nw.sqlite"]:  # nw-gold: default sample
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/nw-gold.sqlite"))}'
        rtn_nw_db_status = "nw"  # api_logic_server_dir_path
        # see also create_project_with_nw_samples for overlaying other project files
        log.debug(f'{msg} from: {rtn_abs_db_url}')  # /Users/val/dev/ApiLogicServer/api_logic_server_cli/database/nw-gold.sqlite
    elif project.db_url == "nw-":                                       # nw: just in case
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/nw-gold.sqlite"))}'
        rtn_nw_db_status = "nw-"
    elif project.db_url == "nw--":                                      # nw: unused - avoid
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/nw.sqlite"))}'
        rtn_nw_db_status = "nw-"
    elif project.db_url == "nw+":                                       # nw-gold-plus: next version
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/nw-gold-plus.sqlite"))}'
        rtn_nw_db_status = "nw+"
        log.debug(f'{msg} from: {rtn_abs_db_url}')
    elif project.db_url == "auth" or project.db_url == "authorization":
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/authentication.sqlite"))}'
    elif project.db_url == "chinook":
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/Chinook_Sqlite.sqlite"))}'
    elif project.db_url == "todo" or project.db_url == "todos":
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/todos.sqlite"))}'
    elif project.db_url == "new":
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/new.sqlite"))}'
    elif project.db_url == "table_filters_tests":
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/table_filters_tests.sqlite"))}'
    elif project.db_url == "classicmodels":
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/classicmodels.sqlite"))}'
    elif project.db_url == "allocation":
        rtn_abs_db_url = f'sqlite:///{str(project.api_logic_server_dir_path.joinpath("database/allocation.sqlite"))}'
    elif project.db_url.startswith('sqlite:///'):
        # relative sqlite path -> absolute
        url = project.db_url[10: len(project.db_url)]
        rtn_abs_db_url = abspath(url)
        rtn_abs_db_url = 'sqlite:///' + rtn_abs_db_url
    elif project.db_url == 'sqlsvr-sample':  # work-around - VSCode run config arg parsing
        rtn_abs_db_url = 'mssql+pyodbc://sa:Posey3861@localhost:1433/SampleDB?driver=ODBC+Driver+18+for+SQL+Server&trusted_connection=no&Encrypt=no'
    elif project.db_url == 'sqlsvr-nwlogic':  # work-around - VSCode run config arg parsing
        rtn_abs_db_url = 'mssql+pyodbc://sa:Posey3861@localhost:1433/nwlogic?driver=ODBC+Driver+18+for+SQL+Server&trusted_connection=no&Encrypt=no'
    elif project.db_url == 'sqlsvr-nw':  # work-around - VSCode run config arg parsing
        rtn_abs_db_url = 'mssql+pyodbc://sa:Posey3861@localhost:1433/NORTHWND?driver=ODBC+Driver+18+for+SQL+Server&trusted_connection=no&Encrypt=no'
    elif project.db_url == 'sqlsvr-nw-docker':  # work-around - VSCode run config arg parsing
        rtn_abs_db_url = 'mssql+pyodbc://sa:Posey3861@HOST_IP:1433/NORTHWND?driver=ODBC+Driver+17+for+SQL+Server&trusted_connection=no'
        rtn_abs_db_url = 'mssql+pyodbc://sa:Posey3861@HOST_IP:1433/NORTHWND?driver=ODBC+Driver+18+for+SQL+Server&trusted_connection=no&Encrypt=no'
        host_ip = "10.0.0.234"  # ApiLogicServer create --project_name=/localhost/sqlsvr-nw-docker --db_url=sqlsvr-nw-docker
        if os.getenv('HOST_IP'):
            host_ip = os.getenv('HOST_IP')  # type: ignore # type: str
        rtn_abs_db_url = rtn_abs_db_url.replace("HOST_IP", host_ip)
    elif project.db_url == 'sqlsvr-nw-docker-arm':  # work-around - VSCode run config arg parsing
        rtn_abs_db_url = 'mssql+pyodbc://sa:[email protected]:1433/NORTHWND?driver=ODBC+Driver+18+for+SQL+Server&trusted_connection=no&Encrypt=no'
        host_ip = "10.0.0.77"  # ApiLogicServer create --project_name=/localhost/sqlsvr-nw-docker --db_url=sqlsvr-nw-docker-arm
        if os.getenv('HOST_IP'):
            host_ip = os.getenv('HOST_IP')  # type: ignore # type: str
        rtn_abs_db_url = rtn_abs_db_url.replace("HOST_IP", host_ip)
    model_file_name = "models.py"
    if project.bind_key != "":
        model_file_name = project.bind_key + "_" + "models.py"
    return rtn_abs_db_url, rtn_nw_db_status, model_file_name


def get_api_logic_server_dir() -> str:
    """
    :return: ApiLogicServer dir, eg, /Users/val/dev/ApiLogicServer
    """
    path = Path(__file__)
    parent_path = path.parent           # create_from_model
    parent_path = parent_path.parent    # api_logic_server_cli
    return str(parent_path)


def does_file_contain(search_for: str, in_file: str) -> bool:
    """ returns True if <search_for> is <in_file> """
    with open(Path(in_file), 'r+') as fp:
        file_lines = fp.readlines()  # lines is list of lines, each element '...\n'
        found = False
        insert_line = 0  # NOTE(review): unused here - copied from insert_lines_at?
        for each_line in file_lines:
            if search_for in each_line:
                found = True
                break
        return found


def replace_string_in_file(search_for: str, replace_with: str, in_file: str):
    # whole-file read/replace/write; fine for the small template files this serves
    with open(Path(in_file), 'r') as file:
        file_data = file.read()
        file_data = file_data.replace(search_for, replace_with)
    with open(in_file, 'w') as file:
        file.write(file_data)


def insert_lines_at(lines: str, at: str, file_name: str, after: bool = False):
    """ insert <lines> into file_name after line with <str>

    :param lines: text to insert (may be multi-line)
    :param at: search string identifying the anchor line
    :param file_name: file to rewrite in place
    :param after: insert after (True) or before (False) the anchor line
    :raises Exception: when the anchor string is not found
    """
    with open(Path(file_name), 'r+') as fp:
        file_lines = fp.readlines()  # lines is list of lines, each element '...\n'
        found = False
        insert_line = 0
        for each_line in file_lines:
            if at in each_line:
                found = True
                break
            insert_line += 1
        if not found:
            raise Exception(f'Internal error - unable to find insert:\n'
                            f'.. seeking {at}\n'
                            f'.. in {file_name}')
        if after:
            insert_line = insert_line + 1
        file_lines.insert(insert_line, lines)  # you can use any index if you know the line index
        fp.seek(0)  # file pointer locates at the beginning to write the whole file again
        fp.writelines(file_lines)  # write whole list again to the same file


def find_valid_python_name() -> str:
    '''
        sigh.  On *some* macs, python fails so we must use python3.

        return 'python3' in this case
    '''
    python3_worked = False
    try:
        result_b = subprocess.check_output('python --version', shell=True, stderr=subprocess.STDOUT)
    except Exception as e:
        python3_worked = False
        try:
            result_b = subprocess.check_output('python3 --version', shell=True, stderr=subprocess.STDOUT)
        except Exception as e1:
            python3_worked = False
        # NOTE(review): set True even if the python3 probe above failed - confirm intent
        python3_worked = True
    if python3_worked:
        return "python3"
    else:
        return "python"


def run_command(cmd: str, env=None, msg: str = "", new_line: bool = False) -> str:
    """ run shell command

    :param cmd: string of command to execute
    :param env:
    :param msg: optional message (no-msg to suppress)
    :return:
    """
    if cmd.startswith('python'):
        valid_python_name = find_valid_python_name()
        cmd = cmd.replace("python", valid_python_name)
    log_msg = ""
    if msg != "Execute command:":
        log_msg = msg + " with command:"
    if msg == "no-msg":
        log_msg = ""
    else:
        log.debug(f'{log_msg} {cmd}')
    if new_line:
        log.debug("")
    use_env = env
    if env is None:
        project_dir = get_api_logic_server_dir()
        # NOTE(review): 'site_packages' - conventional dir is 'site-packages'; confirm
        python_path = str(project_dir) + "/venv/lib/python3.9/site_packages"
        use_env = os.environ.copy()
        # log.debug("\n\nFixing env for cmd: " + cmd)
        # NOTE(review): hasattr() on a dict is always False - did this mean
        # `"PYTHONPATH" in use_env`?  As written, the else branch always runs.
        if hasattr(use_env, "PYTHONPATH"):
            use_env["PYTHONPATH"] = python_path + ":" + use_env["PYTHONPATH"]
            # eg, /Users/val/dev/ApiLogicServer/venv/lib/python3.9
            # log.debug("added PYTHONPATH: " + str(use_env["PYTHONPATH"]))
        else:
            use_env["PYTHONPATH"] = python_path
            # log.debug("created PYTHONPATH: " + str(use_env["PYTHONPATH"]))
    use_env_debug = False  # not able to get this working
    if use_env_debug:
        result_b = subprocess.check_output(cmd, shell=True, env=use_env)
    else:
        result_b = subprocess.check_output(cmd, shell=True)
        # , stderr=subprocess.STDOUT)  # causes hang on docker
    result = str(result_b)  # b'pyenv 1.2.21\n'
    # this code never gets reached...
    result = result[2: len(result) - 3]  # strip b'...\n' wrapper from repr
    tab_to = 20 - len(cmd)
    spaces = ' ' * tab_to
    if msg == "no-msg":
        pass
    elif result != "" and result != "Downloaded the skeleton app, good coding!":
        log.debug(f'{log_msg} {cmd} result: {spaces}{result}')
    return result
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/api_logic_server_utils.py
api_logic_server_utils.py
import sys

# CLI help text: example invocations plus the db_url abbreviation catalog.
uri_info = [
    'Examples:',
    '  ApiLogicServer create-and-run',
    '  ApiLogicServer create-and-run --db_url=sqlite:////Users/val/dev/todo_example/todos.db --project_name=todo',
    '  ApiLogicServer create-and-run --db_url=sqlite:///c:\ApiLogicServer\\nw.sqlite --project_name=nw',
    '  ApiLogicServer create-and-run --db_url=mysql+pymysql://root:p@mysql-container:3306/classicmodels --project_name=/localhost/docker_db_project',
    '  ApiLogicServer create-and-run --db_url=\'mssql+pyodbc://sa:Posey3861@localhost:1433/NORTHWND?driver=ODBC+Driver+18+for+SQL+Server&trusted_connection=no&Encrypt=no\'',
    '  ApiLogicServer create-and-run --db_url=postgresql://postgres:[email protected]/postgres',
    '  ApiLogicServer create --project_name=my_schema --db_url=postgresql://postgres:p@localhost/my_schema',
    '  ApiLogicServer create --db_url=postgresql+psycopg2://postgres:password@localhost:5432/postgres?options=-csearch_path%3Dmy_db_schema',
    '  ApiLogicServer create --project_name=Chinook \\',
    '    --host=ApiLogicServer.pythonanywhere.com --port= \\',
    '    --db_url=mysql+pymysql://ApiLogicServer:***@ApiLogicServer.mysql.pythonanywhere-services.com/ApiLogicServer\$Chinook',
    '',
    'Where --db_url is one of...',
    '   <default>                     Sample DB - https://apilogicserver.github.io/Docs/Sample-Database/',
    '   nw-                           Sample DB, no customizations - add later with perform_customizations.py',
    '   <SQLAlchemy Database URI>     Your own database - https://docs.sqlalchemy.org/en/14/core/engines.html',
    '                                     Other URI examples:           - https://apilogicserver.github.io/Docs/Database-Connectivity//',
    ' ',
    'Docs: https://apilogicserver.github.io/Docs/'
]


def print_uri_info():
    """ Write the banner, the example URIs, and the Docs URL to stdout. """
    banner = [
        '',
        'Creates and optionally runs a customizable Api Logic Project',
        ''
    ]
    emit = sys.stdout.write
    for text in banner + uri_info:
        emit(text + '\n')
    emit('\n')
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/uri_info.py
uri_info.py
import logging
from re import X  # NOTE(review): appears unused - candidate for removal
import shutil
import sys
import os
import pathlib
from pathlib import Path
from typing import NewType, List, Tuple, Dict
import sqlalchemy
import yaml
from sqlalchemy import MetaData, false
import datetime
import api_logic_server_cli.create_from_model.model_creation_services as create_from_model
import api_logic_server_cli.create_from_model.api_logic_server_utils as create_utils
from dotmap import DotMap
from api_logic_server_cli.create_from_model.meta_model import Resource

log = logging.getLogger(__file__)
log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stderr)
formatter = logging.Formatter(f'%(name)s: %(message)s')  # lead tag - '%(name)s: %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.propagate = True

# temp hacks for admin app migration to attributes
admin_attr_ordering = True
admin_parent_joins_implicit = True   # True => id's displayed as joins, False => explicit parent join attrs
admin_child_grids = False            # True => identify each child grid attr explicitly, False => use main grid definition
admin_relationships_with_parents = True

# have to monkey patch to work with WSL as workaround for https://bugs.python.org/issue38633
import errno, shutil

orig_copyxattr = shutil._copyxattr


def patched_copyxattr(src, dst, *, follow_symlinks=True):
    # swallow EACCES from copyxattr (WSL filesystems reject xattr copies)
    try:
        orig_copyxattr(src, dst, follow_symlinks=follow_symlinks)
    except OSError as ex:
        if ex.errno != errno.EACCES:  # filesystem does not support extended attributes
            raise


shutil._copyxattr = patched_copyxattr

# MetaData = NewType('MetaData', object)
MetaDataTable = NewType('MetaDataTable', object)


class AdminCreator(object):
    """ Iterate over model

    Create ui/admin/admin.yaml
    """

    _favorite_names_list = []  #: ["name", "description"]
    """
        array of substrings used to find favorite column name

        command line option to override per language, db conventions

        eg,
            name in English
            nom in French
    """
    _non_favorite_names_list = []
    non_favorite_names = "id"

    num_pages_generated = 0
    num_related = 0

    def __init__(self,
                 mod_gen: create_from_model.ModelCreationServices,
                 host: str = "localhost",
                 port: str = "5656",
                 not_exposed: str = 'ProductDetails_V',
                 favorite_names: str = "name description",
                 non_favorite_names: str = "id"):
        self.mod_gen = mod_gen
        self.host = host
        self.port = port
        self.not_exposed = not_exposed
        self.favorite_names = favorite_names
        self.non_favorite_name = non_favorite_names  # NOTE(review): singular name shadows the class attr 'non_favorite_names'
        self.multi_reln_exceptions = list()
        self.metadata = None
        self.engine = None
        self.session = None
        self.connection = None
        self.app = None
        self.admin_yaml = DotMap()   # accumulated here, then serialized to ui/admin/admin.yaml
        self.max_list_columns = 8    # maybe make this a param
        self._non_favorite_names_list = self.non_favorite_names.split()
        self._favorite_names_list = self.favorite_names.split()

    def create_admin_application(self):
        """ main driver - loop through resources, write admin.yaml - with backup, nw customization """
        # NOTE(review): reconstructed else-pairing (outer if) - confirm against upstream
        if (self.mod_gen.project.command == "create-ui" or self.mod_gen.project.command.startswith("rebuild")) \
                or self.mod_gen.project.command == "add_db":
            if self.mod_gen.project.command.startswith("rebuild"):
                log.debug(".. .. ..Use existing ui/admin directory")
        else:
            self.create_admin_app(msg=".. .. ..Create ui/admin")
        sys.path.append(self.mod_gen.project.os_cwd)

        use_repl = True
        if use_repl:  # enables same admin.yaml for local vs Codespace, by runtime fixup of api_root
            self.admin_yaml.api_root = '{http_type}://{swagger_host}:{port}/{api}'
            self.admin_yaml.authentication = {}
            self.admin_yaml.authentication['endpoint'] = '{http_type}://{swagger_host}:{port}/api/auth/login'
        else:  # old code - ignore
            self.admin_yaml.api_root = f'http://localhost:5656/{self.mod_gen.api_name}'
            self.admin_yaml.authentication = f'http://localhost:5656/auth/login'
            if self.host != "localhost":
                if self.port != "":
                    self.admin_yaml.api_root = f'http://{self.host}:{self.port}/{self.mod_gen.api_name}'
                    self.admin_yaml.authentication = f'http://{self.host}:{self.port}/auth/login'
                else:
                    self.admin_yaml.api_root = f'http://{self.host}/{self.mod_gen.api_name}'
                    # NOTE(review): {auth/login} inside an f-string is evaluated as an
                    # expression (auth / login) and would raise NameError if reached -
                    # likely meant literal 'auth/login'; branch appears dead (use_repl=True)
                    self.admin_yaml.authentication = f'http://{self.host}/{auth/login}'
        self.admin_yaml.resources = {}
        for each_resource_name in self.mod_gen.resource_list:
            each_resource = self.mod_gen.resource_list[each_resource_name]  # class_name, per _s_collection_name
            self.create_resource_in_admin(each_resource)
        self.create_about()
        self.create_info()
        self.create_settings()
        # self.doc_properties()
        if self.mod_gen.project.command != "create-ui":
            self.write_yaml_files()

    def create_resource_in_admin(self, resource: Resource):
        """ self.admin_yaml.resources += resource DotMap for given resource """
        resource_name = resource.name
        if resource_name == "ProductXXX":
            debug_stop = "good breakpoint"
        if self.do_process_resource(resource_name):
            new_resource = DotMap()
            self.num_pages_generated += 1
            new_resource.type = str(resource.name)
            new_resource.user_key = str(self.mod_gen.favorite_attribute_name(resource))
            self.create_attributes_in_owner(new_resource, resource, None)
            child_tabs = self.create_child_tabs(resource)
            if child_tabs:
                new_resource.tab_groups = child_tabs
            # keyed by table name (not class name) in admin.yaml
            self.admin_yaml.resources[resource.table_name] = new_resource.toDict()

    def create_attributes_in_owner(self, owner: DotMap, resource: Resource, owner_resource) -> Dict[None, Resource]:
        """ create attributes in owner (owner is a DotMap -- of resource, or tab)

        Class created, nw- has CategoryName = Column('CategoryName_ColumnName'...
        Caution: fix_database_models() occurs after model in memory...

        Order:
            attributes:  1 Favorite, 2 Joins, 3 Others / not favs, 4 Not Favs
            - label: ShipName*
              name: ShipName
              search: true
            - name: OrderDate
            - name: RequiredDate
            - name: Id
            - name: CustomerId
        """
        owner.attributes = []
        attributes_dict = []  # DotMap()
        processed_attributes = set()
        if resource.name == "Categoryxx":
            log.debug(f'ui_admin_creator.create_attributes_in_owner: {resource.name}')

        # Step 1 - favorite attribute (falls back to first attribute when unusable)
        favorite_attribute = resource.get_favorite_attribute()
        admin_attribute = self.create_admin_attribute(favorite_attribute)
        if admin_attribute is None:
            favorite_attribute = resource.attributes[0]
            admin_attribute = self.create_admin_attribute(favorite_attribute)
        processed_attributes.add(favorite_attribute.name)
        admin_attribute.search = True
        admin_attribute.sort = True
        admin_attribute.label = f"{self.cap_space(favorite_attribute.name)}*"
        attributes_dict.append(admin_attribute)

        # Step 2 - Parent Joins
        for each_parent in resource.parents:
            if admin_parent_joins_implicit:  # temp hack - just do the FK
                fk_pair = each_parent.parent_child_key_pairs[0]  # assume single-field keys
                fk_attr_name = fk_pair[1]
                resource_attribute = None
                for each_attribute in resource.attributes:
                    if each_attribute.name == fk_attr_name:
                        resource_attribute = each_attribute
                        break
                if resource_attribute is None:
                    raise Exception(f'System Error: unable to find {fk_attr_name} in {resource.name}')
                processed_attributes.add(fk_attr_name)
                admin_attribute = self.create_admin_attribute(resource_attribute)
                if admin_attribute is not None:
                    attributes_dict.append(admin_attribute)
            else:
                pass
                """ perhaps something like this:
                    - Location:          <— this is the parent resource name
                        fks:
                        - City           <- child FKs
                        - Country
                        attributes:      <- parent attrs to display
                        - name: city
                        - name: country
                """

        # Step 3 - Other fields, except non-favorites
        for each_attribute in resource.attributes:
            if each_attribute.name not in processed_attributes:
                if not each_attribute.non_favorite:
                    processed_attributes.add(each_attribute.name)
                    admin_attribute = self.create_admin_attribute(each_attribute)
                    if admin_attribute is not None:
                        attributes_dict.append(admin_attribute)

        # Step 4 - Non-favorites
        for each_attribute in resource.attributes:
            if each_attribute.name not in processed_attributes:
                if each_attribute.non_favorite:
                    processed_attributes.add(each_attribute.name)
                    admin_attribute = self.create_admin_attribute(each_attribute)
                    if admin_attribute is not None:
                        attributes_dict.append(admin_attribute)
        owner.attributes = attributes_dict

    @staticmethod
    def create_admin_attribute(resource_attribute) -> DotMap:
        """ create attribute entry for admin.yaml

        Returns None for unrenderable types (NTEXT, IMAGE), else a DotMap
        """
        attribute_name = resource_attribute if isinstance(resource_attribute, str) else resource_attribute.name
        required = False if isinstance(resource_attribute, str) else resource_attribute.is_required
        admin_attribute = DotMap()
        admin_attribute.name = str(attribute_name)
        if required:
            admin_attribute.required = True
        if attribute_name == "xShippedDate":
            log.debug("Good breakpoint location")
        if isinstance(resource_attribute, str) == True:
            log.debug("Just a string")
            raise Exception(f'System Error - expected resource_attribute, got string: {resource_attribute}')
        if not isinstance(resource_attribute, str):
            if resource_attribute.type in ["DECIMAL", "DATE"]:
                admin_attribute.type = resource_attribute.type
            if resource_attribute.type in ["NTEXT", "IMAGE"]:
                admin_attribute = None
        return admin_attribute  # .toDict() hmm... sometimes a "shape" property slips in...?

    @staticmethod
    def cap_space(text):
        # "ShipName" -> " Ship Name" (leading space, split on caps)
        new_text = ' '
        for i, letter in enumerate(text):
            if i and letter.isupper():
                new_text += ' '
            new_text += letter
        return new_text

    def new_relationship_to_parent(self, a_child_resource: Resource,
                                   parent_attribute_reference,
                                   a_master_parent_resource) -> DotMap:
        """ given a_child_table_def.parent_column_reference, create relationship: attrs, fKeys (for *js* client (no meta))

        Order:
            attributes:
            - ShipName
            - Amount
            - Location:
                fks:
                - City
                - Country
                attributes:
                - name: city
                - name: country

        :param a_child_resource: a child resource (not class), eg, Employees
        :param parent_attribute_reference: parent ref, eg, Department1.DepartmentName
        :param a_master_parent_resource: the master of master/detail - skip joins for this
        :returns DotMap relationship
        """
        parent_role_name = parent_attribute_reference.split('.')[0]  # careful - is role (class) name, not table name
        if a_master_parent_resource is not None and parent_role_name == a_master_parent_resource.name:
            skipped = f'avoid redundant master join - {a_child_resource}.{parent_attribute_reference}'
            log.debug(f'master object detected - {skipped}')
            return None
        relationship = DotMap()
        if len(self.mod_gen.resource_list) == 0:  # RARELY used - use_model is true (sqlacodegen_wrapper not called)
            return self.new_relationship_to_parent_no_model(a_child_resource,
                                                            parent_attribute_reference,
                                                            a_master_parent_resource)
        my_parents_list = a_child_resource.parents
        parent_relationship = None
        for each_parent_relationship in my_parents_list:
            if each_parent_relationship.parent_role_name == parent_role_name:
                parent_relationship = each_parent_relationship
                break
        if not parent_relationship:
            msg = f'Unable to find role for: {parent_attribute_reference}'
            relationship.error_unable_to_find_role = msg
            if parent_role_name not in self.multi_reln_exceptions:
                self.multi_reln_exceptions.append(parent_role_name)
                log.warning(f'Error - please search ui/admin/admin.yaml for: Unable to find role')
            # NOTE(review): execution continues with parent_relationship == None -
            # the attribute access below would raise; confirm intended
        relationship.resource = str(parent_relationship.parent_resource)  # redundant??
        relationship.attributes = []
        relationship.fks = []
        if a_child_resource.name == "Order":
            log.debug("Parents for special table - debug")
        for each_column in parent_relationship.parent_child_key_pairs:  # XXX FIXME
            # key_column = DotMap()
            # key_column.name = str(each_column)
            relationship.fks.append(str(each_column[1]))
            # todo - verify fullname is table name (e.g, multiple relns - emp.worksFor/onLoan)
        return relationship

    def create_child_tabs(self, resource: Resource) -> List:
        """ build tabs for related children

        tab_groups:
            CustomerCustomerDemoList:
              direction: tomany
              fks:
              - CustomerTypeId
              resource: CustomerCustomerDemo
        """
        if len(self.mod_gen.resource_list) == 0:  # almost always, use_model false (we create)
            return self.create_child_tabs_no_model(resource)
        if resource.name == "Department":  # excellent breakpoint location
            log.debug(f'Relationships for {resource.name}')
        children_seen = set()
        tab_group = []
        for each_resource_relationship in resource.children:
            each_resource_tab = DotMap()
            self.num_related += 1
            each_child = each_resource_relationship.child_resource
            if each_child in children_seen:
                pass  # it's ok, we are using the child_role_name now
            children_seen.add(each_child)
            each_resource_tab.fks = []
            for each_pair in each_resource_relationship.parent_child_key_pairs:
                each_resource_tab.fks.append(str(each_pair[1]))
            each_child_resource = self.mod_gen.resource_list[each_child]
            each_resource_tab.resource = each_child_resource.table_name
            each_resource_tab.direction = "tomany"
            each_resource_tab.name = each_resource_relationship.child_role_name
            each_child_resource = self.mod_gen.resource_list[each_child]
            if admin_child_grids:
                self.create_attributes_in_owner(each_resource_tab, each_child_resource, resource)
            tab_group.append(each_resource_tab)
        # disambiguate multi-relns, eg Employee OnLoan/WorksForDept
        # (method continues past this view - truncated here)
        if admin_relationships_with_parents:
            for each_resource_relationship in resource.parents:
                each_resource_tab = DotMap()
                each_parent = each_resource_relationship.parent_resource
                each_resource_tab.resource = str(each_parent)
                each_parent_resource = self.mod_gen.resource_list[each_parent]
                # NOTE(review): overwrites the str(each_parent) value set just above
                each_resource_tab.resource = each_parent_resource.table_name
                each_resource_tab.direction = "toone"
                each_resource_tab.fks = []
                for each_pair in each_resource_relationship.parent_child_key_pairs:
                    each_resource_tab.fks.append(str(each_pair[1]))
                each_resource_tab.name = each_resource_relationship.parent_role_name
                # tab_group[tab_name] = each_resource_tab
                # disambiguate multi-relns, eg Employee OnLoan/WorksForDept
                tab_group.append(each_resource_tab)
        return tab_group

    def do_process_resource(self, resource_name: str) -> bool:
        """ filter out resources that are skipped by user, start with ab etc

        Returns False for user-excluded, admin (ab_/Ab), and sqlite internal tables.
        """
        if resource_name + " " in self.not_exposed:
            return False  # not_exposed: api.expose_object(models.{table_name})
        if "ProductDetails_V" in resource_name:
            log.debug("special table")  # should not occur (--noviews)
        if resource_name.startswith("ab_"):
            return False  # skip admin table: " + table_name + "\n
        elif 'sqlite_sequence' in resource_name:
            return False  # skip sqlite_sequence table: " + table_name + "\n
        elif resource_name is None:
            # NOTE(review): unreachable -- a None name would already have raised
            # TypeError at 'resource_name + " "' above; check should be first
            return False  # no class (view): " + table_name + "\n
        elif resource_name.startswith("Ab"):
            return False
        return True

    def create_child_tabs_no_model(self, a_table_def: MetaDataTable) -> DotMap:
        """
        Rarely used, now broken.  Ignore for now

        This approach is for cases where use_model specifies an existing model.
        In such cases, self.mod_gen.my_children_list is None,
        so we need to get relns from db, inferring role names
        """
        all_tables = a_table_def.metadata.tables
        tab_group = DotMap()  # NOTE(review): never populated -- returned empty (method is broken/disabled)
        for each_possible_child_tuple in all_tables.items():
            each_possible_child = each_possible_child_tuple[1]
            parents = each_possible_child.foreign_keys
            if (a_table_def.name == "Customer" and each_possible_child.name == "Order"):
                log.debug(a_table_def)  # breakpoint anchor for the nw sample
            for each_parent in parents:
                each_parent_name = each_parent.target_fullname
                loc_dot = each_parent_name.index(".")
                each_parent_name = each_parent_name[0:loc_dot]  # strip column, keep table name
                if each_parent_name == a_table_def.name:
                    self.num_related += 1
                    # self.yaml_lines.append(f'    - tab: {each_possible_child.name} List')
                    # self.yaml_lines.append(f'      resource: {each_possible_child.name}')
                    # self.yaml_lines.append(f'      fkeys:')
                    for each_foreign_key in each_parent.parent.foreign_keys:
                        for each_element in each_foreign_key.constraint.elements:
                            # self.yaml_lines.append(f'        - target: {each_element.column.key}')
                            child_table_name = each_element.parent.table.name
                            # self.yaml_lines.append(f'          source: {each_element.parent.name}')
                    # self.yaml_lines.append(f'      columns:')
                    # NOTE(review): duplicated 'columns =' -- harmless, likely a typo
                    columns = columns = self.mod_gen.get_show_columns(each_possible_child)
                    col_count = 0
                    for each_column in columns:
                        col_count += 1
                        if col_count > self.max_list_columns:
                            break
                        if "." not in each_column:
                            # self.yaml_lines.append(f'        - name: {each_column}')
                            pass
                        else:
                            pass  # self.create_object_reference(each_possible_child, each_column, 4, a_table_def)
        return tab_group

    def new_relationship_to_parent_no_model(self, a_child_table_def: MetaDataTable,
                                            parent_column_reference, a_master_parent_table_def) -> DotMap:
        """
        Rarely used, now broken.  Ignore for now.
        This approach is for cases where use_model specifies an existing model.
        In such cases, self.mod_gen.my_children_list is None,
        so we need to get relns from db, inferring role names
        """
        parent_role_name = parent_column_reference.split('.')[0]  # careful - is role (class) name, not table name
        relationship = DotMap()
        fkeys = a_child_table_def.foreign_key_constraints
        if a_child_table_def.name == "Employee":  # table Employees, class/role employee
            log.debug("Debug stop")
        found_fkey = False
        checked_keys = ""
        for each_fkey in fkeys:  # find fkey for parent_role_name
            referred_table: str = each_fkey.referred_table.key  # table name, eg, Employees
            referred_table = referred_table.lower()
            checked_keys += referred_table + " "
            if referred_table.startswith(parent_role_name.lower()):
                # self.yaml_lines.append(f'{tabs(num_tabs)} - object:')
                # todo - verify fullname is table name (e.g, multiple relns - emp.worksFor/onLoan)
                # self.yaml_lines.append(f'{tabs(num_tabs)}   - type: {each_fkey.referred_table.fullname}')
                # self.yaml_lines.append(f'{tabs(num_tabs)}   - show_attributes:')
                # self.yaml_lines.append(f'{tabs(num_tabs)}   - key_attributes:')
                log.debug(f'got each_fkey: {str(each_fkey)}')
                for each_column in each_fkey.column_keys:
                    # self.yaml_lines.append(f'{tabs(num_tabs)}       - name: {each_column}')
                    pass
                found_fkey = True
        if not found_fkey:
            parent_table_name = parent_role_name
            if parent_table_name.endswith("1"):  # strip disambiguation suffix, eg Department1
                parent_table_name = parent_table_name[:-1]
                pass
            msg = f'Please specify references to {parent_table_name}'
            # self.yaml_lines.append(f'#{tabs(num_tabs)} - Multiple relationships detected -- {msg}')  FIXME
            if parent_role_name not in self.multi_reln_exceptions:  # warn once per role
                self.multi_reln_exceptions.append(parent_role_name)
                log.warning(f'Alert - please search ui/admin/admin.yaml for: {msg}')
            # raise Exception(msg)
        return relationship

    def get_create_from_model_dir(self) -> Path:
        """
        :return: create_from_model dir, eg, /Users/val/dev/ApiLogicServer/create_from_model
        """
        path = Path(__file__)
        parent_path = path.parent
        parent_path = parent_path.parent  # two levels up from this source file
        return parent_path

    def write_yaml_files(self):
        """ write admin[-merge].yaml from self.admin_yaml.toDict()

        with -created backup, plus additional nw customized backup
        """
        admin_yaml_dict = self.admin_yaml.toDict()
        admin_yaml_dump = yaml.dump(admin_yaml_dict)
        yaml_file_name = os.path.join(Path(self.mod_gen.project_directory), Path(f'ui/admin/admin.yaml'))
        if self.mod_gen.project.command == "add_db":
            yaml_file_name = os.path.join(Path(self.mod_gen.project_directory),
                                          Path(f'ui/admin/{self.mod_gen.project.bind_key}_admin.yaml'))
        write_file = "Write"  # alert - not just message, drives processing
        if self.mod_gen.project.command.startswith("rebuild"):
            '''
            creation_time different mac - always appears unaltered (== modified_time)
            https://stackoverflow.com/questions/946967/get-file-creation-time-with-python-on-mac
            https://thispointer.com/python-get-last-modification-date-time-of-a-file-os-stat-os-path-getmtime/
            windows: has proper time_created/modified
            mac: mac created_time always = modified_time, but can use birthtime
            linux: same as mac, but not birthtime -- disable for linux
            '''
            enable_rebuild_unaltered = True
            yaml_file_stats = Path(yaml_file_name).stat()
            if sys.platform == 'win32':
                time_diff = abs(yaml_file_stats.st_mtime - yaml_file_stats.st_ctime)  # these are seconds
            elif sys.platform == 'darwin':
                time_diff = abs(yaml_file_stats.st_mtime - yaml_file_stats.st_birthtime)
            else:
                time_diff = 1000  # linux never captures ctime (!), so we must preserve possible chgs
            # heuristic: >= 5s between create and modify means the user edited admin.yaml
            if time_diff >= 5:
                write_file = "Rebuild - preserve altered admin.yaml"
            else:
                write_file = "Rebuild - preserve unaltered admin.yaml (cp admin-merge.yaml admin.yaml)"
        if write_file.startswith("Rebuild"):
            yaml_merge_file_name = os.path.join(Path(self.mod_gen.project_directory),
                                                Path(f'ui/admin/admin-merge.yaml'))
            log.debug(f'.. .. ..{write_file} {yaml_file_name} - creating merge at {yaml_merge_file_name}')
            merge_yaml = self.create_yaml_merge()
            admin_merge_yaml_dump = yaml.dump(merge_yaml)
            with open(yaml_merge_file_name, 'w') as yaml_merge_file:
                yaml_merge_file.write(admin_merge_yaml_dump)
            # NOTE(review): dead branch -- write_file is never set to "Rebuild - overwrite"
            if write_file.startswith("Rebuild - overwrite"):
                log.debug(f'.. .. ..{write_file} {yaml_file_name} - creating merge at {yaml_merge_file_name}')
        if write_file == "Write":  # or write_file.startswith("Rebuild - overwrite"):  # more drastic approach
            log.debug(f'.. .. ..{write_file} {yaml_file_name}')
            with open(yaml_file_name, 'w') as yaml_file:
                yaml_file.write(admin_yaml_dump)
        yaml_created_file_name = \
            os.path.join(Path(self.mod_gen.project_directory), Path(f'ui/admin/admin-created.yaml'))
        create_initial_backup = False  # caused rebuild confusion, so disabled
        if create_initial_backup:
            with open(yaml_created_file_name, 'w') as yaml_created_file:
                yaml_created_file.write(admin_yaml_dump)
        # nw sample: replace generated admin.yaml with the customized prototype
        if self.mod_gen.project.nw_db_status in ["nw"] and self.mod_gen.project.api_name == "api":
            if not self.mod_gen.project.command.startswith("rebuild"):
                src = os.path.join(self.mod_gen.project.api_logic_server_dir_path,
                                   Path(f'prototypes/nw/ui/admin/admin.yaml'))
                dest = os.path.join(Path(self.mod_gen.project_directory), Path(f'ui/admin/admin.yaml'))
                shutil.copyfile(src, dest)

    def create_yaml_merge(self) -> dict:
        """ return admin_merge.yaml from self.admin_yaml.toDict() and ui/admin/admin.yaml  xxx

        Merges newly-generated resources / attributes / tab_groups into the
        user's existing admin.yaml; records what was added under about.merged.
        """
        yaml_admin_file_name = \
            os.path.join(Path(self.mod_gen.project_directory), Path(f'ui/admin/admin.yaml'))
        with open(yaml_admin_file_name, 'r') as file_descriptor:  # new rsc, attr
            merge_yaml_dict = yaml.load(file_descriptor, Loader=yaml.SafeLoader)
        merge_resources = merge_yaml_dict['resources']
        current_resources = self.admin_yaml.resources
        new_resources = ''
        new_attributes = ''
        new_tab_groups = ''
        for each_resource_name, each_resource in current_resources.items():
            if each_resource_name not in merge_resources:
                # brand-new resource: copy it over wholesale
                new_resources = new_resources + f'{each_resource_name} '
                merge_resources[each_resource_name] = each_resource
            else:
                # existing resource: append any attributes the user's file lacks
                current_attributes = each_resource['attributes']
                merge_attributes = merge_resources[each_resource_name]['attributes']
                for each_current_attribute in current_attributes:
                    attribute_name = each_current_attribute['name']
                    attribute_found = False
                    for each_merge_attribute in merge_attributes:
                        if attribute_name == each_merge_attribute['name']:
                            attribute_found = True
                            break
                    if not attribute_found:
                        new_attributes = new_attributes + f'{each_resource_name}.{attribute_name} '
                        merge_attributes.append(each_current_attribute)
                # likewise for tab_groups
                if 'tab_groups' in each_resource:
                    current_tab_groups = each_resource['tab_groups']
                    if 'tab_groups' not in merge_resources[each_resource_name]:
                        merge_resources[each_resource_name]['tab_groups'] = []
                    merge_tab_groups = merge_resources[each_resource_name]['tab_groups']
                    for each_current_tab_group in current_tab_groups:
                        tab_group_name = each_current_tab_group['name']
                        tab_group_found = False
                        for each_merge_tab_group in merge_tab_groups:
                            if tab_group_name == each_merge_tab_group['name']:
                                tab_group_found = True
                                break
                        if not tab_group_found:
                            new_tab_groups = new_tab_groups + f'{each_resource_name}.{tab_group_name} '
                            merge_tab_groups.append(each_current_tab_group)
        merge_yaml_dict['about']['merged'] = {}
        merge_yaml_dict['about']['merged']['at'] = str(datetime.datetime.now().strftime("%B %d, %Y %H:%M:%S"))
        merge_yaml_dict['about']['merged']['new_resources'] = new_resources
        merge_yaml_dict['about']['merged']['new_attributes'] = new_attributes
        merge_yaml_dict['about']['merged']['new_tab_groups'] = new_tab_groups
        return merge_yaml_dict

    def create_settings(self):
        """ settings block: max_list_columns, HomeJS url (host/port aware) """
        self.admin_yaml.settings = DotMap()
        self.admin_yaml.settings.max_list_columns = self.max_list_columns
        home_js = "http://localhost:5656/admin-app/home.js"
        if self.host != "localhost":
            if self.port != "":
                home_js = f'http://{self.host}:{self.port}/admin-app/home.js'
            else:
                home_js = f'http://{self.host}/admin-app/home.js'
        self.admin_yaml.settings.HomeJS = home_js
        return

    def create_about(self):
        """ about block: generation date, version, recent changes note """
        self.admin_yaml.about = DotMap()
        self.admin_yaml.about.date = f'{str(datetime.datetime.now().strftime("%B %d, %Y %H:%M:%S"))}'
        self.admin_yaml.about.version = self.mod_gen.version
        self.admin_yaml.about.recent_changes = "works with modified safrs-react-admin"
        return

    def create_info(self):
        """ info block - # tables, relns, [no-relns warning] """
        self.admin_yaml.info = DotMap()
        self.admin_yaml.info.number_tables = self.num_pages_generated
        self.admin_yaml.info.number_relationships = self.num_related
        if self.num_related == 0:
            # FIXME what to do
            self.yaml_lines.append(f'  warning: no_related_view')
            log.debug(".. .. ..WARNING - no relationships detected - add them to your database or model")
            log.debug(".. .. .. See https://github.com/valhuber/LogicBank/wiki/Managing-Rules#database-design")

    def doc_properties(self):
        """ show non-automated properties in yaml, for users' quick reference """
        resource_props = DotMap()
        resource_props.menu = "False | name"
        resource_props.info = "long html / rich text"
        resource_props.allow_insert = "exp"
        resource_props.allow_update = "exp"
        resource_props.allow_delete = "exp"
        self.admin_yaml.properties_ref.resource = resource_props
        attr_props = DotMap()
        attr_props.search = "true | false"
        attr_props.label = "caption for display"
        attr_props.hidden = "exp"
        attr_props.group = "name"
        style_props = DotMap()
        style_props.font_weight = 0
        style_props.color = "blue"
        attr_props.style = style_props
        self.admin_yaml.properties_ref.attribute = attr_props
        tab_props = DotMap()
        tab_props.label = "text"
        tab_props.lookup = "boolean"
        self.admin_yaml.properties_ref.tab = tab_props

    def create_admin_app(self, msg: str = "", from_git: str = ""):
        """
        deep copy ApiLogicServer/create_from_model/admin -> project_directory/ui/admin

        :param msg: console (.. .. ..Create ui/admin)
        :param from_git: git url for source - override ApiLogicServer/create_from_model/admin (not impl)
        """
        from_proto_dir = from_git
        if from_proto_dir == "":
            from_proto_dir = pathlib.Path(self.get_create_from_model_dir()).\
                joinpath("create_from_model", "safrs-react-admin-npm-build")
        to_project_dir = pathlib.Path(self.mod_gen.project_directory).joinpath("ui", "safrs-react-admin")
        use_alsdock_sra = True
        '''
        set False if alsdock/dockerfile copies this folder:
        RUN cp -r /app/ui/safrs-react-admin /app/ApiLogicServer-main/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build
        '''
        if use_alsdock_sra and self.mod_gen.project.multi_api:
            log.debug(f'{msg} multi_api - copy safrs-react-admin {from_proto_dir} -> {to_project_dir}')
            from_proto_dir = pathlib.Path("/app/ui/safrs-react-admin")  # enables debug for alsdock projects
            shutil.copytree(from_proto_dir, to_project_dir)
        else:
            log.debug(f'{msg} copy safrs-react-admin to: {to_project_dir}')
            log.debug(f'.. .. .. ..From {from_proto_dir}')
            if not os.path.isdir(from_proto_dir):
                log.debug(f'\n==> Error - safrs-react-admin... did you complete setup: https://apilogicserver.github.io/Docs/Internals/')
                log.debug(".. Setup required. Really.")
                exit(1)
            use_sra_from_install = True
            if use_sra_from_install:
                log.debug(".. created app will use sra from ApiLogicServer install")
            else:
                shutil.copytree(from_proto_dir, to_project_dir)
        # patch home.js so the client targets this project's api name
        to_project_dir = pathlib.Path(self.mod_gen.project_directory).joinpath("ui", "admin")
        swagger_name = self.mod_gen.project.api_name
        if self.mod_gen.project.multi_api:
            swagger_name += "/api"
        log.debug(f'.. ui/admin/home.js updated url: {swagger_name}')
        create_utils.replace_string_in_file(search_for="api_logic_server_api_name",  # last node of server url
                                            replace_with=swagger_name,
                                            in_file=to_project_dir.joinpath("home.js"))


def create(model_creation_services: create_from_model.ModelCreationServices):
    """ called by ApiLogicServer CLI -- creates ui/admin application (ui/admin folder, admin.yaml) """
    admin_creator = AdminCreator(model_creation_services,
                                 host=model_creation_services.project.host,
                                 port=model_creation_services.project.port,
                                 not_exposed=model_creation_services.project.not_exposed + " ",
                                 favorite_names=model_creation_services.project.favorites,
                                 non_favorite_names=model_creation_services.project.non_favorites)
    admin_creator.create_admin_application()
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/ui_admin_creator.py
ui_admin_creator.py
from typing import List, Dict
from typing import NewType, Type
import logging

log = logging.getLogger(__name__)


class ResourceAttribute():
    """ instances added to Resource.attributes by __init__ """

    def __init__(self, each_attribute: object, resource: Type['Resource']):
        """ build attribute meta from a model column; appends self to resource.attributes

        :param each_attribute: model column-like object (name, type, nullable, primary_key)
        :param resource: owning Resource
        """
        self.name = str(each_attribute.name)
        if self.name == "Ready":  # deliberately-specific name: breakpoint anchor
            debug_str = "Nice breakpoint"
        # self.nullable = each_attribute.nullable
        type = str(each_attribute.type)
        self.type = None  # none means not interesting, default display to simple text
        if type == "DECIMAL":
            self.type = "DECIMAL"
        elif type == "DATE":
            self.type = "DATE"
        # elif type == "DATETIME":  safrs-admin date-picker fails with bad datetime format
        #     self.type = "DATE"
        elif type == "IMAGE":
            self.type = "IMAGE"
        elif type.startswith("NTEXT"):
            # FIX: was 'type.startswith("NTEXT") == "NTEXT"' -- a bool compared
            # to a string, always False, so NTEXT columns were never flagged
            self.type = "NTEXT"
        self.non_favorite = False
        self.is_required = not each_attribute.nullable
        if self.is_required and each_attribute.primary_key:
            if type in ["Integer", "INTEGER", "MEDIUMINT", "SMALLINT", "TINYINT"]:
                self.is_required = False  # this is autonum... so not required (affects admin.yaml - required)
            else:
                debug_str = "Alpha Pkey"  # nothing to do, this for debug verification
        lower_name = self.name.lower()
        non_favs = resource.model_creation_services.project.non_favorites  # FIXME not _non_favorite_names_list
        for each_non_fav in non_favs:
            if lower_name.endswith(each_non_fav):
                self.non_favorite = True
                break
        resource.attributes.append(self)

    def __str__(self):
        result = self.name
        if self.type is not None:
            result += " - " + self.type
        return result


class ResourceRelationship():
    """ parent/child role pairing; resources and key pairs filled in later """

    def __init__(self, parent_role_name: str, child_role_name: str):
        self.parent_role_name = parent_role_name
        self.child_role_name = child_role_name
        self.parent_resource = None  # set when model is wired up
        self.child_resource = None
        self.parent_child_key_pairs = list()  # [(parent_col, child_col), ...]

    def __str__(self):
        return f'ResourceRelationship: ' \
               f'parent_role_name: {self.parent_role_name} | ' \
               f'child_role_name: {self.child_role_name} | ' \
               f'parent: {self.parent_resource} | ' \
               f'child: {self.child_resource} | '


class Resource():
    """ meta data for a model class / safrs resource """

    def __init__(self, name: str, model_creation_services):
        self.name = name  # class name (which != safrs resource name)
        self.table_name = name  # safrs resource name; this is just default, overridden in create_model
        self.type = name  # just default, overridden in create_model
        self.children: List[ResourceRelationship] = list()
        self.parents: List[ResourceRelationship] = list()
        self.attributes: List[ResourceAttribute] = list()
        self.model_creation_services = model_creation_services  # to find favorite names etc.

    def __str__(self):
        return f'Resource: {self.name}, table_name: {self.table_name}, type: {self.type}'

    def get_favorite_attribute(self) -> ResourceAttribute:
        """ returns ResourceAttribute of first attribute that is...

            named <favorite_name> (default to "name"), else containing <favorite_name>,
            else (or first column)

        Returns
            ResourceAttribute that is the favorite (None only if no attributes)
        """
        if self.name == 'ActionPlanScenario':  # breakpoint anchor
            debug = "compute favorite attribute"
        favorite_names = self.model_creation_services.project.favorites.split()
        for each_favorite_name in favorite_names:
            # pass 1: exact (case-insensitive) match
            for each_attribute in self.attributes:
                if each_attribute.name.lower() == each_favorite_name:
                    return each_attribute
            # pass 2: containment match
            for each_attribute in self.attributes:
                if each_favorite_name in each_attribute.name.lower():
                    return each_attribute
        # FIX: fallback moved OUT of the favorites loop -- previously it returned
        # the first attribute during the first iteration, so later favorite
        # names were never tried (contradicting the docstring)
        for each_attribute in self.attributes:  # no favorites matched, just return 1st
            return each_attribute
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/meta_model.py
meta_model.py
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[3126],{63126:function(e,t,n){n.r(t),n.d(t,{conf:function(){return s},language:function(){return o}});var s={comments:{blockComment:["\x3c!--","--\x3e"]},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:"<",close:">",notIn:["string"]}],surroundingPairs:[{open:"(",close:")"},{open:"[",close:"]"},{open:"`",close:"`"}],folding:{markers:{start:new RegExp("^\\s*\x3c!--\\s*#?region\\b.*--\x3e"),end:new RegExp("^\\s*\x3c!--\\s*#?endregion\\b.*--\x3e")}}},o={defaultToken:"",tokenPostfix:".md",control:/[\\`*_\[\]{}()#+\-\.!]/,noncontrol:/[^\\`*_\[\]{}()#+\-\.!]/,escapes:/\\(?:@control)/,jsescapes:/\\(?:[btnfr\\"']|[0-7][0-7]?|[0-3][0-7]{2})/,empty:["area","base","basefont","br","col","frame","hr","img","input","isindex","link","meta","param"],tokenizer:{root:[[/^\s*\|/,"@rematch","@table_header"],[/^(\s{0,3})(#+)((?:[^\\#]|@escapes)+)((?:#+)?)/,["white","keyword","keyword","keyword"]],[/^\s*(=+|\-+)\s*$/,"keyword"],[/^\s*((\*[ ]?)+)\s*$/,"meta.separator"],[/^\s*>+/,"comment"],[/^\s*([\*\-+:]|\d+\.)\s/,"keyword"],[/^(\t|[ ]{4})[^ 
].*$/,"string"],[/^\s*~~~\s*((?:\w|[\/\-#])+)?\s*$/,{token:"string",next:"@codeblock"}],[/^\s*```\s*((?:\w|[\/\-#])+).*$/,{token:"string",next:"@codeblockgh",nextEmbedded:"$1"}],[/^\s*```\s*$/,{token:"string",next:"@codeblock"}],{include:"@linecontent"}],table_header:[{include:"@table_common"},[/[^\|]+/,"keyword.table.header"]],table_body:[{include:"@table_common"},{include:"@linecontent"}],table_common:[[/\s*[\-:]+\s*/,{token:"keyword",switchTo:"table_body"}],[/^\s*\|/,"keyword.table.left"],[/^\s*[^\|]/,"@rematch","@pop"],[/^\s*$/,"@rematch","@pop"],[/\|/,{cases:{"@eos":"keyword.table.right","@default":"keyword.table.middle"}}]],codeblock:[[/^\s*~~~\s*$/,{token:"string",next:"@pop"}],[/^\s*```\s*$/,{token:"string",next:"@pop"}],[/.*$/,"variable.source"]],codeblockgh:[[/```\s*$/,{token:"string",next:"@pop",nextEmbedded:"@pop"}],[/[^`]+/,"variable.source"]],linecontent:[[/&\w+;/,"string.escape"],[/@escapes/,"escape"],[/\b__([^\\_]|@escapes|_(?!_))+__\b/,"strong"],[/\*\*([^\\*]|@escapes|\*(?!\*))+\*\*/,"strong"],[/\b_[^_]+_\b/,"emphasis"],[/\*([^\\*]|@escapes)+\*/,"emphasis"],[/`([^\\`]|@escapes)+`/,"variable"],[/\{+[^}]+\}+/,"string.target"],[/(!?\[)((?:[^\]\\]|@escapes)*)(\]\([^\)]+\))/,["string.link","","string.link"]],[/(!?\[)((?:[^\]\\]|@escapes)*)(\])/,"string.link"],{include:"html"}],html:[[/<(\w+)\/>/,"tag"],[/<(\w+)(\-|\w)*/,{cases:{"@empty":{token:"tag",next:"@tag.$1"},"@default":{token:"tag",next:"@tag.$1"}}}],[/<\/(\w+)(\-|\w)*\s*>/,{token:"tag"}],[/<!--/,"comment","@comment"]],comment:[[/[^<\-]+/,"comment.content"],[/-->/,"comment","@pop"],[/<!--/,"comment.content.invalid"],[/[<\-]/,"comment.content"]],tag:[[/[ 
\t\r\n]+/,"white"],[/(type)(\s*=\s*)(")([^"]+)(")/,["attribute.name.html","delimiter.html","string.html",{token:"string.html",switchTo:"@tag.$S2.$4"},"string.html"]],[/(type)(\s*=\s*)(')([^']+)(')/,["attribute.name.html","delimiter.html","string.html",{token:"string.html",switchTo:"@tag.$S2.$4"},"string.html"]],[/(\w+)(\s*=\s*)("[^"]*"|'[^']*')/,["attribute.name.html","delimiter.html","string.html"]],[/\w+/,"attribute.name.html"],[/\/>/,"tag","@pop"],[/>/,{cases:{"$S2==style":{token:"tag",switchTo:"embeddedStyle",nextEmbedded:"text/css"},"$S2==script":{cases:{$S3:{token:"tag",switchTo:"embeddedScript",nextEmbedded:"$S3"},"@default":{token:"tag",switchTo:"embeddedScript",nextEmbedded:"text/javascript"}}},"@default":{token:"tag",next:"@pop"}}}]],embeddedStyle:[[/[^<]+/,""],[/<\/style\s*>/,{token:"@rematch",next:"@pop",nextEmbedded:"@pop"}],[/</,""]],embeddedScript:[[/[^<]+/,""],[/<\/script\s*>/,{token:"@rematch",next:"@pop",nextEmbedded:"@pop"}],[/</,""]]}}}}]); //# sourceMappingURL=3126.99b58416.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/3126.99b58416.chunk.js
3126.99b58416.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[4900],{54900:function(e,t,a){a.r(t),a.d(t,{conf:function(){return n},language:function(){return i}});var n={comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["[","]"],["(",")"],["{","}"]],autoClosingPairs:[{open:'"',close:'"',notIn:["string","comment","identifier"]},{open:"[",close:"]",notIn:["string","comment","identifier"]},{open:"(",close:")",notIn:["string","comment","identifier"]},{open:"{",close:"}",notIn:["string","comment","identifier"]}]},i={defaultToken:"",tokenPostfix:".pq",ignoreCase:!1,brackets:[{open:"[",close:"]",token:"delimiter.square"},{open:"{",close:"}",token:"delimiter.brackets"},{open:"(",close:")",token:"delimiter.parenthesis"}],operatorKeywords:["and","not","or"],keywords:["as","each","else","error","false","if","in","is","let","meta","otherwise","section","shared","then","true","try","type"],constructors:["#binary","#date","#datetime","#datetimezone","#duration","#table","#time"],constants:["#infinity","#nan","#sections","#shared"],typeKeywords:["action","any","anynonnull","none","null","logical","number","time","date","datetime","datetimezone","duration","text","binary","list","record","table","function"],builtinFunctions:["Access.Database","Action.Return","Action.Sequence","Action.Try","ActiveDirectory.Domains","AdoDotNet.DataSource","AdoDotNet.Query","AdobeAnalytics.Cubes","AnalysisServices.Database","AnalysisServices.Databases","AzureStorage.BlobContents","AzureStorage.Blobs","AzureStorage.Tables","Binary.Buffer","Binary.Combine","Binary.Compress","Binary.Decompress","Binary.End","Binary.From","Binary.FromList","Binary.FromText","Binary.InferContentType","Binary.Length","Binary.ToList","Binary.ToText","BinaryFormat.7BitEncodedSignedInteger","BinaryFormat.7BitEncodedUnsignedInteger","BinaryFormat.Binary","BinaryFormat.Byte","BinaryFormat.ByteOrder","BinaryFormat.Choice","BinaryFormat.Decimal","BinaryFormat.Double","BinaryForma
t.Group","BinaryFormat.Length","BinaryFormat.List","BinaryFormat.Null","BinaryFormat.Record","BinaryFormat.SignedInteger16","BinaryFormat.SignedInteger32","BinaryFormat.SignedInteger64","BinaryFormat.Single","BinaryFormat.Text","BinaryFormat.Transform","BinaryFormat.UnsignedInteger16","BinaryFormat.UnsignedInteger32","BinaryFormat.UnsignedInteger64","Byte.From","Character.FromNumber","Character.ToNumber","Combiner.CombineTextByDelimiter","Combiner.CombineTextByEachDelimiter","Combiner.CombineTextByLengths","Combiner.CombineTextByPositions","Combiner.CombineTextByRanges","Comparer.Equals","Comparer.FromCulture","Comparer.Ordinal","Comparer.OrdinalIgnoreCase","Csv.Document","Cube.AddAndExpandDimensionColumn","Cube.AddMeasureColumn","Cube.ApplyParameter","Cube.AttributeMemberId","Cube.AttributeMemberProperty","Cube.CollapseAndRemoveColumns","Cube.Dimensions","Cube.DisplayFolders","Cube.Measures","Cube.Parameters","Cube.Properties","Cube.PropertyKey","Cube.ReplaceDimensions","Cube.Transform","Currency.From","DB2.Database","Date.AddDays","Date.AddMonths","Date.AddQuarters","Date.AddWeeks","Date.AddYears","Date.Day","Date.DayOfWeek","Date.DayOfWeekName","Date.DayOfYear","Date.DaysInMonth","Date.EndOfDay","Date.EndOfMonth","Date.EndOfQuarter","Date.EndOfWeek","Date.EndOfYear","Date.From","Date.FromText","Date.IsInCurrentDay","Date.IsInCurrentMonth","Date.IsInCurrentQuarter","Date.IsInCurrentWeek","Date.IsInCurrentYear","Date.IsInNextDay","Date.IsInNextMonth","Date.IsInNextNDays","Date.IsInNextNMonths","Date.IsInNextNQuarters","Date.IsInNextNWeeks","Date.IsInNextNYears","Date.IsInNextQuarter","Date.IsInNextWeek","Date.IsInNextYear","Date.IsInPreviousDay","Date.IsInPreviousMonth","Date.IsInPreviousNDays","Date.IsInPreviousNMonths","Date.IsInPreviousNQuarters","Date.IsInPreviousNWeeks","Date.IsInPreviousNYears","Date.IsInPreviousQuarter","Date.IsInPreviousWeek","Date.IsInPreviousYear","Date.IsInYearToDate","Date.IsLeapYear","Date.Month","Date.MonthName","Date.QuarterOfYear","
Date.StartOfDay","Date.StartOfMonth","Date.StartOfQuarter","Date.StartOfWeek","Date.StartOfYear","Date.ToRecord","Date.ToText","Date.WeekOfMonth","Date.WeekOfYear","Date.Year","DateTime.AddZone","DateTime.Date","DateTime.FixedLocalNow","DateTime.From","DateTime.FromFileTime","DateTime.FromText","DateTime.IsInCurrentHour","DateTime.IsInCurrentMinute","DateTime.IsInCurrentSecond","DateTime.IsInNextHour","DateTime.IsInNextMinute","DateTime.IsInNextNHours","DateTime.IsInNextNMinutes","DateTime.IsInNextNSeconds","DateTime.IsInNextSecond","DateTime.IsInPreviousHour","DateTime.IsInPreviousMinute","DateTime.IsInPreviousNHours","DateTime.IsInPreviousNMinutes","DateTime.IsInPreviousNSeconds","DateTime.IsInPreviousSecond","DateTime.LocalNow","DateTime.Time","DateTime.ToRecord","DateTime.ToText","DateTimeZone.FixedLocalNow","DateTimeZone.FixedUtcNow","DateTimeZone.From","DateTimeZone.FromFileTime","DateTimeZone.FromText","DateTimeZone.LocalNow","DateTimeZone.RemoveZone","DateTimeZone.SwitchZone","DateTimeZone.ToLocal","DateTimeZone.ToRecord","DateTimeZone.ToText","DateTimeZone.ToUtc","DateTimeZone.UtcNow","DateTimeZone.ZoneHours","DateTimeZone.ZoneMinutes","Decimal.From","Diagnostics.ActivityId","Diagnostics.Trace","DirectQueryCapabilities.From","Double.From","Duration.Days","Duration.From","Duration.FromText","Duration.Hours","Duration.Minutes","Duration.Seconds","Duration.ToRecord","Duration.ToText","Duration.TotalDays","Duration.TotalHours","Duration.TotalMinutes","Duration.TotalSeconds","Embedded.Value","Error.Record","Excel.CurrentWorkbook","Excel.Workbook","Exchange.Contents","Expression.Constant","Expression.Evaluate","Expression.Identifier","Facebook.Graph","File.Contents","Folder.Contents","Folder.Files","Function.From","Function.Invoke","Function.InvokeAfter","Function.IsDataSource","GoogleAnalytics.Accounts","Guid.From","HdInsight.Containers","HdInsight.Contents","HdInsight.Files","Hdfs.Contents","Hdfs.Files","Informix.Database","Int16.From","Int32.From","Int64.From"
,"Int8.From","ItemExpression.From","Json.Document","Json.FromValue","Lines.FromBinary","Lines.FromText","Lines.ToBinary","Lines.ToText","List.Accumulate","List.AllTrue","List.Alternate","List.AnyTrue","List.Average","List.Buffer","List.Combine","List.Contains","List.ContainsAll","List.ContainsAny","List.Count","List.Covariance","List.DateTimeZones","List.DateTimes","List.Dates","List.Difference","List.Distinct","List.Durations","List.FindText","List.First","List.FirstN","List.Generate","List.InsertRange","List.Intersect","List.IsDistinct","List.IsEmpty","List.Last","List.LastN","List.MatchesAll","List.MatchesAny","List.Max","List.MaxN","List.Median","List.Min","List.MinN","List.Mode","List.Modes","List.NonNullCount","List.Numbers","List.PositionOf","List.PositionOfAny","List.Positions","List.Product","List.Random","List.Range","List.RemoveFirstN","List.RemoveItems","List.RemoveLastN","List.RemoveMatchingItems","List.RemoveNulls","List.RemoveRange","List.Repeat","List.ReplaceMatchingItems","List.ReplaceRange","List.ReplaceValue","List.Reverse","List.Select","List.Single","List.SingleOrDefault","List.Skip","List.Sort","List.StandardDeviation","List.Sum","List.Times","List.Transform","List.TransformMany","List.Union","List.Zip","Logical.From","Logical.FromText","Logical.ToText","MQ.Queue","MySQL.Database","Number.Abs","Number.Acos","Number.Asin","Number.Atan","Number.Atan2","Number.BitwiseAnd","Number.BitwiseNot","Number.BitwiseOr","Number.BitwiseShiftLeft","Number.BitwiseShiftRight","Number.BitwiseXor","Number.Combinations","Number.Cos","Number.Cosh","Number.Exp","Number.Factorial","Number.From","Number.FromText","Number.IntegerDivide","Number.IsEven","Number.IsNaN","Number.IsOdd","Number.Ln","Number.Log","Number.Log10","Number.Mod","Number.Permutations","Number.Power","Number.Random","Number.RandomBetween","Number.Round","Number.RoundAwayFromZero","Number.RoundDown","Number.RoundTowardZero","Number.RoundUp","Number.Sign","Number.Sin","Number.Sinh","Number.Sqrt","Numb
er.Tan","Number.Tanh","Number.ToText","OData.Feed","Odbc.DataSource","Odbc.Query","OleDb.DataSource","OleDb.Query","Oracle.Database","Percentage.From","PostgreSQL.Database","RData.FromBinary","Record.AddField","Record.Combine","Record.Field","Record.FieldCount","Record.FieldNames","Record.FieldOrDefault","Record.FieldValues","Record.FromList","Record.FromTable","Record.HasFields","Record.RemoveFields","Record.RenameFields","Record.ReorderFields","Record.SelectFields","Record.ToList","Record.ToTable","Record.TransformFields","Replacer.ReplaceText","Replacer.ReplaceValue","RowExpression.Column","RowExpression.From","Salesforce.Data","Salesforce.Reports","SapBusinessWarehouse.Cubes","SapHana.Database","SharePoint.Contents","SharePoint.Files","SharePoint.Tables","Single.From","Soda.Feed","Splitter.SplitByNothing","Splitter.SplitTextByAnyDelimiter","Splitter.SplitTextByDelimiter","Splitter.SplitTextByEachDelimiter","Splitter.SplitTextByLengths","Splitter.SplitTextByPositions","Splitter.SplitTextByRanges","Splitter.SplitTextByRepeatedLengths","Splitter.SplitTextByWhitespace","Sql.Database","Sql.Databases","SqlExpression.SchemaFrom","SqlExpression.ToExpression","Sybase.Database","Table.AddColumn","Table.AddIndexColumn","Table.AddJoinColumn","Table.AddKey","Table.AggregateTableColumn","Table.AlternateRows","Table.Buffer","Table.Column","Table.ColumnCount","Table.ColumnNames","Table.ColumnsOfType","Table.Combine","Table.CombineColumns","Table.Contains","Table.ContainsAll","Table.ContainsAny","Table.DemoteHeaders","Table.Distinct","Table.DuplicateColumn","Table.ExpandListColumn","Table.ExpandRecordColumn","Table.ExpandTableColumn","Table.FillDown","Table.FillUp","Table.FilterWithDataTable","Table.FindText","Table.First","Table.FirstN","Table.FirstValue","Table.FromColumns","Table.FromList","Table.FromPartitions","Table.FromRecords","Table.FromRows","Table.FromValue","Table.Group","Table.HasColumns","Table.InsertRows","Table.IsDistinct","Table.IsEmpty","Table.Join","Table.Keys
","Table.Last","Table.LastN","Table.MatchesAllRows","Table.MatchesAnyRows","Table.Max","Table.MaxN","Table.Min","Table.MinN","Table.NestedJoin","Table.Partition","Table.PartitionValues","Table.Pivot","Table.PositionOf","Table.PositionOfAny","Table.PrefixColumns","Table.Profile","Table.PromoteHeaders","Table.Range","Table.RemoveColumns","Table.RemoveFirstN","Table.RemoveLastN","Table.RemoveMatchingRows","Table.RemoveRows","Table.RemoveRowsWithErrors","Table.RenameColumns","Table.ReorderColumns","Table.Repeat","Table.ReplaceErrorValues","Table.ReplaceKeys","Table.ReplaceMatchingRows","Table.ReplaceRelationshipIdentity","Table.ReplaceRows","Table.ReplaceValue","Table.ReverseRows","Table.RowCount","Table.Schema","Table.SelectColumns","Table.SelectRows","Table.SelectRowsWithErrors","Table.SingleRow","Table.Skip","Table.Sort","Table.SplitColumn","Table.ToColumns","Table.ToList","Table.ToRecords","Table.ToRows","Table.TransformColumnNames","Table.TransformColumnTypes","Table.TransformColumns","Table.TransformRows","Table.Transpose","Table.Unpivot","Table.UnpivotOtherColumns","Table.View","Table.ViewFunction","TableAction.DeleteRows","TableAction.InsertRows","TableAction.UpdateRows","Tables.GetRelationships","Teradata.Database","Text.AfterDelimiter","Text.At","Text.BeforeDelimiter","Text.BetweenDelimiters","Text.Clean","Text.Combine","Text.Contains","Text.End","Text.EndsWith","Text.Format","Text.From","Text.FromBinary","Text.Insert","Text.Length","Text.Lower","Text.Middle","Text.NewGuid","Text.PadEnd","Text.PadStart","Text.PositionOf","Text.PositionOfAny","Text.Proper","Text.Range","Text.Remove","Text.RemoveRange","Text.Repeat","Text.Replace","Text.ReplaceRange","Text.Select","Text.Split","Text.SplitAny","Text.Start","Text.StartsWith","Text.ToBinary","Text.ToList","Text.Trim","Text.TrimEnd","Text.TrimStart","Text.Upper","Time.EndOfHour","Time.From","Time.FromText","Time.Hour","Time.Minute","Time.Second","Time.StartOfHour","Time.ToRecord","Time.ToText","Type.AddTableKey","Ty
pe.ClosedRecord","Type.Facets","Type.ForFunction","Type.ForRecord","Type.FunctionParameters","Type.FunctionRequiredParameters","Type.FunctionReturn","Type.Is","Type.IsNullable","Type.IsOpenRecord","Type.ListItem","Type.NonNullable","Type.OpenRecord","Type.RecordFields","Type.ReplaceFacets","Type.ReplaceTableKeys","Type.TableColumn","Type.TableKeys","Type.TableRow","Type.TableSchema","Type.Union","Uri.BuildQueryString","Uri.Combine","Uri.EscapeDataString","Uri.Parts","Value.Add","Value.As","Value.Compare","Value.Divide","Value.Equals","Value.Firewall","Value.FromText","Value.Is","Value.Metadata","Value.Multiply","Value.NativeQuery","Value.NullableEquals","Value.RemoveMetadata","Value.ReplaceMetadata","Value.ReplaceType","Value.Subtract","Value.Type","ValueAction.NativeStatement","ValueAction.Replace","Variable.Value","Web.Contents","Web.Page","WebAction.Request","Xml.Document","Xml.Tables"],builtinConstants:["BinaryEncoding.Base64","BinaryEncoding.Hex","BinaryOccurrence.Optional","BinaryOccurrence.Repeating","BinaryOccurrence.Required","ByteOrder.BigEndian","ByteOrder.LittleEndian","Compression.Deflate","Compression.GZip","CsvStyle.QuoteAfterDelimiter","CsvStyle.QuoteAlways","Culture.Current","Day.Friday","Day.Monday","Day.Saturday","Day.Sunday","Day.Thursday","Day.Tuesday","Day.Wednesday","ExtraValues.Error","ExtraValues.Ignore","ExtraValues.List","GroupKind.Global","GroupKind.Local","JoinAlgorithm.Dynamic","JoinAlgorithm.LeftHash","JoinAlgorithm.LeftIndex","JoinAlgorithm.PairwiseHash","JoinAlgorithm.RightHash","JoinAlgorithm.RightIndex","JoinAlgorithm.SortMerge","JoinKind.FullOuter","JoinKind.Inner","JoinKind.LeftAnti","JoinKind.LeftOuter","JoinKind.RightAnti","JoinKind.RightOuter","JoinSide.Left","JoinSide.Right","MissingField.Error","MissingField.Ignore","MissingField.UseNull","Number.E","Number.Epsilon","Number.NaN","Number.NegativeInfinity","Number.PI","Number.PositiveInfinity","Occurrence.All","Occurrence.First","Occurrence.Last","Occurrence.Optional","Occurre
nce.Repeating","Occurrence.Required","Order.Ascending","Order.Descending","Precision.Decimal","Precision.Double","QuoteStyle.Csv","QuoteStyle.None","RelativePosition.FromEnd","RelativePosition.FromStart","RoundingMode.AwayFromZero","RoundingMode.Down","RoundingMode.ToEven","RoundingMode.TowardZero","RoundingMode.Up","SapHanaDistribution.All","SapHanaDistribution.Connection","SapHanaDistribution.Off","SapHanaDistribution.Statement","SapHanaRangeOperator.Equals","SapHanaRangeOperator.GreaterThan","SapHanaRangeOperator.GreaterThanOrEquals","SapHanaRangeOperator.LessThan","SapHanaRangeOperator.LessThanOrEquals","SapHanaRangeOperator.NotEquals","TextEncoding.Ascii","TextEncoding.BigEndianUnicode","TextEncoding.Unicode","TextEncoding.Utf16","TextEncoding.Utf8","TextEncoding.Windows","TraceLevel.Critical","TraceLevel.Error","TraceLevel.Information","TraceLevel.Verbose","TraceLevel.Warning","WebMethod.Delete","WebMethod.Get","WebMethod.Head","WebMethod.Patch","WebMethod.Post","WebMethod.Put"],builtinTypes:["Action.Type","Any.Type","Binary.Type","BinaryEncoding.Type","BinaryOccurrence.Type","Byte.Type","ByteOrder.Type","Character.Type","Compression.Type","CsvStyle.Type","Currency.Type","Date.Type","DateTime.Type","DateTimeZone.Type","Day.Type","Decimal.Type","Double.Type","Duration.Type","ExtraValues.Type","Function.Type","GroupKind.Type","Guid.Type","Int16.Type","Int32.Type","Int64.Type","Int8.Type","JoinAlgorithm.Type","JoinKind.Type","JoinSide.Type","List.Type","Logical.Type","MissingField.Type","None.Type","Null.Type","Number.Type","Occurrence.Type","Order.Type","Password.Type","Percentage.Type","Precision.Type","QuoteStyle.Type","Record.Type","RelativePosition.Type","RoundingMode.Type","SapHanaDistribution.Type","SapHanaRangeOperator.Type","Single.Type","Table.Type","Text.Type","TextEncoding.Type","Time.Type","TraceLevel.Type","Type.Type","Uri.Type","WebMethod.Type"],tokenizer:{root:[[/#"[\w 
\.]+"/,"identifier.quote"],[/\d*\.\d+([eE][\-+]?\d+)?/,"number.float"],[/0[xX][0-9a-fA-F]+/,"number.hex"],[/\d+([eE][\-+]?\d+)?/,"number"],[/(#?[a-z]+)\b/,{cases:{"@typeKeywords":"type","@keywords":"keyword","@constants":"constant","@constructors":"constructor","@operatorKeywords":"operators","@default":"identifier"}}],[/\b([A-Z][a-zA-Z0-9]+\.Type)\b/,{cases:{"@builtinTypes":"type","@default":"identifier"}}],[/\b([A-Z][a-zA-Z0-9]+\.[A-Z][a-zA-Z0-9]+)\b/,{cases:{"@builtinFunctions":"keyword.function","@builtinConstants":"constant","@default":"identifier"}}],[/\b([a-zA-Z_][\w\.]*)\b/,"identifier"],{include:"@whitespace"},{include:"@comments"},{include:"@strings"},[/[{}()\[\]]/,"@brackets"],[/([=\+<>\-\*&@\?\/!])|([<>]=)|(<>)|(=>)|(\.\.\.)|(\.\.)/,"operators"],[/[,;]/,"delimiter"]],whitespace:[[/\s+/,"white"]],comments:[["\\/\\*","comment","@comment"],["\\/\\/+.*","comment"]],comment:[["\\*\\/","comment","@pop"],[".","comment"]],strings:[['"',"string","@string"]],string:[['""',"string.escape"],['"',"string","@pop"],[".","string"]]}}}}]); //# sourceMappingURL=4900.19572072.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/4900.19572072.chunk.js
4900.19572072.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[2909],{72909:function(e,t,p){p.r(t),p.d(t,{conf:function(){return n},language:function(){return i}});var n={wordPattern:/(-?\d*\.\d\w*)|([^\`\~\!\@\#\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)/g,comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}",notIn:["string"]},{open:"[",close:"]",notIn:["string"]},{open:"(",close:")",notIn:["string"]},{open:'"',close:'"',notIn:["string"]},{open:"'",close:"'",notIn:["string","comment"]}],folding:{markers:{start:new RegExp("^\\s*(#|//)region\\b"),end:new RegExp("^\\s*(#|//)endregion\\b")}}},i={defaultToken:"",tokenPostfix:"",tokenizer:{root:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.root"}],[/<!DOCTYPE/,"metatag.html","@doctype"],[/<!--/,"comment.html","@comment"],[/(<)(\w+)(\/>)/,["delimiter.html","tag.html","delimiter.html"]],[/(<)(script)/,["delimiter.html",{token:"tag.html",next:"@script"}]],[/(<)(style)/,["delimiter.html",{token:"tag.html",next:"@style"}]],[/(<)([:\w]+)/,["delimiter.html",{token:"tag.html",next:"@otherTag"}]],[/(<\/)(\w+)/,["delimiter.html",{token:"tag.html",next:"@otherTag"}]],[/</,"delimiter.html"],[/[^<]+/]],doctype:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.comment"}],[/[^>]+/,"metatag.content.html"],[/>/,"metatag.html","@pop"]],comment:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.comment"}],[/-->/,"comment.html","@pop"],[/[^-]+/,"comment.content.html"],[/./,"comment.content.html"]],otherTag:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.otherTag"}],[/\/?>/,"delimiter.html","@pop"],[/"([^"]*)"/,"attribute.value"],[/'([^']*)'/,"attribute.value"],[/[\w\-]+/,"attribute.name"],[/=/,"delimiter"],[/[ 
\t\r\n]+/]],script:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.script"}],[/type/,"attribute.name","@scriptAfterType"],[/"([^"]*)"/,"attribute.value"],[/'([^']*)'/,"attribute.value"],[/[\w\-]+/,"attribute.name"],[/=/,"delimiter"],[/>/,{token:"delimiter.html",next:"@scriptEmbedded.text/javascript",nextEmbedded:"text/javascript"}],[/[ \t\r\n]+/],[/(<\/)(script\s*)(>)/,["delimiter.html","tag.html",{token:"delimiter.html",next:"@pop"}]]],scriptAfterType:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.scriptAfterType"}],[/=/,"delimiter","@scriptAfterTypeEquals"],[/>/,{token:"delimiter.html",next:"@scriptEmbedded.text/javascript",nextEmbedded:"text/javascript"}],[/[ \t\r\n]+/],[/<\/script\s*>/,{token:"@rematch",next:"@pop"}]],scriptAfterTypeEquals:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.scriptAfterTypeEquals"}],[/"([^"]*)"/,{token:"attribute.value",switchTo:"@scriptWithCustomType.$1"}],[/'([^']*)'/,{token:"attribute.value",switchTo:"@scriptWithCustomType.$1"}],[/>/,{token:"delimiter.html",next:"@scriptEmbedded.text/javascript",nextEmbedded:"text/javascript"}],[/[ \t\r\n]+/],[/<\/script\s*>/,{token:"@rematch",next:"@pop"}]],scriptWithCustomType:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.scriptWithCustomType.$S2"}],[/>/,{token:"delimiter.html",next:"@scriptEmbedded.$S2",nextEmbedded:"$S2"}],[/"([^"]*)"/,"attribute.value"],[/'([^']*)'/,"attribute.value"],[/[\w\-]+/,"attribute.name"],[/=/,"delimiter"],[/[ 
\t\r\n]+/],[/<\/script\s*>/,{token:"@rematch",next:"@pop"}]],scriptEmbedded:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInEmbeddedState.scriptEmbedded.$S2",nextEmbedded:"@pop"}],[/<\/script/,{token:"@rematch",next:"@pop",nextEmbedded:"@pop"}]],style:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.style"}],[/type/,"attribute.name","@styleAfterType"],[/"([^"]*)"/,"attribute.value"],[/'([^']*)'/,"attribute.value"],[/[\w\-]+/,"attribute.name"],[/=/,"delimiter"],[/>/,{token:"delimiter.html",next:"@styleEmbedded.text/css",nextEmbedded:"text/css"}],[/[ \t\r\n]+/],[/(<\/)(style\s*)(>)/,["delimiter.html","tag.html",{token:"delimiter.html",next:"@pop"}]]],styleAfterType:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.styleAfterType"}],[/=/,"delimiter","@styleAfterTypeEquals"],[/>/,{token:"delimiter.html",next:"@styleEmbedded.text/css",nextEmbedded:"text/css"}],[/[ \t\r\n]+/],[/<\/style\s*>/,{token:"@rematch",next:"@pop"}]],styleAfterTypeEquals:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.styleAfterTypeEquals"}],[/"([^"]*)"/,{token:"attribute.value",switchTo:"@styleWithCustomType.$1"}],[/'([^']*)'/,{token:"attribute.value",switchTo:"@styleWithCustomType.$1"}],[/>/,{token:"delimiter.html",next:"@styleEmbedded.text/css",nextEmbedded:"text/css"}],[/[ \t\r\n]+/],[/<\/style\s*>/,{token:"@rematch",next:"@pop"}]],styleWithCustomType:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInSimpleState.styleWithCustomType.$S2"}],[/>/,{token:"delimiter.html",next:"@styleEmbedded.$S2",nextEmbedded:"$S2"}],[/"([^"]*)"/,"attribute.value"],[/'([^']*)'/,"attribute.value"],[/[\w\-]+/,"attribute.name"],[/=/,"delimiter"],[/[ 
\t\r\n]+/],[/<\/style\s*>/,{token:"@rematch",next:"@pop"}]],styleEmbedded:[[/<\?((php)|=)?/,{token:"@rematch",switchTo:"@phpInEmbeddedState.styleEmbedded.$S2",nextEmbedded:"@pop"}],[/<\/style/,{token:"@rematch",next:"@pop",nextEmbedded:"@pop"}]],phpInSimpleState:[[/<\?((php)|=)?/,"metatag.php"],[/\?>/,{token:"metatag.php",switchTo:"@$S2.$S3"}],{include:"phpRoot"}],phpInEmbeddedState:[[/<\?((php)|=)?/,"metatag.php"],[/\?>/,{token:"metatag.php",switchTo:"@$S2.$S3",nextEmbedded:"$S3"}],{include:"phpRoot"}],phpRoot:[[/[a-zA-Z_]\w*/,{cases:{"@phpKeywords":{token:"keyword.php"},"@phpCompileTimeConstants":{token:"constant.php"},"@default":"identifier.php"}}],[/[$a-zA-Z_]\w*/,{cases:{"@phpPreDefinedVariables":{token:"variable.predefined.php"},"@default":"variable.php"}}],[/[{}]/,"delimiter.bracket.php"],[/[\[\]]/,"delimiter.array.php"],[/[()]/,"delimiter.parenthesis.php"],[/[ \t\r\n]+/],[/(#|\/\/)$/,"comment.php"],[/(#|\/\/)/,"comment.php","@phpLineComment"],[/\/\*/,"comment.php","@phpComment"],[/"/,"string.php","@phpDoubleQuoteString"],[/'/,"string.php","@phpSingleQuoteString"],[/[\+\-\*\%\&\|\^\~\!\=\<\>\/\?\;\:\.\,\@]/,"delimiter.php"],[/\d*\d+[eE]([\-+]?\d+)?/,"number.float.php"],[/\d*\.\d+([eE][\-+]?\d+)?/,"number.float.php"],[/0[xX][0-9a-fA-F']*[0-9a-fA-F]/,"number.hex.php"],[/0[0-7']*[0-7]/,"number.octal.php"],[/0[bB][0-1']*[0-1]/,"number.binary.php"],[/\d[\d']*/,"number.php"],[/\d/,"number.php"]],phpComment:[[/\*\//,"comment.php","@pop"],[/[^*]+/,"comment.php"],[/./,"comment.php"]],phpLineComment:[[/\?>/,{token:"@rematch",next:"@pop"}],[/.$/,"comment.php","@pop"],[/[^?]+$/,"comment.php","@pop"],[/[^?]+/,"comment.php"],[/./,"comment.php"]],phpDoubleQuoteString:[[/[^\\"]+/,"string.php"],[/@escapes/,"string.escape.php"],[/\\./,"string.escape.invalid.php"],[/"/,"string.php","@pop"]],phpSingleQuoteString:[[/[^\\']+/,"string.php"],[/@escapes/,"string.escape.php"],[/\\./,"string.escape.invalid.php"],[/'/,"string.php","@pop"]]},phpKeywords:["abstract","and","array","as","br
eak","callable","case","catch","cfunction","class","clone","const","continue","declare","default","do","else","elseif","enddeclare","endfor","endforeach","endif","endswitch","endwhile","extends","false","final","for","foreach","function","global","goto","if","implements","interface","instanceof","insteadof","namespace","new","null","object","old_function","or","private","protected","public","resource","static","switch","throw","trait","try","true","use","var","while","xor","die","echo","empty","exit","eval","include","include_once","isset","list","require","require_once","return","print","unset","yield","__construct"],phpCompileTimeConstants:["__CLASS__","__DIR__","__FILE__","__LINE__","__NAMESPACE__","__METHOD__","__FUNCTION__","__TRAIT__"],phpPreDefinedVariables:["$GLOBALS","$_SERVER","$_GET","$_POST","$_FILES","$_REQUEST","$_SESSION","$_ENV","$_COOKIE","$php_errormsg","$HTTP_RAW_POST_DATA","$http_response_header","$argc","$argv"],escapes:/\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/}}}]); //# sourceMappingURL=2909.28b39275.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/2909.28b39275.chunk.js
2909.28b39275.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[5558,1324],{25558:function(e,t,n){n.r(t),n.d(t,{conf:function(){return o},language:function(){return i}});var r=n(1324),o=r.conf,i={defaultToken:"invalid",tokenPostfix:".js",keywords:["break","case","catch","class","continue","const","constructor","debugger","default","delete","do","else","export","extends","false","finally","for","from","function","get","if","import","in","instanceof","let","new","null","return","set","super","switch","symbol","this","throw","true","try","typeof","undefined","var","void","while","with","yield","async","await","of"],typeKeywords:[],operators:r.language.operators,symbols:r.language.symbols,escapes:r.language.escapes,digits:r.language.digits,octaldigits:r.language.octaldigits,binarydigits:r.language.binarydigits,hexdigits:r.language.hexdigits,regexpctl:r.language.regexpctl,regexpesc:r.language.regexpesc,tokenizer:r.language.tokenizer}},1324:function(e,t,n){n.r(t),n.d(t,{conf:function(){return u},language:function(){return m}});var r,o,i=n(37762),s=n(94389),a=Object.defineProperty,c=Object.getOwnPropertyDescriptor,g=Object.getOwnPropertyNames,l=Object.prototype.hasOwnProperty,p=function(e,t,n,r){if(t&&"object"===typeof t||"function"===typeof t){var o,s=(0,i.Z)(g(t));try{var p=function(){var i=o.value;l.call(e,i)||i===n||a(e,i,{get:function(){return t[i]},enumerable:!(r=c(t,i))||r.enumerable})};for(s.s();!(o=s.n()).done;)p()}catch(d){s.e(d)}finally{s.f()}}return e},d={};p(d,r=s,"default"),o&&p(o,r,"default");var u={wordPattern:/(-?\d*\.\d\w*)|([^\`\~\!\@\#\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)/g,comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["{","}"],["[","]"],["(",")"]],onEnterRules:[{beforeText:/^\s*\/\*\*(?!\/)([^\*]|\*(?!\/))*$/,afterText:/^\s*\*\/$/,action:{indentAction:d.languages.IndentAction.IndentOutdent,appendText:" * 
"}},{beforeText:/^\s*\/\*\*(?!\/)([^\*]|\*(?!\/))*$/,action:{indentAction:d.languages.IndentAction.None,appendText:" * "}},{beforeText:/^(\t|(\ \ ))*\ \*(\ ([^\*]|\*(?!\/))*)?$/,action:{indentAction:d.languages.IndentAction.None,appendText:"* "}},{beforeText:/^(\t|(\ \ ))*\ \*\/\s*$/,action:{indentAction:d.languages.IndentAction.None,removeText:1}}],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"',notIn:["string"]},{open:"'",close:"'",notIn:["string","comment"]},{open:"`",close:"`",notIn:["string","comment"]},{open:"/**",close:" */",notIn:["string"]}],folding:{markers:{start:new RegExp("^\\s*//\\s*#?region\\b"),end:new RegExp("^\\s*//\\s*#?endregion\\b")}}},m={defaultToken:"invalid",tokenPostfix:".ts",keywords:["abstract","any","as","asserts","bigint","boolean","break","case","catch","class","continue","const","constructor","debugger","declare","default","delete","do","else","enum","export","extends","false","finally","for","from","function","get","if","implements","import","in","infer","instanceof","interface","is","keyof","let","module","namespace","never","new","null","number","object","out","package","private","protected","public","override","readonly","require","global","return","set","static","string","super","switch","symbol","this","throw","true","try","type","typeof","undefined","unique","unknown","var","void","while","with","yield","async","await","of"],operators:["<=",">=","==","!=","===","!==","=>","+","-","**","*","/","%","++","--","<<","</",">>",">>>","&","|","^","!","~","&&","||","??","?",":","=","+=","-=","*=","**=","/=","%=","<<=",">>=",">>>=","&=","|=","^=","@"],symbols:/[=><!~?:&|+\-*\/\^%]+/,escapes:/\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,digits:/\d+(_+\d+)*/,octaldigits:/[0-7]+(_+[0-7]+)*/,binarydigits:/[0-1]+(_+[0-1]+)*/,hexdigits:/[[0-9a-fA-F]+(_+[0-9a-fA-F]+)*/,regexpctl:/[(){}\[\]\$\^|\-*+?\.]/,regexpesc:/\\(?:[bBdDfnrstvwWn0\\\/]|@regexpctl|c[A-Z]|x[0-9a-fA-
F]{2}|u[0-9a-fA-F]{4})/,tokenizer:{root:[[/[{}]/,"delimiter.bracket"],{include:"common"}],common:[[/[a-z_$][\w$]*/,{cases:{"@keywords":"keyword","@default":"identifier"}}],[/[A-Z][\w\$]*/,"type.identifier"],{include:"@whitespace"},[/\/(?=([^\\\/]|\\.)+\/([dgimsuy]*)(\s*)(\.|;|,|\)|\]|\}|$))/,{token:"regexp",bracket:"@open",next:"@regexp"}],[/[()\[\]]/,"@brackets"],[/[<>](?!@symbols)/,"@brackets"],[/!(?=([^=]|$))/,"delimiter"],[/@symbols/,{cases:{"@operators":"delimiter","@default":""}}],[/(@digits)[eE]([\-+]?(@digits))?/,"number.float"],[/(@digits)\.(@digits)([eE][\-+]?(@digits))?/,"number.float"],[/0[xX](@hexdigits)n?/,"number.hex"],[/0[oO]?(@octaldigits)n?/,"number.octal"],[/0[bB](@binarydigits)n?/,"number.binary"],[/(@digits)n?/,"number"],[/[;,.]/,"delimiter"],[/"([^"\\]|\\.)*$/,"string.invalid"],[/'([^'\\]|\\.)*$/,"string.invalid"],[/"/,"string","@string_double"],[/'/,"string","@string_single"],[/`/,"string","@string_backtick"]],whitespace:[[/[ \t\r\n]+/,""],[/\/\*\*(?!\/)/,"comment.doc","@jsdoc"],[/\/\*/,"comment","@comment"],[/\/\/.*$/,"comment"]],comment:[[/[^\/*]+/,"comment"],[/\*\//,"comment","@pop"],[/[\/*]/,"comment"]],jsdoc:[[/[^\/*]+/,"comment.doc"],[/\*\//,"comment.doc","@pop"],[/[\/*]/,"comment.doc"]],regexp:[[/(\{)(\d+(?:,\d*)?)(\})/,["regexp.escape.control","regexp.escape.control","regexp.escape.control"]],[/(\[)(\^?)(?=(?:[^\]\\\/]|\\.)+)/,["regexp.escape.control",{token:"regexp.escape.control",next:"@regexrange"}]],[/(\()(\?:|\?=|\?!)/,["regexp.escape.control","regexp.escape.control"]],[/[()]/,"regexp.escape.control"],[/@regexpctl/,"regexp.escape.control"],[/[^\\\/]/,"regexp"],[/@regexpesc/,"regexp.escape"],[/\\\./,"regexp.invalid"],[/(\/)([dgimsuy]*)/,[{token:"regexp",bracket:"@close",next:"@pop"},"keyword.other"]]],regexrange:[[/-/,"regexp.escape.control"],[/\^/,"regexp.invalid"],[/@regexpesc/,"regexp.escape"],[/[^\]]/,"regexp"],[/\]/,{token:"regexp.escape.control",next:"@pop",bracket:"@close"}]],string_double:[[/[^\\"]+/,"string"],[/@escapes/,"
string.escape"],[/\\./,"string.escape.invalid"],[/"/,"string","@pop"]],string_single:[[/[^\\']+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/'/,"string","@pop"]],string_backtick:[[/\$\{/,{token:"delimiter.bracket",next:"@bracketCounting"}],[/[^\\`$]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/`/,"string","@pop"]],bracketCounting:[[/\{/,"delimiter.bracket","@bracketCounting"],[/\}/,"delimiter.bracket","@pop"],{include:"common"}]}}}}]); //# sourceMappingURL=5558.d0dc5575.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/5558.d0dc5575.chunk.js
5558.d0dc5575.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[4969],{84969:function(e,n,i){i.r(n),i.d(n,{conf:function(){return t},language:function(){return r}});var t={comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["{","}"],["[","]"],["(",")"],["begin","end"],["case","endcase"],["casex","endcase"],["casez","endcase"],["checker","endchecker"],["class","endclass"],["clocking","endclocking"],["config","endconfig"],["function","endfunction"],["generate","endgenerate"],["group","endgroup"],["interface","endinterface"],["module","endmodule"],["package","endpackage"],["primitive","endprimitive"],["program","endprogram"],["property","endproperty"],["specify","endspecify"],["sequence","endsequence"],["table","endtable"],["task","endtask"]],autoClosingPairs:[{open:"[",close:"]"},{open:"{",close:"}"},{open:"(",close:")"},{open:"'",close:"'",notIn:["string","comment"]},{open:'"',close:'"',notIn:["string"]}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],folding:{offSide:!1,markers:{start:new RegExp("^(?:\\s*|.*(?!\\/[\\/\\*])[^\\w])(?:begin|case(x|z)?|class|clocking|config|covergroup|function|generate|interface|module|package|primitive|property|program|sequence|specify|table|task)\\b"),end:new 
RegExp("^(?:\\s*|.*(?!\\/[\\/\\*])[^\\w])(?:end|endcase|endclass|endclocking|endconfig|endgroup|endfunction|endgenerate|endinterface|endmodule|endpackage|endprimitive|endproperty|endprogram|endsequence|endspecify|endtable|endtask)\\b")}}},r={defaultToken:"",tokenPostfix:".sv",brackets:[{token:"delimiter.curly",open:"{",close:"}"},{token:"delimiter.parenthesis",open:"(",close:")"},{token:"delimiter.square",open:"[",close:"]"},{token:"delimiter.angle",open:"<",close:">"}],keywords:["accept_on","alias","always","always_comb","always_ff","always_latch","and","assert","assign","assume","automatic","before","begin","bind","bins","binsof","bit","break","buf","bufif0","bufif1","byte","case","casex","casez","cell","chandle","checker","class","clocking","cmos","config","const","constraint","context","continue","cover","covergroup","coverpoint","cross","deassign","default","defparam","design","disable","dist","do","edge","else","end","endcase","endchecker","endclass","endclocking","endconfig","endfunction","endgenerate","endgroup","endinterface","endmodule","endpackage","endprimitive","endprogram","endproperty","endspecify","endsequence","endtable","endtask","enum","event","eventually","expect","export","extends","extern","final","first_match","for","force","foreach","forever","fork","forkjoin","function","generate","genvar","global","highz0","highz1","if","iff","ifnone","ignore_bins","illegal_bins","implements","implies","import","incdir","include","initial","inout","input","inside","instance","int","integer","interconnect","interface","intersect","join","join_any","join_none","large","let","liblist","library","local","localparam","logic","longint","macromodule","matches","medium","modport","module","nand","negedge","nettype","new","nexttime","nmos","nor","noshowcancelled","not","notif0","notif1","null","or","output","package","packed","parameter","pmos","posedge","primitive","priority","program","property","protected","pull0","pull1","pulldown","pullup","pulsestyle_ondetect"
,"pulsestyle_onevent","pure","rand","randc","randcase","randsequence","rcmos","real","realtime","ref","reg","reject_on","release","repeat","restrict","return","rnmos","rpmos","rtran","rtranif0","rtranif1","s_always","s_eventually","s_nexttime","s_until","s_until_with","scalared","sequence","shortint","shortreal","showcancelled","signed","small","soft","solve","specify","specparam","static","string","strong","strong0","strong1","struct","super","supply0","supply1","sync_accept_on","sync_reject_on","table","tagged","task","this","throughout","time","timeprecision","timeunit","tran","tranif0","tranif1","tri","tri0","tri1","triand","trior","trireg","type","typedef","union","unique","unique0","unsigned","until","until_with","untyped","use","uwire","var","vectored","virtual","void","wait","wait_order","wand","weak","weak0","weak1","while","wildcard","wire","with","within","wor","xnor","xor"],builtin_gates:["and","nand","nor","or","xor","xnor","buf","not","bufif0","bufif1","notif1","notif0","cmos","nmos","pmos","rcmos","rnmos","rpmos","tran","tranif1","tranif0","rtran","rtranif1","rtranif0"],operators:["=","+=","-=","*=","/=","%=","&=","|=","^=","<<=",">>+","<<<=",">>>=","?",":","+","-","!","~","&","~&","|","~|","^","~^","^~","+","-","*","/","%","==","!=","===","!==","==?","!=?","&&","||","**","<","<=",">",">=","&","|","^",">>","<<",">>>","<<<","++","--","->","<->","inside","dist","::","+:","-:","*>","&&&","|->","|=>","#=#"],symbols:/[=><!~?:&|+\-*\/\^%#]+/,escapes:/%%|\\(?:[antvf\\"']|x[0-9A-Fa-f]{1,2}|[0-7]{1,3})/,identifier:/(?:[a-zA-Z_][a-zA-Z0-9_$\.]*|\\\S+ 
)/,systemcall:/[$][a-zA-Z0-9_]+/,timeunits:/s|ms|us|ns|ps|fs/,tokenizer:{root:[[/^(\s*)(@identifier)/,["",{cases:{"@builtin_gates":{token:"keyword.$2",next:"@module_instance"},table:{token:"keyword.$2",next:"@table"},"@keywords":{token:"keyword.$2"},"@default":{token:"identifier",next:"@module_instance"}}}]],[/^\s*`include/,{token:"keyword.directive.include",next:"@include"}],[/^\s*`\s*\w+/,"keyword"],{include:"@identifier_or_keyword"},{include:"@whitespace"},[/\(\*.*\*\)/,"annotation"],[/@systemcall/,"variable.predefined"],[/[{}()\[\]]/,"@brackets"],[/[<>](?!@symbols)/,"@brackets"],[/@symbols/,{cases:{"@operators":"delimiter","@default":""}}],{include:"@numbers"},[/[;,.]/,"delimiter"],{include:"@strings"}],identifier_or_keyword:[[/@identifier/,{cases:{"@keywords":{token:"keyword.$0"},"@default":"identifier"}}]],numbers:[[/\d+?[\d_]*(?:\.[\d_]+)?[eE][\-+]?\d+/,"number.float"],[/\d+?[\d_]*\.[\d_]+(?:\s*@timeunits)?/,"number.float"],[/(?:\d+?[\d_]*\s*)?'[sS]?[dD]\s*[0-9xXzZ?]+?[0-9xXzZ?_]*/,"number"],[/(?:\d+?[\d_]*\s*)?'[sS]?[bB]\s*[0-1xXzZ?]+?[0-1xXzZ?_]*/,"number.binary"],[/(?:\d+?[\d_]*\s*)?'[sS]?[oO]\s*[0-7xXzZ?]+?[0-7xXzZ?_]*/,"number.octal"],[/(?:\d+?[\d_]*\s*)?'[sS]?[hH]\s*[0-9a-fA-FxXzZ?]+?[0-9a-fA-FxXzZ?_]*/,"number.hex"],[/1step/,"number"],[/[\dxXzZ]+?[\dxXzZ_]*(?:\s*@timeunits)?/,"number"],[/'[01xXzZ]+/,"number"]],module_instance:[{include:"@whitespace"},[/(#?)(\()/,["",{token:"@brackets",next:"@port_connection"}]],[/@identifier\s*[;={}\[\],]/,{token:"@rematch",next:"@pop"}],[/@symbols|[;={}\[\],]/,{token:"@rematch",next:"@pop"}],[/@identifier/,"type"],[/;/,"delimiter","@pop"]],port_connection:[{include:"@identifier_or_keyword"},{include:"@whitespace"},[/@systemcall/,"variable.predefined"],{include:"@numbers"},{include:"@strings"},[/[,]/,"delimiter"],[/\(/,"@brackets","@port_connection"],[/\)/,"@brackets","@pop"]],whitespace:[[/[ 
\t\r\n]+/,""],[/\/\*/,"comment","@comment"],[/\/\/.*$/,"comment"]],comment:[[/[^\/*]+/,"comment"],[/\*\//,"comment","@pop"],[/[\/*]/,"comment"]],strings:[[/"([^"\\]|\\.)*$/,"string.invalid"],[/"/,"string","@string"]],string:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/"/,"string","@pop"]],include:[[/(\s*)(")([\w*\/*]*)(.\w*)(")/,["","string.include.identifier","string.include.identifier","string.include.identifier",{token:"string.include.identifier",next:"@pop"}]],[/(\s*)(<)([\w*\/*]*)(.\w*)(>)/,["","string.include.identifier","string.include.identifier","string.include.identifier",{token:"string.include.identifier",next:"@pop"}]]],table:[{include:"@whitespace"},[/[()]/,"@brackets"],[/[:;]/,"delimiter"],[/[01\-*?xXbBrRfFpPnN]/,"variable.predefined"],["endtable","keyword.endtable","@pop"]]}}}}]); //# sourceMappingURL=4969.ccedbf9b.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/4969.ccedbf9b.chunk.js
4969.ccedbf9b.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[3668],{63668:function(e,t,n){n.r(t),n.d(t,{conf:function(){return r},language:function(){return s}});var r={comments:{lineComment:"#",blockComment:["=begin","=end"]},brackets:[["(",")"],["{","}"],["[","]"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],indentationRules:{increaseIndentPattern:new RegExp("^\\s*((begin|class|(private|protected)\\s+def|def|else|elsif|ensure|for|if|module|rescue|unless|until|when|while|case)|([^#]*\\sdo\\b)|([^#]*=\\s*(case|if|unless)))\\b([^#\\{;]|(\"|'|/).*\\4)*(#.*)?$"),decreaseIndentPattern:new RegExp("^\\s*([}\\]]([,)]?\\s*(#|$)|\\.[a-zA-Z_]\\w*\\b)|(end|rescue|ensure|else|elsif|when)\\b)")}},s={tokenPostfix:".ruby",keywords:["__LINE__","__ENCODING__","__FILE__","BEGIN","END","alias","and","begin","break","case","class","def","defined?","do","else","elsif","end","ensure","for","false","if","in","module","next","nil","not","or","redo","rescue","retry","return","self","super","then","true","undef","unless","until","when","while","yield"],keywordops:["::","..","...","?",":","=>"],builtins:["require","public","private","include","extend","attr_reader","protected","private_class_method","protected_class_method","new"],declarations:["module","class","def","case","do","begin","for","if","while","until","unless"],linedecls:["def","case","do","begin","for","if","while","until","unless"],operators:["^","&","|","<=>","==","===","!~","=~",">",">=","<","<=","<<",">>","+","-","*","/","%","**","~","+@","-@","[]","[]=","`","+=","-=","*=","**=","/=","^=","%=","<<=",">>=","&=","&&=","||=","|="],brackets:[{open:"(",close:")",token:"delimiter.parenthesis"},{open:"{",close:"}",token:"delimiter.curly"},{open:"[",close:"]",token:"delimiter.square"}],symbols:/
[=><!~?:&|+\-*\/\^%\.]+/,escape:/(?:[abefnrstv\\"'\n\r]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2}|u[0-9A-Fa-f]{4})/,escapes:/\\(?:C\-(@escape|.)|c(@escape|.)|@escape)/,decpart:/\d(_?\d)*/,decimal:/0|@decpart/,delim:/[^a-zA-Z0-9\s\n\r]/,heredelim:/(?:\w+|'[^']*'|"[^"]*"|`[^`]*`)/,regexpctl:/[(){}\[\]\$\^|\-*+?\.]/,regexpesc:/\\(?:[AzZbBdDfnrstvwWn0\\\/]|@regexpctl|c[A-Z]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4})?/,tokenizer:{root:[[/^(\s*)([a-z_]\w*[!?=]?)/,["white",{cases:{"for|until|while":{token:"keyword.$2",next:"@dodecl.$2"},"@declarations":{token:"keyword.$2",next:"@root.$2"},end:{token:"keyword.$S2",next:"@pop"},"@keywords":"keyword","@builtins":"predefined","@default":"identifier"}}]],[/[a-z_]\w*[!?=]?/,{cases:{"if|unless|while|until":{token:"keyword.$0x",next:"@modifier.$0x"},for:{token:"keyword.$2",next:"@dodecl.$2"},"@linedecls":{token:"keyword.$0",next:"@root.$0"},end:{token:"keyword.$S2",next:"@pop"},"@keywords":"keyword","@builtins":"predefined","@default":"identifier"}}],[/[A-Z][\w]*[!?=]?/,"constructor.identifier"],[/\$[\w]*/,"global.constant"],[/@[\w]*/,"namespace.instance.identifier"],[/@@@[\w]*/,"namespace.class.identifier"],[/<<[-~](@heredelim).*/,{token:"string.heredoc.delimiter",next:"@heredoc.$1"}],[/[ 
\t\r\n]+<<(@heredelim).*/,{token:"string.heredoc.delimiter",next:"@heredoc.$1"}],[/^<<(@heredelim).*/,{token:"string.heredoc.delimiter",next:"@heredoc.$1"}],{include:"@whitespace"},[/"/,{token:"string.d.delim",next:'@dstring.d."'}],[/'/,{token:"string.sq.delim",next:"@sstring.sq"}],[/%([rsqxwW]|Q?)/,{token:"@rematch",next:"pstring"}],[/`/,{token:"string.x.delim",next:"@dstring.x.`"}],[/:(\w|[$@])\w*[!?=]?/,"string.s"],[/:"/,{token:"string.s.delim",next:'@dstring.s."'}],[/:'/,{token:"string.s.delim",next:"@sstring.s"}],[/\/(?=(\\\/|[^\/\n])+\/)/,{token:"regexp.delim",next:"@regexp"}],[/[{}()\[\]]/,"@brackets"],[/@symbols/,{cases:{"@keywordops":"keyword","@operators":"operator","@default":""}}],[/[;,]/,"delimiter"],[/0[xX][0-9a-fA-F](_?[0-9a-fA-F])*/,"number.hex"],[/0[_oO][0-7](_?[0-7])*/,"number.octal"],[/0[bB][01](_?[01])*/,"number.binary"],[/0[dD]@decpart/,"number"],[/@decimal((\.@decpart)?([eE][\-+]?@decpart)?)/,{cases:{$1:"number.float","@default":"number"}}]],dodecl:[[/^/,{token:"",switchTo:"@root.$S2"}],[/[a-z_]\w*[!?=]?/,{cases:{end:{token:"keyword.$S2",next:"@pop"},do:{token:"keyword",switchTo:"@root.$S2"},"@linedecls":{token:"@rematch",switchTo:"@root.$S2"},"@keywords":"keyword","@builtins":"predefined","@default":"identifier"}}],{include:"@root"}],modifier:[[/^/,"","@pop"],[/[a-z_]\w*[!?=]?/,{cases:{end:{token:"keyword.$S2",next:"@pop"},"then|else|elsif|do":{token:"keyword",switchTo:"@root.$S2"},"@linedecls":{token:"@rematch",switchTo:"@root.$S2"},"@keywords":"keyword","@builtins":"predefined","@default":"identifier"}}],{include:"@root"}],sstring:[[/[^\\']+/,"string.$S2"],[/\\\\|\\'|\\$/,"string.$S2.escape"],[/\\./,"string.$S2.invalid"],[/'/,{token:"string.$S2.delim",next:"@pop"}]],dstring:[[/[^\\`"#]+/,"string.$S2"],[/#/,"string.$S2.escape","@interpolated"],[/\\$/,"string.$S2.escape"],[/@escapes/,"string.$S2.escape"],[/\\./,"string.$S2.escape.invalid"],[/[`"]/,{cases:{"$#==$S3":{token:"string.$S2.delim",next:"@pop"},"@default":"string.$S2"}}]],heredoc:[[/^
(\s*)(@heredelim)$/,{cases:{"$2==$S2":["string.heredoc",{token:"string.heredoc.delimiter",next:"@pop"}],"@default":["string.heredoc","string.heredoc"]}}],[/.*/,"string.heredoc"]],interpolated:[[/\$\w*/,"global.constant","@pop"],[/@\w*/,"namespace.class.identifier","@pop"],[/@@@\w*/,"namespace.instance.identifier","@pop"],[/[{]/,{token:"string.escape.curly",switchTo:"@interpolated_compound"}],["","","@pop"]],interpolated_compound:[[/[}]/,{token:"string.escape.curly",next:"@pop"}],{include:"@root"}],pregexp:[{include:"@whitespace"},[/[^\(\{\[\\]/,{cases:{"$#==$S3":{token:"regexp.delim",next:"@pop"},"$#==$S2":{token:"regexp.delim",next:"@push"},"~[)}\\]]":"@brackets.regexp.escape.control","~@regexpctl":"regexp.escape.control","@default":"regexp"}}],{include:"@regexcontrol"}],regexp:[{include:"@regexcontrol"},[/[^\\\/]/,"regexp"],["/[ixmp]*",{token:"regexp.delim"},"@pop"]],regexcontrol:[[/(\{)(\d+(?:,\d*)?)(\})/,["@brackets.regexp.escape.control","regexp.escape.control","@brackets.regexp.escape.control"]],[/(\[)(\^?)/,["@brackets.regexp.escape.control",{token:"regexp.escape.control",next:"@regexrange"}]],[/(\()(\?[:=!])/,["@brackets.regexp.escape.control","regexp.escape.control"]],[/\(\?#/,{token:"regexp.escape.control",next:"@regexpcomment"}],[/[()]/,"@brackets.regexp.escape.control"],[/@regexpctl/,"regexp.escape.control"],[/\\$/,"regexp.escape"],[/@regexpesc/,"regexp.escape"],[/\\\./,"regexp.invalid"],[/#/,"regexp.escape","@interpolated"]],regexrange:[[/-/,"regexp.escape.control"],[/\^/,"regexp.invalid"],[/\\$/,"regexp.escape"],[/@regexpesc/,"regexp.escape"],[/[^\]]/,"regexp"],[/\]/,"@brackets.regexp.escape.control","@pop"]],regexpcomment:[[/[^)]+/,"comment"],[/\)/,{token:"regexp.escape.control",next:"@pop"}]],pstring:[[/%([qws])\(/,{token:"string.$1.delim",switchTo:"@qstring.$1.(.)"}],[/%([qws])\[/,{token:"string.$1.delim",switchTo:"@qstring.$1.[.]"}],[/%([qws])\{/,{token:"string.$1.delim",switchTo:"@qstring.$1.{.}"}],[/%([qws])</,{token:"string.$1.delim",switchTo:"@
qstring.$1.<.>"}],[/%([qws])(@delim)/,{token:"string.$1.delim",switchTo:"@qstring.$1.$2.$2"}],[/%r\(/,{token:"regexp.delim",switchTo:"@pregexp.(.)"}],[/%r\[/,{token:"regexp.delim",switchTo:"@pregexp.[.]"}],[/%r\{/,{token:"regexp.delim",switchTo:"@pregexp.{.}"}],[/%r</,{token:"regexp.delim",switchTo:"@pregexp.<.>"}],[/%r(@delim)/,{token:"regexp.delim",switchTo:"@pregexp.$1.$1"}],[/%(x|W|Q?)\(/,{token:"string.$1.delim",switchTo:"@qqstring.$1.(.)"}],[/%(x|W|Q?)\[/,{token:"string.$1.delim",switchTo:"@qqstring.$1.[.]"}],[/%(x|W|Q?)\{/,{token:"string.$1.delim",switchTo:"@qqstring.$1.{.}"}],[/%(x|W|Q?)</,{token:"string.$1.delim",switchTo:"@qqstring.$1.<.>"}],[/%(x|W|Q?)(@delim)/,{token:"string.$1.delim",switchTo:"@qqstring.$1.$2.$2"}],[/%([rqwsxW]|Q?)./,{token:"invalid",next:"@pop"}],[/./,{token:"invalid",next:"@pop"}]],qstring:[[/\\$/,"string.$S2.escape"],[/\\./,"string.$S2.escape"],[/./,{cases:{"$#==$S4":{token:"string.$S2.delim",next:"@pop"},"$#==$S3":{token:"string.$S2.delim",next:"@push"},"@default":"string.$S2"}}]],qqstring:[[/#/,"string.$S2.escape","@interpolated"],{include:"@qstring"}],whitespace:[[/[ \t\r\n]+/,""],[/^\s*=begin\b/,"comment","@comment"],[/#.*$/,"comment"]],comment:[[/[^=]+/,"comment"],[/^\s*=begin\b/,"comment.invalid"],[/^\s*=end\b.*/,"comment","@pop"],[/[=]/,"comment"]]}}}}]); //# sourceMappingURL=3668.cd0becc2.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/3668.cd0becc2.chunk.js
3668.cd0becc2.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[1069],{91069:function(e,t,n){n.r(t),n.d(t,{conf:function(){return o},language:function(){return s}});var o={comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["{","}"],["[","]"],["(",")"],["<",">"]],autoClosingPairs:[{open:'"',close:'"',notIn:["string","comment"]},{open:"{",close:"}",notIn:["string","comment"]},{open:"[",close:"]",notIn:["string","comment"]},{open:"(",close:")",notIn:["string","comment"]}]},s={defaultToken:"",tokenPostfix:".aes",brackets:[{token:"delimiter.curly",open:"{",close:"}"},{token:"delimiter.parenthesis",open:"(",close:")"},{token:"delimiter.square",open:"[",close:"]"},{token:"delimiter.angle",open:"<",close:">"}],keywords:["contract","library","entrypoint","function","stateful","state","hash","signature","tuple","list","address","string","bool","int","record","datatype","type","option","oracle","oracle_query","Call","Bits","Bytes","Oracle","String","Crypto","Address","Auth","Chain","None","Some","bits","bytes","event","let","map","private","public","true","false","var","if","else","throw"],operators:["=",">","<","!","~","?","::",":","==","<=",">=","!=","&&","||","++","--","+","-","*","/","&","|","^","%","<<",">>",">>>","+=","-=","*=","/=","&=","|=","^=","%=","<<=",">>=",">>>="],symbols:/[=><!~?:&|+\-*\/\^%]+/,escapes:/\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,integersuffix:/(ll|LL|u|U|l|L)?(ll|LL|u|U|l|L)?/,floatsuffix:/[fFlL]?/,tokenizer:{root:[[/[a-zA-Z_]\w*/,{cases:{"@keywords":{token:"keyword.$0"},"@default":"identifier"}}],{include:"@whitespace"},[/\[\[.*\]\]/,"annotation"],[/^\s*#\w+/,"keyword"],[/int\d*/,"keyword"],[/[{}()\[\]]/,"@brackets"],[/[<>](?!@symbols)/,"@brackets"],[/@symbols/,{cases:{"@operators":"delimiter","@default":""}}],[/\d*\d+[eE]([\-+]?\d+)?(@floatsuffix)/,"number.float"],[/\d*\.\d+([eE][\-+]?\d+)?(@floatsuffix)/,"number.float"],[/0[xX][0-9a-fA-F']*[0-9a-fA-F](@integersuffix)
/,"number.hex"],[/0[0-7']*[0-7](@integersuffix)/,"number.octal"],[/0[bB][0-1']*[0-1](@integersuffix)/,"number.binary"],[/\d[\d']*\d(@integersuffix)/,"number"],[/\d(@integersuffix)/,"number"],[/[;,.]/,"delimiter"],[/"([^"\\]|\\.)*$/,"string.invalid"],[/"/,"string","@string"],[/'[^\\']'/,"string"],[/(')(@escapes)(')/,["string","string.escape","string"]],[/'/,"string.invalid"]],whitespace:[[/[ \t\r\n]+/,""],[/\/\*\*(?!\/)/,"comment.doc","@doccomment"],[/\/\*/,"comment","@comment"],[/\/\/.*$/,"comment"]],comment:[[/[^\/*]+/,"comment"],[/\*\//,"comment","@pop"],[/[\/*]/,"comment"]],doccomment:[[/[^\/*]+/,"comment.doc"],[/\*\//,"comment.doc","@pop"],[/[\/*]/,"comment.doc"]],string:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/"/,"string","@pop"]]}}}}]); //# sourceMappingURL=1069.16bec71d.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/1069.16bec71d.chunk.js
1069.16bec71d.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[608],{20608:function(E,S,e){e.r(S),e.d(S,{conf:function(){return T},language:function(){return R}});var T={brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}]},R={defaultToken:"",tokenPostfix:".redis",ignoreCase:!0,brackets:[{open:"[",close:"]",token:"delimiter.square"},{open:"(",close:")",token:"delimiter.parenthesis"}],keywords:["APPEND","AUTH","BGREWRITEAOF","BGSAVE","BITCOUNT","BITFIELD","BITOP","BITPOS","BLPOP","BRPOP","BRPOPLPUSH","CLIENT","KILL","LIST","GETNAME","PAUSE","REPLY","SETNAME","CLUSTER","ADDSLOTS","COUNT-FAILURE-REPORTS","COUNTKEYSINSLOT","DELSLOTS","FAILOVER","FORGET","GETKEYSINSLOT","INFO","KEYSLOT","MEET","NODES","REPLICATE","RESET","SAVECONFIG","SET-CONFIG-EPOCH","SETSLOT","SLAVES","SLOTS","COMMAND","COUNT","GETKEYS","CONFIG","GET","REWRITE","SET","RESETSTAT","DBSIZE","DEBUG","OBJECT","SEGFAULT","DECR","DECRBY","DEL","DISCARD","DUMP","ECHO","EVAL","EVALSHA","EXEC","EXISTS","EXPIRE","EXPIREAT","FLUSHALL","FLUSHDB","GEOADD","GEOHASH","GEOPOS","GEODIST","GEORADIUS","GEORADIUSBYMEMBER","GETBIT","GETRANGE","GETSET","HDEL","HEXISTS","HGET","HGETALL","HINCRBY","HINCRBYFLOAT","HKEYS","HLEN","HMGET","HMSET","HSET","HSETNX","HSTRLEN","HVALS","INCR","INCRBY","INCRBYFLOAT","KEYS","LASTSAVE","LINDEX","LINSERT","LLEN","LPOP","LPUSH","LPUSHX","LRANGE","LREM","LSET","LTRIM","MGET","MIGRATE","MONITOR","MOVE","MSET","MSETNX","MULTI","PERSIST","PEXPIRE","PEXPIREAT","PFADD","PFCOUNT","PFMERGE","PING","PSETEX","PSUBSCRIBE","PUBSUB","PTTL","PUBLISH","PUNSUBSCRIBE","QUIT","RANDOMKEY","READONLY","READWRITE","RENAME","RENAMENX","RESTORE","ROLE","RPOP","RPOPLPUSH","RPUSH","RPUSHX","SADD","SAVE","SCARD","SCRIPT","FLUSH","LOAD","SDIFF","SDIF
FSTORE","SELECT","SETBIT","SETEX","SETNX","SETRANGE","SHUTDOWN","SINTER","SINTERSTORE","SISMEMBER","SLAVEOF","SLOWLOG","SMEMBERS","SMOVE","SORT","SPOP","SRANDMEMBER","SREM","STRLEN","SUBSCRIBE","SUNION","SUNIONSTORE","SWAPDB","SYNC","TIME","TOUCH","TTL","TYPE","UNSUBSCRIBE","UNLINK","UNWATCH","WAIT","WATCH","ZADD","ZCARD","ZCOUNT","ZINCRBY","ZINTERSTORE","ZLEXCOUNT","ZRANGE","ZRANGEBYLEX","ZREVRANGEBYLEX","ZRANGEBYSCORE","ZRANK","ZREM","ZREMRANGEBYLEX","ZREMRANGEBYRANK","ZREMRANGEBYSCORE","ZREVRANGE","ZREVRANGEBYSCORE","ZREVRANK","ZSCORE","ZUNIONSTORE","SCAN","SSCAN","HSCAN","ZSCAN"],operators:[],builtinFunctions:[],builtinVariables:[],pseudoColumns:[],tokenizer:{root:[{include:"@whitespace"},{include:"@pseudoColumns"},{include:"@numbers"},{include:"@strings"},{include:"@scopes"},[/[;,.]/,"delimiter"],[/[()]/,"@brackets"],[/[\w@#$]+/,{cases:{"@keywords":"keyword","@operators":"operator","@builtinVariables":"predefined","@builtinFunctions":"predefined","@default":"identifier"}}],[/[<>=!%&+\-*/|~^]/,"operator"]],whitespace:[[/\s+/,"white"]],pseudoColumns:[[/[$][A-Za-z_][\w@#$]*/,{cases:{"@pseudoColumns":"predefined","@default":"identifier"}}]],numbers:[[/0[xX][0-9a-fA-F]*/,"number"],[/[$][+-]*\d*(\.\d*)?/,"number"],[/((\d+(\.\d*)?)|(\.\d+))([eE][\-+]?\d+)?/,"number"]],strings:[[/'/,{token:"string",next:"@string"}],[/"/,{token:"string.double",next:"@stringDouble"}]],string:[[/[^']+/,"string"],[/''/,"string"],[/'/,{token:"string",next:"@pop"}]],stringDouble:[[/[^"]+/,"string.double"],[/""/,"string.double"],[/"/,{token:"string.double",next:"@pop"}]],scopes:[]}}}}]); //# sourceMappingURL=608.84469af3.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/608.84469af3.chunk.js
608.84469af3.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[9214],{79214:function(e,n,t){t.r(n),t.d(n,{CompletionAdapter:function(){return pn},DefinitionAdapter:function(){return In},DiagnosticsAdapter:function(){return hn},DocumentColorAdapter:function(){return jn},DocumentFormattingEditProvider:function(){return Ln},DocumentHighlightAdapter:function(){return xn},DocumentLinkAdapter:function(){return Pn},DocumentRangeFormattingEditProvider:function(){return Fn},DocumentSymbolAdapter:function(){return Dn},FoldingRangeAdapter:function(){return On},HoverAdapter:function(){return bn},ReferenceAdapter:function(){return Rn},RenameAdapter:function(){return Tn},SelectionRangeAdapter:function(){return Nn},WorkerManager:function(){return ge},fromPosition:function(){return mn},fromRange:function(){return _n},setupMode:function(){return Un},toRange:function(){return kn},toTextEdit:function(){return yn}});var r,i,o=t(15671),a=t(43144),s=t(4942),u=t(37762),c=t(94389),d=Object.defineProperty,g=Object.getOwnPropertyDescriptor,f=Object.getOwnPropertyNames,l=Object.prototype.hasOwnProperty,h=function(e,n,t,r){if(n&&"object"===typeof n||"function"===typeof n){var i,o=(0,u.Z)(f(n));try{var a=function(){var o=i.value;l.call(e,o)||o===t||d(e,o,{get:function(){return n[o]},enumerable:!(r=g(n,o))||r.enumerable})};for(o.s();!(i=o.n()).done;)a()}catch(s){o.e(s)}finally{o.f()}}return e},v={};h(v,r=c,"default"),i&&h(i,r,"default");var p,m,_,k,w,y,b,E,C,x,A,I,S,R,T,D,M,P,L,F,Z,j,O,N,U,W,V,H,K,z,X,B,$,q,Q,G,J,Y,ee,ne,te,re,ie,oe,ae,se,ue,ce,de,ge=function(){function e(n){var t=this;(0,o.Z)(this,e),(0,s.Z)(this,"_defaults",void 0),(0,s.Z)(this,"_idleCheckInterval",void 0),(0,s.Z)(this,"_lastUsedTime",void 0),(0,s.Z)(this,"_configChangeListener",void 0),(0,s.Z)(this,"_worker",void 0),(0,s.Z)(this,"_client",void 0),this._defaults=n,this._worker=null,this._client=null,this._idleCheckInterval=window.setInterval((function(){return 
t._checkIfIdle()}),3e4),this._lastUsedTime=0,this._configChangeListener=this._defaults.onDidChange((function(){return t._stopWorker()}))}return(0,a.Z)(e,[{key:"_stopWorker",value:function(){this._worker&&(this._worker.dispose(),this._worker=null),this._client=null}},{key:"dispose",value:function(){clearInterval(this._idleCheckInterval),this._configChangeListener.dispose(),this._stopWorker()}},{key:"_checkIfIdle",value:function(){this._worker&&(Date.now()-this._lastUsedTime>12e4&&this._stopWorker())}},{key:"_getClient",value:function(){return this._lastUsedTime=Date.now(),this._client||(this._worker=v.editor.createWebWorker({moduleId:"vs/language/css/cssWorker",label:this._defaults.languageId,createData:{options:this._defaults.options,languageId:this._defaults.languageId}}),this._client=this._worker.getProxy()),this._client}},{key:"getLanguageServiceWorker",value:function(){for(var e,n=this,t=arguments.length,r=new Array(t),i=0;i<t;i++)r[i]=arguments[i];return this._getClient().then((function(n){e=n})).then((function(e){if(n._worker)return n._worker.withSyncedResources(r)})).then((function(n){return e}))}}]),e}();(m=p||(p={})).MIN_VALUE=-2147483648,m.MAX_VALUE=2147483647,(k=_||(_={})).MIN_VALUE=0,k.MAX_VALUE=2147483647,(y=w||(w={})).create=function(e,n){return e===Number.MAX_VALUE&&(e=_.MAX_VALUE),n===Number.MAX_VALUE&&(n=_.MAX_VALUE),{line:e,character:n}},y.is=function(e){var n=e;return fn.objectLiteral(n)&&fn.uinteger(n.line)&&fn.uinteger(n.character)},(E=b||(b={})).create=function(e,n,t,r){if(fn.uinteger(e)&&fn.uinteger(n)&&fn.uinteger(t)&&fn.uinteger(r))return{start:w.create(e,n),end:w.create(t,r)};if(w.is(e)&&w.is(n))return{start:e,end:n};throw new Error("Range#create called with invalid arguments["+e+", "+n+", "+t+", "+r+"]")},E.is=function(e){var n=e;return fn.objectLiteral(n)&&w.is(n.start)&&w.is(n.end)},(x=C||(C={})).create=function(e,n){return{uri:e,range:n}},x.is=function(e){var n=e;return 
fn.defined(n)&&b.is(n.range)&&(fn.string(n.uri)||fn.undefined(n.uri))},(I=A||(A={})).create=function(e,n,t,r){return{targetUri:e,targetRange:n,targetSelectionRange:t,originSelectionRange:r}},I.is=function(e){var n=e;return fn.defined(n)&&b.is(n.targetRange)&&fn.string(n.targetUri)&&(b.is(n.targetSelectionRange)||fn.undefined(n.targetSelectionRange))&&(b.is(n.originSelectionRange)||fn.undefined(n.originSelectionRange))},(R=S||(S={})).create=function(e,n,t,r){return{red:e,green:n,blue:t,alpha:r}},R.is=function(e){var n=e;return fn.numberRange(n.red,0,1)&&fn.numberRange(n.green,0,1)&&fn.numberRange(n.blue,0,1)&&fn.numberRange(n.alpha,0,1)},(D=T||(T={})).create=function(e,n){return{range:e,color:n}},D.is=function(e){var n=e;return b.is(n.range)&&S.is(n.color)},(P=M||(M={})).create=function(e,n,t){return{label:e,textEdit:n,additionalTextEdits:t}},P.is=function(e){var n=e;return fn.string(n.label)&&(fn.undefined(n.textEdit)||q.is(n))&&(fn.undefined(n.additionalTextEdits)||fn.typedArray(n.additionalTextEdits,q.is))},(F=L||(L={})).Comment="comment",F.Imports="imports",F.Region="region",(j=Z||(Z={})).create=function(e,n,t,r,i){var o={startLine:e,endLine:n};return fn.defined(t)&&(o.startCharacter=t),fn.defined(r)&&(o.endCharacter=r),fn.defined(i)&&(o.kind=i),o},j.is=function(e){var n=e;return fn.uinteger(n.startLine)&&fn.uinteger(n.startLine)&&(fn.undefined(n.startCharacter)||fn.uinteger(n.startCharacter))&&(fn.undefined(n.endCharacter)||fn.uinteger(n.endCharacter))&&(fn.undefined(n.kind)||fn.string(n.kind))},(N=O||(O={})).create=function(e,n){return{location:e,message:n}},N.is=function(e){var n=e;return fn.defined(n)&&C.is(n.location)&&fn.string(n.message)},(W=U||(U={})).Error=1,W.Warning=2,W.Information=3,W.Hint=4,(H=V||(V={})).Unnecessary=1,H.Deprecated=2,(K||(K={})).is=function(e){var n=e;return void 0!==n&&null!==n&&fn.string(n.href)},(X=z||(z={})).create=function(e,n,t,r,i,o){var a={range:e,message:n};return 
fn.defined(t)&&(a.severity=t),fn.defined(r)&&(a.code=r),fn.defined(i)&&(a.source=i),fn.defined(o)&&(a.relatedInformation=o),a},X.is=function(e){var n,t=e;return fn.defined(t)&&b.is(t.range)&&fn.string(t.message)&&(fn.number(t.severity)||fn.undefined(t.severity))&&(fn.integer(t.code)||fn.string(t.code)||fn.undefined(t.code))&&(fn.undefined(t.codeDescription)||fn.string(null===(n=t.codeDescription)||void 0===n?void 0:n.href))&&(fn.string(t.source)||fn.undefined(t.source))&&(fn.undefined(t.relatedInformation)||fn.typedArray(t.relatedInformation,O.is))},($=B||(B={})).create=function(e,n){for(var t=[],r=2;r<arguments.length;r++)t[r-2]=arguments[r];var i={title:e,command:n};return fn.defined(t)&&t.length>0&&(i.arguments=t),i},$.is=function(e){var n=e;return fn.defined(n)&&fn.string(n.title)&&fn.string(n.command)},(Q=q||(q={})).replace=function(e,n){return{range:e,newText:n}},Q.insert=function(e,n){return{range:{start:e,end:e},newText:n}},Q.del=function(e){return{range:e,newText:""}},Q.is=function(e){var n=e;return fn.objectLiteral(n)&&fn.string(n.newText)&&b.is(n.range)},(J=G||(G={})).create=function(e,n,t){var r={label:e};return void 0!==n&&(r.needsConfirmation=n),void 0!==t&&(r.description=t),r},J.is=function(e){var n=e;return void 0!==n&&fn.objectLiteral(n)&&fn.string(n.label)&&(fn.boolean(n.needsConfirmation)||void 0===n.needsConfirmation)&&(fn.string(n.description)||void 0===n.description)},(Y||(Y={})).is=function(e){return"string"===typeof e},(ne=ee||(ee={})).replace=function(e,n,t){return{range:e,newText:n,annotationId:t}},ne.insert=function(e,n,t){return{range:{start:e,end:e},newText:n,annotationId:t}},ne.del=function(e,n){return{range:e,newText:"",annotationId:n}},ne.is=function(e){var n=e;return q.is(n)&&(G.is(n.annotationId)||Y.is(n.annotationId))},(re=te||(te={})).create=function(e,n){return{textDocument:e,edits:n}},re.is=function(e){var n=e;return fn.defined(n)&&pe.is(n.textDocument)&&Array.isArray(n.edits)},(oe=ie||(ie={})).create=function(e,n,t){var 
r={kind:"create",uri:e};return void 0===n||void 0===n.overwrite&&void 0===n.ignoreIfExists||(r.options=n),void 0!==t&&(r.annotationId=t),r},oe.is=function(e){var n=e;return n&&"create"===n.kind&&fn.string(n.uri)&&(void 0===n.options||(void 0===n.options.overwrite||fn.boolean(n.options.overwrite))&&(void 0===n.options.ignoreIfExists||fn.boolean(n.options.ignoreIfExists)))&&(void 0===n.annotationId||Y.is(n.annotationId))},(se=ae||(ae={})).create=function(e,n,t,r){var i={kind:"rename",oldUri:e,newUri:n};return void 0===t||void 0===t.overwrite&&void 0===t.ignoreIfExists||(i.options=t),void 0!==r&&(i.annotationId=r),i},se.is=function(e){var n=e;return n&&"rename"===n.kind&&fn.string(n.oldUri)&&fn.string(n.newUri)&&(void 0===n.options||(void 0===n.options.overwrite||fn.boolean(n.options.overwrite))&&(void 0===n.options.ignoreIfExists||fn.boolean(n.options.ignoreIfExists)))&&(void 0===n.annotationId||Y.is(n.annotationId))},(ce=ue||(ue={})).create=function(e,n,t){var r={kind:"delete",uri:e};return void 0===n||void 0===n.recursive&&void 0===n.ignoreIfNotExists||(r.options=n),void 0!==t&&(r.annotationId=t),r},ce.is=function(e){var n=e;return n&&"delete"===n.kind&&fn.string(n.uri)&&(void 0===n.options||(void 0===n.options.recursive||fn.boolean(n.options.recursive))&&(void 0===n.options.ignoreIfNotExists||fn.boolean(n.options.ignoreIfNotExists)))&&(void 0===n.annotationId||Y.is(n.annotationId))},(de||(de={})).is=function(e){var n=e;return n&&(void 0!==n.changes||void 0!==n.documentChanges)&&(void 0===n.documentChanges||n.documentChanges.every((function(e){return fn.string(e.kind)?ie.is(e)||ae.is(e)||ue.is(e):te.is(e)})))};var fe,le,he,ve,pe,me,_e,ke,we,ye,be,Ee,Ce,xe,Ae,Ie,Se,Re,Te,De,Me,Pe,Le,Fe,Ze,je,Oe,Ne,Ue,We,Ve,He,Ke,ze,Xe,Be,$e,qe,Qe,Ge,Je,Ye,en,nn,tn,rn,on,an,sn,un,cn,dn=function(){function e(e,n){this.edits=e,this.changeAnnotations=n}return e.prototype.insert=function(e,n,t){var r,i;if(void 
0===t?r=q.insert(e,n):Y.is(t)?(i=t,r=ee.insert(e,n,t)):(this.assertChangeAnnotations(this.changeAnnotations),i=this.changeAnnotations.manage(t),r=ee.insert(e,n,i)),this.edits.push(r),void 0!==i)return i},e.prototype.replace=function(e,n,t){var r,i;if(void 0===t?r=q.replace(e,n):Y.is(t)?(i=t,r=ee.replace(e,n,t)):(this.assertChangeAnnotations(this.changeAnnotations),i=this.changeAnnotations.manage(t),r=ee.replace(e,n,i)),this.edits.push(r),void 0!==i)return i},e.prototype.delete=function(e,n){var t,r;if(void 0===n?t=q.del(e):Y.is(n)?(r=n,t=ee.del(e,n)):(this.assertChangeAnnotations(this.changeAnnotations),r=this.changeAnnotations.manage(n),t=ee.del(e,r)),this.edits.push(t),void 0!==r)return r},e.prototype.add=function(e){this.edits.push(e)},e.prototype.all=function(){return this.edits},e.prototype.clear=function(){this.edits.splice(0,this.edits.length)},e.prototype.assertChangeAnnotations=function(e){if(void 0===e)throw new Error("Text edit change is not configured to manage change annotations.")},e}(),gn=function(){function e(e){this._annotations=void 0===e?Object.create(null):e,this._counter=0,this._size=0}return e.prototype.all=function(){return this._annotations},Object.defineProperty(e.prototype,"size",{get:function(){return this._size},enumerable:!1,configurable:!0}),e.prototype.manage=function(e,n){var t;if(Y.is(e)?t=e:(t=this.nextId(),n=e),void 0!==this._annotations[t])throw new Error("Id "+t+" is already in use.");if(void 0===n)throw new Error("No annotation provided for id "+t);return this._annotations[t]=n,this._size++,t},e.prototype.nextId=function(){return this._counter++,this._counter.toString()},e}();!function(){function e(e){var n=this;this._textEditChanges=Object.create(null),void 0!==e?(this._workspaceEdit=e,e.documentChanges?(this._changeAnnotations=new gn(e.changeAnnotations),e.changeAnnotations=this._changeAnnotations.all(),e.documentChanges.forEach((function(e){if(te.is(e)){var t=new 
dn(e.edits,n._changeAnnotations);n._textEditChanges[e.textDocument.uri]=t}}))):e.changes&&Object.keys(e.changes).forEach((function(t){var r=new dn(e.changes[t]);n._textEditChanges[t]=r}))):this._workspaceEdit={}}Object.defineProperty(e.prototype,"edit",{get:function(){return this.initDocumentChanges(),void 0!==this._changeAnnotations&&(0===this._changeAnnotations.size?this._workspaceEdit.changeAnnotations=void 0:this._workspaceEdit.changeAnnotations=this._changeAnnotations.all()),this._workspaceEdit},enumerable:!1,configurable:!0}),e.prototype.getTextEditChange=function(e){if(pe.is(e)){if(this.initDocumentChanges(),void 0===this._workspaceEdit.documentChanges)throw new Error("Workspace edit is not configured for document changes.");var n={uri:e.uri,version:e.version};if(!(r=this._textEditChanges[n.uri])){var t={textDocument:n,edits:i=[]};this._workspaceEdit.documentChanges.push(t),r=new dn(i,this._changeAnnotations),this._textEditChanges[n.uri]=r}return r}if(this.initChanges(),void 0===this._workspaceEdit.changes)throw new Error("Workspace edit is not configured for normal text edit changes.");var r;if(!(r=this._textEditChanges[e])){var i=[];this._workspaceEdit.changes[e]=i,r=new dn(i),this._textEditChanges[e]=r}return r},e.prototype.initDocumentChanges=function(){void 0===this._workspaceEdit.documentChanges&&void 0===this._workspaceEdit.changes&&(this._changeAnnotations=new gn,this._workspaceEdit.documentChanges=[],this._workspaceEdit.changeAnnotations=this._changeAnnotations.all())},e.prototype.initChanges=function(){void 0===this._workspaceEdit.documentChanges&&void 0===this._workspaceEdit.changes&&(this._workspaceEdit.changes=Object.create(null))},e.prototype.createFile=function(e,n,t){if(this.initDocumentChanges(),void 0===this._workspaceEdit.documentChanges)throw new Error("Workspace edit is not configured for document changes.");var r,i,o;if(G.is(n)||Y.is(n)?r=n:t=n,void 
0===r?i=ie.create(e,t):(o=Y.is(r)?r:this._changeAnnotations.manage(r),i=ie.create(e,t,o)),this._workspaceEdit.documentChanges.push(i),void 0!==o)return o},e.prototype.renameFile=function(e,n,t,r){if(this.initDocumentChanges(),void 0===this._workspaceEdit.documentChanges)throw new Error("Workspace edit is not configured for document changes.");var i,o,a;if(G.is(t)||Y.is(t)?i=t:r=t,void 0===i?o=ae.create(e,n,r):(a=Y.is(i)?i:this._changeAnnotations.manage(i),o=ae.create(e,n,r,a)),this._workspaceEdit.documentChanges.push(o),void 0!==a)return a},e.prototype.deleteFile=function(e,n,t){if(this.initDocumentChanges(),void 0===this._workspaceEdit.documentChanges)throw new Error("Workspace edit is not configured for document changes.");var r,i,o;if(G.is(n)||Y.is(n)?r=n:t=n,void 0===r?i=ue.create(e,t):(o=Y.is(r)?r:this._changeAnnotations.manage(r),i=ue.create(e,t,o)),this._workspaceEdit.documentChanges.push(i),void 0!==o)return o}}();(le=fe||(fe={})).create=function(e){return{uri:e}},le.is=function(e){var n=e;return fn.defined(n)&&fn.string(n.uri)},(ve=he||(he={})).create=function(e,n){return{uri:e,version:n}},ve.is=function(e){var n=e;return fn.defined(n)&&fn.string(n.uri)&&fn.integer(n.version)},(me=pe||(pe={})).create=function(e,n){return{uri:e,version:n}},me.is=function(e){var n=e;return fn.defined(n)&&fn.string(n.uri)&&(null===n.version||fn.integer(n.version))},(ke=_e||(_e={})).create=function(e,n,t,r){return{uri:e,languageId:n,version:t,text:r}},ke.is=function(e){var n=e;return fn.defined(n)&&fn.string(n.uri)&&fn.string(n.languageId)&&fn.integer(n.version)&&fn.string(n.text)},(ye=we||(we={})).PlainText="plaintext",ye.Markdown="markdown",function(e){e.is=function(n){var t=n;return t===e.PlainText||t===e.Markdown}}(we||(we={})),(be||(be={})).is=function(e){var n=e;return 
fn.objectLiteral(e)&&we.is(n.kind)&&fn.string(n.value)},(Ce=Ee||(Ee={})).Text=1,Ce.Method=2,Ce.Function=3,Ce.Constructor=4,Ce.Field=5,Ce.Variable=6,Ce.Class=7,Ce.Interface=8,Ce.Module=9,Ce.Property=10,Ce.Unit=11,Ce.Value=12,Ce.Enum=13,Ce.Keyword=14,Ce.Snippet=15,Ce.Color=16,Ce.File=17,Ce.Reference=18,Ce.Folder=19,Ce.EnumMember=20,Ce.Constant=21,Ce.Struct=22,Ce.Event=23,Ce.Operator=24,Ce.TypeParameter=25,(Ae=xe||(xe={})).PlainText=1,Ae.Snippet=2,(Ie||(Ie={})).Deprecated=1,(Re=Se||(Se={})).create=function(e,n,t){return{newText:e,insert:n,replace:t}},Re.is=function(e){var n=e;return n&&fn.string(n.newText)&&b.is(n.insert)&&b.is(n.replace)},(De=Te||(Te={})).asIs=1,De.adjustIndentation=2,(Me||(Me={})).create=function(e){return{label:e}},(Pe||(Pe={})).create=function(e,n){return{items:e||[],isIncomplete:!!n}},(Fe=Le||(Le={})).fromPlainText=function(e){return e.replace(/[\\`*_{}[\]()#+\-.!]/g,"\\$&")},Fe.is=function(e){var n=e;return fn.string(n)||fn.objectLiteral(n)&&fn.string(n.language)&&fn.string(n.value)},(Ze||(Ze={})).is=function(e){var n=e;return!!n&&fn.objectLiteral(n)&&(be.is(n.contents)||Le.is(n.contents)||fn.typedArray(n.contents,Le.is))&&(void 0===e.range||b.is(e.range))},(je||(je={})).create=function(e,n){return n?{label:e,documentation:n}:{label:e}},(Oe||(Oe={})).create=function(e,n){for(var t=[],r=2;r<arguments.length;r++)t[r-2]=arguments[r];var i={label:e};return fn.defined(n)&&(i.documentation=n),fn.defined(t)?i.parameters=t:i.parameters=[],i},(Ue=Ne||(Ne={})).Text=1,Ue.Read=2,Ue.Write=3,(We||(We={})).create=function(e,n){var t={range:e};return 
fn.number(n)&&(t.kind=n),t},(He=Ve||(Ve={})).File=1,He.Module=2,He.Namespace=3,He.Package=4,He.Class=5,He.Method=6,He.Property=7,He.Field=8,He.Constructor=9,He.Enum=10,He.Interface=11,He.Function=12,He.Variable=13,He.Constant=14,He.String=15,He.Number=16,He.Boolean=17,He.Array=18,He.Object=19,He.Key=20,He.Null=21,He.EnumMember=22,He.Struct=23,He.Event=24,He.Operator=25,He.TypeParameter=26,(Ke||(Ke={})).Deprecated=1,(ze||(ze={})).create=function(e,n,t,r,i){var o={name:e,kind:n,location:{uri:r,range:t}};return i&&(o.containerName=i),o},(Be=Xe||(Xe={})).create=function(e,n,t,r,i,o){var a={name:e,detail:n,kind:t,range:r,selectionRange:i};return void 0!==o&&(a.children=o),a},Be.is=function(e){var n=e;return n&&fn.string(n.name)&&fn.number(n.kind)&&b.is(n.range)&&b.is(n.selectionRange)&&(void 0===n.detail||fn.string(n.detail))&&(void 0===n.deprecated||fn.boolean(n.deprecated))&&(void 0===n.children||Array.isArray(n.children))&&(void 0===n.tags||Array.isArray(n.tags))},(qe=$e||($e={})).Empty="",qe.QuickFix="quickfix",qe.Refactor="refactor",qe.RefactorExtract="refactor.extract",qe.RefactorInline="refactor.inline",qe.RefactorRewrite="refactor.rewrite",qe.Source="source",qe.SourceOrganizeImports="source.organizeImports",qe.SourceFixAll="source.fixAll",(Ge=Qe||(Qe={})).create=function(e,n){var t={diagnostics:e};return void 0!==n&&null!==n&&(t.only=n),t},Ge.is=function(e){var n=e;return fn.defined(n)&&fn.typedArray(n.diagnostics,z.is)&&(void 0===n.only||fn.typedArray(n.only,fn.string))},(Ye=Je||(Je={})).create=function(e,n,t){var r={title:e},i=!0;return"string"===typeof n?(i=!1,r.kind=n):B.is(n)?r.command=n:r.edit=n,i&&void 0!==t&&(r.kind=t),r},Ye.is=function(e){var n=e;return n&&fn.string(n.title)&&(void 0===n.diagnostics||fn.typedArray(n.diagnostics,z.is))&&(void 0===n.kind||fn.string(n.kind))&&(void 0!==n.edit||void 0!==n.command)&&(void 0===n.command||B.is(n.command))&&(void 0===n.isPreferred||fn.boolean(n.isPreferred))&&(void 
0===n.edit||de.is(n.edit))},(nn=en||(en={})).create=function(e,n){var t={range:e};return fn.defined(n)&&(t.data=n),t},nn.is=function(e){var n=e;return fn.defined(n)&&b.is(n.range)&&(fn.undefined(n.command)||B.is(n.command))},(rn=tn||(tn={})).create=function(e,n){return{tabSize:e,insertSpaces:n}},rn.is=function(e){var n=e;return fn.defined(n)&&fn.uinteger(n.tabSize)&&fn.boolean(n.insertSpaces)},(an=on||(on={})).create=function(e,n,t){return{range:e,target:n,data:t}},an.is=function(e){var n=e;return fn.defined(n)&&b.is(n.range)&&(fn.undefined(n.target)||fn.string(n.target))},(un=sn||(sn={})).create=function(e,n){return{range:e,parent:n}},un.is=function(e){var n=e;return void 0!==n&&b.is(n.range)&&(void 0===n.parent||un.is(n.parent))},function(e){function n(e,t){if(e.length<=1)return e;var r=e.length/2|0,i=e.slice(0,r),o=e.slice(r);n(i,t),n(o,t);for(var a=0,s=0,u=0;a<i.length&&s<o.length;){var c=t(i[a],o[s]);e[u++]=c<=0?i[a++]:o[s++]}for(;a<i.length;)e[u++]=i[a++];for(;s<o.length;)e[u++]=o[s++];return e}e.create=function(e,n,t,r){return new ln(e,n,t,r)},e.is=function(e){var n=e;return!!(fn.defined(n)&&fn.string(n.uri)&&(fn.undefined(n.languageId)||fn.string(n.languageId))&&fn.uinteger(n.lineCount)&&fn.func(n.getText)&&fn.func(n.positionAt)&&fn.func(n.offsetAt))},e.applyEdits=function(e,t){for(var r=e.getText(),i=n(t,(function(e,n){var t=e.range.start.line-n.range.start.line;return 0===t?e.range.start.character-n.range.start.character:t})),o=r.length,a=i.length-1;a>=0;a--){var s=i[a],u=e.offsetAt(s.range.start),c=e.offsetAt(s.range.end);if(!(c<=o))throw new Error("Overlapping edit");r=r.substring(0,u)+s.newText+r.substring(c,r.length),o=u}return r}}(cn||(cn={}));var fn,ln=function(){function e(e,n,t,r){this._uri=e,this._languageId=n,this._version=t,this._content=r,this._lineOffsets=void 0}return Object.defineProperty(e.prototype,"uri",{get:function(){return this._uri},enumerable:!1,configurable:!0}),Object.defineProperty(e.prototype,"languageId",{get:function(){return 
this._languageId},enumerable:!1,configurable:!0}),Object.defineProperty(e.prototype,"version",{get:function(){return this._version},enumerable:!1,configurable:!0}),e.prototype.getText=function(e){if(e){var n=this.offsetAt(e.start),t=this.offsetAt(e.end);return this._content.substring(n,t)}return this._content},e.prototype.update=function(e,n){this._content=e.text,this._version=n,this._lineOffsets=void 0},e.prototype.getLineOffsets=function(){if(void 0===this._lineOffsets){for(var e=[],n=this._content,t=!0,r=0;r<n.length;r++){t&&(e.push(r),t=!1);var i=n.charAt(r);t="\r"===i||"\n"===i,"\r"===i&&r+1<n.length&&"\n"===n.charAt(r+1)&&r++}t&&n.length>0&&e.push(n.length),this._lineOffsets=e}return this._lineOffsets},e.prototype.positionAt=function(e){e=Math.max(Math.min(e,this._content.length),0);var n=this.getLineOffsets(),t=0,r=n.length;if(0===r)return w.create(0,e);for(;t<r;){var i=Math.floor((t+r)/2);n[i]>e?r=i:t=i+1}var o=t-1;return w.create(o,e-n[o])},e.prototype.offsetAt=function(e){var n=this.getLineOffsets();if(e.line>=n.length)return this._content.length;if(e.line<0)return 0;var t=n[e.line],r=e.line+1<n.length?n[e.line+1]:this._content.length;return Math.max(Math.min(t+e.character,r),t)},Object.defineProperty(e.prototype,"lineCount",{get:function(){return this.getLineOffsets().length},enumerable:!1,configurable:!0}),e}();!function(e){var n=Object.prototype.toString;e.defined=function(e){return"undefined"!==typeof e},e.undefined=function(e){return"undefined"===typeof e},e.boolean=function(e){return!0===e||!1===e},e.string=function(e){return"[object String]"===n.call(e)},e.number=function(e){return"[object Number]"===n.call(e)},e.numberRange=function(e,t,r){return"[object Number]"===n.call(e)&&t<=e&&e<=r},e.integer=function(e){return"[object Number]"===n.call(e)&&-2147483648<=e&&e<=2147483647},e.uinteger=function(e){return"[object Number]"===n.call(e)&&0<=e&&e<=2147483647},e.func=function(e){return"[object Function]"===n.call(e)},e.objectLiteral=function(e){return 
null!==e&&"object"===typeof e},e.typedArray=function(e,n){return Array.isArray(e)&&e.every(n)}}(fn||(fn={}));var hn=function(){function e(n,t,r){var i=this;(0,o.Z)(this,e),(0,s.Z)(this,"_disposables",[]),(0,s.Z)(this,"_listener",Object.create(null)),this._languageId=n,this._worker=t;var a=function(e){var n,t=e.getLanguageId();t===i._languageId&&(i._listener[e.uri.toString()]=e.onDidChangeContent((function(){window.clearTimeout(n),n=window.setTimeout((function(){return i._doValidate(e.uri,t)}),500)})),i._doValidate(e.uri,t))},u=function(e){v.editor.setModelMarkers(e,i._languageId,[]);var n=e.uri.toString(),t=i._listener[n];t&&(t.dispose(),delete i._listener[n])};this._disposables.push(v.editor.onDidCreateModel(a)),this._disposables.push(v.editor.onWillDisposeModel(u)),this._disposables.push(v.editor.onDidChangeModelLanguage((function(e){u(e.model),a(e.model)}))),this._disposables.push(r((function(e){v.editor.getModels().forEach((function(e){e.getLanguageId()===i._languageId&&(u(e),a(e))}))}))),this._disposables.push({dispose:function(){for(var e in v.editor.getModels().forEach(u),i._listener)i._listener[e].dispose()}}),v.editor.getModels().forEach(a)}return(0,a.Z)(e,[{key:"dispose",value:function(){this._disposables.forEach((function(e){return e&&e.dispose()})),this._disposables.length=0}},{key:"_doValidate",value:function(e,n){this._worker(e).then((function(n){return n.doValidation(e.toString())})).then((function(t){var r=t.map((function(e){return function(e,n){var t="number"===typeof n.code?String(n.code):n.code;return{severity:vn(n.severity),startLineNumber:n.range.start.line+1,startColumn:n.range.start.character+1,endLineNumber:n.range.end.line+1,endColumn:n.range.end.character+1,message:n.message,code:t,source:n.source}}(0,e)})),i=v.editor.getModel(e);i&&i.getLanguageId()===n&&v.editor.setModelMarkers(i,n,r)})).then(void 0,(function(e){console.error(e)}))}}]),e}();function vn(e){switch(e){case U.Error:return v.MarkerSeverity.Error;case U.Warning:return 
v.MarkerSeverity.Warning;case U.Information:return v.MarkerSeverity.Info;case U.Hint:return v.MarkerSeverity.Hint;default:return v.MarkerSeverity.Info}}var pn=function(){function e(n,t){(0,o.Z)(this,e),this._worker=n,this._triggerCharacters=t}return(0,a.Z)(e,[{key:"triggerCharacters",get:function(){return this._triggerCharacters}},{key:"provideCompletionItems",value:function(e,n,t,r){var i=e.uri;return this._worker(i).then((function(e){return e.doComplete(i.toString(),mn(n))})).then((function(t){if(t){var r=e.getWordUntilPosition(n),i=new v.Range(n.lineNumber,r.startColumn,n.lineNumber,r.endColumn),o=t.items.map((function(e){var n,t,r={label:e.label,insertText:e.insertText||e.label,sortText:e.sortText,filterText:e.filterText,documentation:e.documentation,detail:e.detail,command:(n=e.command,n&&"editor.action.triggerSuggest"===n.command?{id:n.command,title:n.title,arguments:n.arguments}:void 0),range:i,kind:wn(e.kind)};return e.textEdit&&("undefined"!==typeof(t=e.textEdit).insert&&"undefined"!==typeof t.replace?r.range={insert:kn(e.textEdit.insert),replace:kn(e.textEdit.replace)}:r.range=kn(e.textEdit.range),r.insertText=e.textEdit.newText),e.additionalTextEdits&&(r.additionalTextEdits=e.additionalTextEdits.map(yn)),e.insertTextFormat===xe.Snippet&&(r.insertTextRules=v.languages.CompletionItemInsertTextRule.InsertAsSnippet),r}));return{isIncomplete:t.isIncomplete,suggestions:o}}}))}}]),e}();function mn(e){if(e)return{character:e.column-1,line:e.lineNumber-1}}function _n(e){if(e)return{start:{line:e.startLineNumber-1,character:e.startColumn-1},end:{line:e.endLineNumber-1,character:e.endColumn-1}}}function kn(e){if(e)return new v.Range(e.start.line+1,e.start.character+1,e.end.line+1,e.end.character+1)}function wn(e){var n=v.languages.CompletionItemKind;switch(e){case Ee.Text:return n.Text;case Ee.Method:return n.Method;case Ee.Function:return n.Function;case Ee.Constructor:return n.Constructor;case Ee.Field:return n.Field;case Ee.Variable:return n.Variable;case 
Ee.Class:return n.Class;case Ee.Interface:return n.Interface;case Ee.Module:return n.Module;case Ee.Property:return n.Property;case Ee.Unit:return n.Unit;case Ee.Value:return n.Value;case Ee.Enum:return n.Enum;case Ee.Keyword:return n.Keyword;case Ee.Snippet:return n.Snippet;case Ee.Color:return n.Color;case Ee.File:return n.File;case Ee.Reference:return n.Reference}return n.Property}function yn(e){if(e)return{range:kn(e.range),text:e.newText}}var bn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideHover",value:function(e,n,t){var r=e.uri;return this._worker(r).then((function(e){return e.doHover(r.toString(),mn(n))})).then((function(e){if(e)return{range:kn(e.range),contents:Cn(e.contents)}}))}}]),e}();function En(e){return"string"===typeof e?{value:e}:(n=e)&&"object"===typeof n&&"string"===typeof n.kind?"plaintext"===e.kind?{value:e.value.replace(/[\\`*_{}[\]()#+\-.!]/g,"\\$&")}:{value:e.value}:{value:"```"+e.language+"\n"+e.value+"\n```\n"};var n}function Cn(e){if(e)return Array.isArray(e)?e.map(En):[En(e)]}var xn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideDocumentHighlights",value:function(e,n,t){var r=e.uri;return this._worker(r).then((function(e){return e.findDocumentHighlights(r.toString(),mn(n))})).then((function(e){if(e)return e.map((function(e){return{range:kn(e.range),kind:An(e.kind)}}))}))}}]),e}();function An(e){switch(e){case Ne.Read:return v.languages.DocumentHighlightKind.Read;case Ne.Write:return v.languages.DocumentHighlightKind.Write;case Ne.Text:return v.languages.DocumentHighlightKind.Text}return v.languages.DocumentHighlightKind.Text}var In=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideDefinition",value:function(e,n,t){var r=e.uri;return this._worker(r).then((function(e){return e.findDefinition(r.toString(),mn(n))})).then((function(e){if(e)return[Sn(e)]}))}}]),e}();function 
Sn(e){return{uri:v.Uri.parse(e.uri),range:kn(e.range)}}var Rn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideReferences",value:function(e,n,t,r){var i=e.uri;return this._worker(i).then((function(e){return e.findReferences(i.toString(),mn(n))})).then((function(e){if(e)return e.map(Sn)}))}}]),e}(),Tn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideRenameEdits",value:function(e,n,t,r){var i=e.uri;return this._worker(i).then((function(e){return e.doRename(i.toString(),mn(n),t)})).then((function(e){return function(e){if(!e||!e.changes)return;var n=[];for(var t in e.changes){var r,i=v.Uri.parse(t),o=(0,u.Z)(e.changes[t]);try{for(o.s();!(r=o.n()).done;){var a=r.value;n.push({resource:i,versionId:void 0,textEdit:{range:kn(a.range),text:a.newText}})}}catch(s){o.e(s)}finally{o.f()}}return{edits:n}}(e)}))}}]),e}();var Dn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideDocumentSymbols",value:function(e,n){var t=e.uri;return this._worker(t).then((function(e){return e.findDocumentSymbols(t.toString())})).then((function(e){if(e)return e.map((function(e){return{name:e.name,detail:"",containerName:e.containerName,kind:Mn(e.kind),range:kn(e.location.range),selectionRange:kn(e.location.range),tags:[]}}))}))}}]),e}();function Mn(e){var n=v.languages.SymbolKind;switch(e){case Ve.File:return n.Array;case Ve.Module:return n.Module;case Ve.Namespace:return n.Namespace;case Ve.Package:return n.Package;case Ve.Class:return n.Class;case Ve.Method:return n.Method;case Ve.Property:return n.Property;case Ve.Field:return n.Field;case Ve.Constructor:return n.Constructor;case Ve.Enum:return n.Enum;case Ve.Interface:return n.Interface;case Ve.Function:return n.Function;case Ve.Variable:return n.Variable;case Ve.Constant:return n.Constant;case Ve.String:return n.String;case Ve.Number:return n.Number;case Ve.Boolean:return n.Boolean;case Ve.Array:return n.Array}return n.Function}var 
Pn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideLinks",value:function(e,n){var t=e.uri;return this._worker(t).then((function(e){return e.findDocumentLinks(t.toString())})).then((function(e){if(e)return{links:e.map((function(e){return{range:kn(e.range),url:e.target}}))}}))}}]),e}(),Ln=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideDocumentFormattingEdits",value:function(e,n,t){var r=e.uri;return this._worker(r).then((function(e){return e.format(r.toString(),null,Zn(n)).then((function(e){if(e&&0!==e.length)return e.map(yn)}))}))}}]),e}(),Fn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideDocumentRangeFormattingEdits",value:function(e,n,t,r){var i=e.uri;return this._worker(i).then((function(e){return e.format(i.toString(),_n(n),Zn(t)).then((function(e){if(e&&0!==e.length)return e.map(yn)}))}))}}]),e}();function Zn(e){return{tabSize:e.tabSize,insertSpaces:e.insertSpaces}}var jn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideDocumentColors",value:function(e,n){var t=e.uri;return this._worker(t).then((function(e){return e.findDocumentColors(t.toString())})).then((function(e){if(e)return e.map((function(e){return{color:e.color,range:kn(e.range)}}))}))}},{key:"provideColorPresentations",value:function(e,n,t){var r=e.uri;return this._worker(r).then((function(e){return e.getColorPresentations(r.toString(),n.color,_n(n.range))})).then((function(e){if(e)return e.map((function(e){var n={label:e.label};return e.textEdit&&(n.textEdit=yn(e.textEdit)),e.additionalTextEdits&&(n.additionalTextEdits=e.additionalTextEdits.map(yn)),n}))}))}}]),e}(),On=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideFoldingRanges",value:function(e,n,t){var r=e.uri;return this._worker(r).then((function(e){return e.getFoldingRanges(r.toString(),n)})).then((function(e){if(e)return e.map((function(e){var 
n={start:e.startLine+1,end:e.endLine+1};return"undefined"!==typeof e.kind&&(n.kind=function(e){switch(e){case L.Comment:return v.languages.FoldingRangeKind.Comment;case L.Imports:return v.languages.FoldingRangeKind.Imports;case L.Region:return v.languages.FoldingRangeKind.Region}return}(e.kind)),n}))}))}}]),e}();var Nn=function(){function e(n){(0,o.Z)(this,e),this._worker=n}return(0,a.Z)(e,[{key:"provideSelectionRanges",value:function(e,n,t){var r=e.uri;return this._worker(r).then((function(e){return e.getSelectionRanges(r.toString(),n.map(mn))})).then((function(e){if(e)return e.map((function(e){for(var n=[];e;)n.push({range:kn(e.range)}),e=e.parent;return n}))}))}}]),e}();function Un(e){var n=[],t=[],r=new ge(e);n.push(r);var i=function(){return r.getLanguageServiceWorker.apply(r,arguments)};return function(){var n=e.languageId,r=e.modeConfiguration;Vn(t),r.completionItems&&t.push(v.languages.registerCompletionItemProvider(n,new pn(i,["/","-",":"]))),r.hovers&&t.push(v.languages.registerHoverProvider(n,new bn(i))),r.documentHighlights&&t.push(v.languages.registerDocumentHighlightProvider(n,new xn(i))),r.definitions&&t.push(v.languages.registerDefinitionProvider(n,new In(i))),r.references&&t.push(v.languages.registerReferenceProvider(n,new Rn(i))),r.documentSymbols&&t.push(v.languages.registerDocumentSymbolProvider(n,new Dn(i))),r.rename&&t.push(v.languages.registerRenameProvider(n,new Tn(i))),r.colors&&t.push(v.languages.registerColorProvider(n,new jn(i))),r.foldingRanges&&t.push(v.languages.registerFoldingRangeProvider(n,new On(i))),r.diagnostics&&t.push(new hn(n,i,e.onDidChange)),r.selectionRanges&&t.push(v.languages.registerSelectionRangeProvider(n,new Nn(i))),r.documentFormattingEdits&&t.push(v.languages.registerDocumentFormattingEditProvider(n,new Ln(i))),r.documentRangeFormattingEdits&&t.push(v.languages.registerDocumentRangeFormattingEditProvider(n,new Fn(i)))}(),n.push(Wn(t)),Wn(n)}function Wn(e){return{dispose:function(){return Vn(e)}}}function 
Vn(e){for(;e.length;)e.pop().dispose()}}}]); //# sourceMappingURL=9214.5198dfc3.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/9214.5198dfc3.chunk.js
9214.5198dfc3.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[7615],{17615:function(e,t,n){n.r(t),n.d(t,{conf:function(){return s},language:function(){return i}});var s={wordPattern:/(-?\d*\.\d\w*)|([^\`\~\!\#\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)/g,comments:{lineComment:"//",blockComment:["/*","*/"]},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"},{open:"<",close:">"}],folding:{markers:{start:new RegExp("^\\s*//\\s*(?:(?:#?region\\b)|(?:<editor-fold\\b))"),end:new RegExp("^\\s*//\\s*(?:(?:#?endregion\\b)|(?:</editor-fold>))")}}},i={defaultToken:"",tokenPostfix:".java",keywords:["abstract","continue","for","new","switch","assert","default","goto","package","synchronized","boolean","do","if","private","this","break","double","implements","protected","throw","byte","else","import","public","throws","case","enum","instanceof","return","transient","catch","extends","int","short","try","char","final","interface","static","void","class","finally","long","strictfp","volatile","const","float","native","super","while","true","false","yield","record","sealed","non-sealed","permits"],operators:["=",">","<","!","~","?",":","==","<=",">=","!=","&&","||","++","--","+","-","*","/","&","|","^","%","<<",">>",">>>","+=","-=","*=","/=","&=","|=","^=","%=","<<=",">>=",">>>="],symbols:/[=><!~?:&|+\-*\/\^%]+/,escapes:/\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,digits:/\d+(_+\d+)*/,octaldigits:/[0-7]+(_+[0-7]+)*/,binarydigits:/[0-1]+(_+[0-1]+)*/,hexdigits:/[[0-9a-fA-F]+(_+[0-9a-fA-F]+)*/,tokenizer:{root:[["non-sealed","keyword.non-sealed"],[/[a-zA-Z_$][\w$]*/,{cases:{"@keywords":{token:"keyword.$0"},"@default":"identifier"}}],{include:"@whitespace"},[/[{}()\[\]]/,"@bracke
ts"],[/[<>](?!@symbols)/,"@brackets"],[/@symbols/,{cases:{"@operators":"delimiter","@default":""}}],[/@\s*[a-zA-Z_\$][\w\$]*/,"annotation"],[/(@digits)[eE]([\-+]?(@digits))?[fFdD]?/,"number.float"],[/(@digits)\.(@digits)([eE][\-+]?(@digits))?[fFdD]?/,"number.float"],[/0[xX](@hexdigits)[Ll]?/,"number.hex"],[/0(@octaldigits)[Ll]?/,"number.octal"],[/0[bB](@binarydigits)[Ll]?/,"number.binary"],[/(@digits)[fFdD]/,"number.float"],[/(@digits)[lL]?/,"number"],[/[;,.]/,"delimiter"],[/"([^"\\]|\\.)*$/,"string.invalid"],[/"""/,"string","@multistring"],[/"/,"string","@string"],[/'[^\\']'/,"string"],[/(')(@escapes)(')/,["string","string.escape","string"]],[/'/,"string.invalid"]],whitespace:[[/[ \t\r\n]+/,""],[/\/\*\*(?!\/)/,"comment.doc","@javadoc"],[/\/\*/,"comment","@comment"],[/\/\/.*$/,"comment"]],comment:[[/[^\/*]+/,"comment"],[/\*\//,"comment","@pop"],[/[\/*]/,"comment"]],javadoc:[[/[^\/*]+/,"comment.doc"],[/\/\*/,"comment.doc.invalid"],[/\*\//,"comment.doc","@pop"],[/[\/*]/,"comment.doc"]],string:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/"/,"string","@pop"]],multistring:[[/[^\\"]+/,"string"],[/@escapes/,"string.escape"],[/\\./,"string.escape.invalid"],[/"""/,"string","@pop"],[/./,"string"]]}}}}]); //# sourceMappingURL=7615.f5af9bae.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/7615.f5af9bae.chunk.js
7615.f5af9bae.chunk.js
"use strict";(self.webpackChunkreact_admin_upgrade=self.webpackChunkreact_admin_upgrade||[]).push([[9902],{59902:function(e,r,t){t.r(r),t.d(r,{conf:function(){return i},language:function(){return a}});var i={comments:{lineComment:"#"},brackets:[["{","}"],["[","]"],["(",")"]],autoClosingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"},{open:"`",close:"`"}],surroundingPairs:[{open:"{",close:"}"},{open:"[",close:"]"},{open:"(",close:")"},{open:'"',close:'"'},{open:"'",close:"'"},{open:"`",close:"`"}]},a={defaultToken:"",ignoreCase:!0,tokenPostfix:".shell",brackets:[{token:"delimiter.bracket",open:"{",close:"}"},{token:"delimiter.parenthesis",open:"(",close:")"},{token:"delimiter.square",open:"[",close:"]"}],keywords:["if","then","do","else","elif","while","until","for","in","esac","fi","fin","fil","done","exit","set","unset","export","function"],builtins:["ab","awk","bash","beep","cat","cc","cd","chown","chmod","chroot","clear","cp","curl","cut","diff","echo","find","gawk","gcc","get","git","grep","hg","kill","killall","ln","ls","make","mkdir","openssl","mv","nc","node","npm","ping","ps","restart","rm","rmdir","sed","service","sh","shopt","shred","source","sort","sleep","ssh","start","stop","su","sudo","svn","tee","telnet","top","touch","vi","vim","wall","wc","wget","who","write","yes","zsh"],startingWithDash:/\-+\w+/,identifiersWithDashes:/[a-zA-Z]\w+(?:@startingWithDash)+/,symbols:/[=><!~?&|+\-*\/\^;\.,]+/,tokenizer:{root:[[/@identifiersWithDashes/,""],[/(\s)((?:@startingWithDash)+)/,["white","attribute.name"]],[/[a-zA-Z]\w*/,{cases:{"@keywords":"keyword","@builtins":"type.identifier","@default":""}}],{include:"@whitespace"},{include:"@strings"},{include:"@parameters"},{include:"@heredoc"},[/[{}\[\]()]/,"@brackets"],[/@symbols/,"delimiter"],{include:"@numbers"},[/[,;]/,"delimiter"]],whitespace:[[/\s+/,"white"],[/(^#!.*$)/,"metatag"],[/(^#.*$)/,"comment"]],numbers:[[/\d*\.\d+([eE][\-+]?\d+)?/,"number.float
"],[/0[xX][0-9a-fA-F_]*[0-9a-fA-F]/,"number.hex"],[/\d+/,"number"]],strings:[[/'/,"string","@stringBody"],[/"/,"string","@dblStringBody"]],stringBody:[[/'/,"string","@popall"],[/./,"string"]],dblStringBody:[[/"/,"string","@popall"],[/./,"string"]],heredoc:[[/(<<[-<]?)(\s*)(['"`]?)([\w\-]+)(['"`]?)/,["constants","white","string.heredoc.delimiter","string.heredoc","string.heredoc.delimiter"]]],parameters:[[/\$\d+/,"variable.predefined"],[/\$\w+/,"variable"],[/\$[*@#?\-$!0_]/,"variable"],[/\$'/,"variable","@parameterBodyQuote"],[/\$"/,"variable","@parameterBodyDoubleQuote"],[/\$\(/,"variable","@parameterBodyParen"],[/\$\{/,"variable","@parameterBodyCurlyBrace"]],parameterBodyQuote:[[/[^#:%*@\-!_']+/,"variable"],[/[#:%*@\-!_]/,"delimiter"],[/[']/,"variable","@pop"]],parameterBodyDoubleQuote:[[/[^#:%*@\-!_"]+/,"variable"],[/[#:%*@\-!_]/,"delimiter"],[/["]/,"variable","@pop"]],parameterBodyParen:[[/[^#:%*@\-!_)]+/,"variable"],[/[#:%*@\-!_]/,"delimiter"],[/[)]/,"variable","@pop"]],parameterBodyCurlyBrace:[[/[^#:%*@\-!_}]+/,"variable"],[/[#:%*@\-!_]/,"delimiter"],[/[}]/,"variable","@pop"]]}}}}]); //# sourceMappingURL=9902.bfcd7f60.chunk.js.map
ApiLogicServer
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/create_from_model/safrs-react-admin-npm-build/static/js/9902.bfcd7f60.chunk.js
9902.bfcd7f60.chunk.js