code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def estimate_meters_to_deg(meters, precision=PRECISION_PERSON):
    """Estimate a distance in meters as decimal degrees.

    See https://en.wikipedia.org/wiki/Decimal_degrees

    Args:
        meters (float): distance to convert
        precision (float): precision level, used as a row key into
            PRECISION_TABLE (assumed defined at module level — structure
            of its rows not visible here; column 3 is used as a divisor)
    Returns:
        float: approximate equivalent of ``meters`` in degrees
    """
    table_row = PRECISION_TABLE[precision]
    scale = 1 / float(10 ** precision)
    return (meters / table_row[3]) * scale
14.388977
16.558479
0.868979
def isostr_to_datetime(dt_str):
    """Convert an ISO formatted text string into a datetime object.

    Fractional seconds, if present, are stripped before parsing.

    Args:
        dt_str (str): ISO formatted text string
    Returns:
        :obj:`datetime.datetime`
    """
    if len(dt_str) > 20:
        # Longer than "YYYY-MM-DDTHH:MM:SSZ" means fractional seconds are
        # present; drop them and retry with the plain format.
        seconds_part = dt_str.split(".")[0]
        return isostr_to_datetime(seconds_part + "Z")
    return datetime.datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%SZ")
2.391923
2.710223
0.882556
"s -> (s0,s1), (s1,s2), (s2, s3), ..." now, nxt = tee(iterable) next(nxt, None) return izip(now, nxt)
def pairwise(iterable)
s -> (s0,s1), (s1,s2), (s2, s3), ...
2.935219
3.085716
0.951228
def __learn_labels(self, labels):
    """Learn new labels; intended for internal use only.

    Re-fits the label encoder on the union of the already-known classes
    (when the model has been fitted before) and the new labels.

    Args:
        labels (:obj:`list` of :obj:`str`): Labels to learn
    """
    # Start from previously-known classes only once something was fitted.
    known = list(self.labels.classes_) if self.feature_length > 0 else []
    known.extend(labels)
    self.labels.fit(known)
4.961628
5.493104
0.903247
def learn(self, features, labels):
    """Fit the classifier.

    If its state is empty the classifier is fitted; otherwise it is
    partially fitted. See sklearn's SGDClassifier fit and partial_fit.

    Args:
        features (:obj:`list` of :obj:`list` of :obj:`float`)
        labels (:obj:`list` of :obj:`str`): Labels for each set of
            features. New labels are learnt.
    """
    labels = np.ravel(labels)
    self.__learn_labels(labels)
    if len(labels) == 0:
        return

    labels = self.labels.transform(labels)
    supports_partial = self.feature_length > 0 and hasattr(self.clf, 'partial_fit')
    if supports_partial:
        # FIXME? check docs, may need to pass class=[...]
        self.clf = self.clf.partial_fit(features, labels)
    else:
        self.clf = self.clf.fit(features, labels)
    self.feature_length = len(features[0])
4.221132
4.74638
0.889337
def predict(self, features, verbose=False):
    """Probability estimates for each feature.

    See sklearn's SGDClassifier predict and predict_proba methods.

    Args:
        features (:obj:`list` of :obj:`list` of :obj:`float`)
        verbose (bool, optional): If True, returns an array where each
            element is a dictionary mapping labels to their respective
            probabilities. Defaults to False.
    Returns:
        Array of arrays of numbers, or array of dictionaries if verbose
        is True.
    """
    probabilities = self.clf.predict_proba(features)
    if not verbose:
        return probabilities
    class_names = self.labels.classes_
    return [
        {class_names[i]: p for i, p in enumerate(row)}
        for row in probabilities
    ]
2.74425
2.668737
1.028295
def is_file(obj):
    """Return True if *obj* has 'next', '__enter__' and '__exit__' methods.

    Suitable to check both builtin ``file`` and ``django.core.file.File``
    instances.
    """
    # Must be a context manager...
    context_ok = [callable(getattr(obj, name, None))
                  for name in ('__enter__', '__exit__')]
    # ...and iterable in at least one of the two spellings.
    iterable_ok = any(callable(getattr(obj, name, None))
                      for name in ('next', '__iter__'))
    return all(context_ok + [iterable_ok])
3.494177
2.751827
1.269766
def file_adapter(file_or_path):
    """Context manager that works similar to ``open(file_path)`` but also
    accepts already opened file-like objects.

    Note: the file object is closed on exit even when it was supplied by
    the caller (preserves the original behaviour).
    """
    if is_file(file_or_path):
        file_obj = file_or_path
    else:
        file_obj = open(file_or_path, 'rb')
    try:
        yield file_obj
    finally:
        # Bug fix: the original skipped close() when the managed block
        # raised, leaking the handle.
        file_obj.close()
2.229391
1.86653
1.194404
def source_loader(self, source_paths, create_missing_tables=True):
    """Load generator source from 3 csv files.

    The first file holds global settings (``native_lagnauge,languages``
    header plus value rows), the second holds templates
    (``template_name,probability,genders,template`` with semicolon-separated
    template values), and the third holds per-language value tables keyed by
    ``slug:lang_code`` headers (headers without ":lang_code" are treated as
    native-language columns).  Tables missing for a template slug are
    auto-created with values equal to the slug itself when
    ``create_missing_tables`` is True.

    Args:
        source_paths: sequence of exactly 3 paths
            (settings, templates, tables).
        create_missing_tables (bool): create stub tables for slugs that
            have no table data.
    Raises:
        TypeError: when ``source_paths`` is not a sized iterable of
            at least 3 items.
    """
    if not isinstance(source_paths, Iterable) or len(source_paths) < 3:
        raise TypeError(
            'FromCSVTablesGenerator.source_loader accepts list of 3 paths'
            ' as argument. Got `%s` instead' % source_paths)

    # reset all generator state before loading
    self.native_language = ''
    self.languages = []
    self.templates = []
    self.tables = {}

    self.load_settings(source_paths[0])
    template_slugs = self.load_templates(source_paths[1])
    self.load_tables(source_paths[2])
    if create_missing_tables:
        self.create_missing_tables(template_slugs)
    self.full_forms_for_languages = set()
4.609742
3.896008
1.183196
def loader():
    """Load image from URL, and preprocess for Resnet."""
    image_url = request.args.get('url')  # image URL passed as request param
    fetched = requests.get(image_url)    # fetch the static image file
    return fetched.content
9.781054
9.024106
1.083881
def postprocessor(prediction):
    """Map prediction tensor to labels.

    NOTE(review): ``labels`` is read from module scope — confirm it is a
    sequence indexable by class id.
    """
    scores = prediction.data.numpy()[0]
    # indices of the three highest scores, best first (renamed loop var:
    # the original shadowed the `prediction` argument)
    best_three = scores.argsort()[-3:][::-1]
    return [labels[idx] for idx in best_three]
3.528961
3.387003
1.041913
def get_logger(name):
    """Get a logger with the specified name.

    The level is read from the LOGLEVEL environment variable and defaults
    to INFO.
    """
    configured = logging.getLogger(name)
    configured.setLevel(getenv('LOGLEVEL', 'INFO'))
    return configured
3.704875
3.492337
1.060858
def make_serializable(data):
    """Ensure data is serializable, recursively converting children."""
    if is_serializable(data):
        return data

    # numpy arrays expose tolist(); use it when available
    try:
        return data.tolist()
    except AttributeError:
        pass
    except Exception as e:
        logger.debug('{} exception ({}): {}'.format(type(e).__name__, e, data))

    # otherwise try serializing each child element
    if isinstance(data, dict):
        return {k: make_serializable(v) for k, v in data.items()}
    try:
        return [make_serializable(item) for item in data]
    except TypeError:
        # not iterable
        pass
    except Exception:
        logger.debug('Could not serialize {}; converting to string'.format(data))

    # last resort: stringify
    return str(data)
2.980819
2.888973
1.031792
def json_numpy_loader():
    """Load data from JSON request and convert to numpy array.

    NOTE(review): this only returns the parsed JSON; any numpy conversion
    happens downstream — confirm against callers.
    """
    payload = request.get_json()
    logger.debug('Received JSON data of length {:,}'.format(len(payload)))
    return payload
6.068301
5.138002
1.181062
def get_bytes_to_image_callback(image_dims=(224, 224)):
    """Return a callback to process image bytes for ImageNet."""
    from keras.preprocessing import image
    import numpy as np
    from PIL import Image
    from io import BytesIO

    def preprocess_image_bytes(data_bytes):
        # decode the raw bytes into a PIL image
        try:
            img = Image.open(BytesIO(data_bytes))
        except OSError as e:
            raise ValueError('Please provide a raw image')
        img = img.resize(image_dims, Image.ANTIALIAS)  # model requires 224x224 pixels
        array = image.img_to_array(img)  # PIL image -> numpy array
        # model expects dim 0 to be iterable across images
        return np.expand_dims(array, axis=0)

    return preprocess_image_bytes
3.275921
3.140115
1.043249
def exception_log_and_respond(exception, logger, message, status_code):
    """Log an error and send jsonified respond."""
    logger.error(message, exc_info=True)
    details = dict(
        exception_type=type(exception).__name__,
        exception_message=str(exception),
    )
    return make_response(message, status_code, details)
2.718419
2.754186
0.987014
def make_response(message, status_code, details=None):
    """Make a jsonified response with specified message and status code."""
    body = dict(message=message)
    if details:
        body['details'] = details
    reply = jsonify(body)
    reply.status_code = status_code
    return reply
1.978549
1.968883
1.00491
def _create_prediction_endpoint(
        self,
        to_numpy=True,
        data_loader=json_numpy_loader,
        preprocessor=lambda x: x,
        input_validation=lambda data: (True, None),
        postprocessor=lambda x: x,
        make_serializable_post=True):
    """Create an endpoint to serve predictions.

    Arguments:
        - data_loader (fn): reads flask request and returns data preprocessed
          to be used in the `predict` method
        - input_validation (fn): takes a numpy array as input; returns True
          if validation passes and False otherwise
        - postprocessor (fn): transforms the predictions from the `predict`
          method
    """
    # bind instance attributes locally so the nested class closes over them
    predict = self.predict
    logger = self.app.logger

    class Predictions(Resource):
        """RESTful resource serving model predictions."""

        @staticmethod
        def post():
            # 1) read data from the API request
            try:
                data = data_loader()
            except Exception as e:
                return exception_log_and_respond(e, logger, 'Unable to fetch data', 400)

            # 2) run the preprocessing chain; optionally convert to numpy
            try:
                if hasattr(preprocessor, '__iter__'):
                    for step in preprocessor:
                        data = step(data)
                else:
                    data = preprocessor(data)
                if to_numpy:
                    data = np.array(data)
            except Exception as e:
                return exception_log_and_respond(e, logger, 'Could not preprocess data', 400)

            # 3) sanity check using user defined callback (default: no check)
            validation_pass, validation_reason = input_validation(data)
            if not validation_pass:
                # log the reason and the data, then send a 400 response
                validation_message = 'Input validation failed with reason: {}'.format(validation_reason)
                logger.error(validation_message)
                logger.debug('Data: {}'.format(data))
                return make_response(validation_message, 400)

            # 4) run the model
            try:
                prediction = predict(data)
            except Exception as e:
                logger.debug('Data: {}'.format(data))
                return exception_log_and_respond(e, logger, 'Unable to make prediction', 500)
            logger.debug(prediction)

            # 5) postprocess and serialize the prediction
            try:
                if hasattr(postprocessor, '__iter__'):
                    for step in postprocessor:
                        prediction = step(prediction)
                else:
                    prediction = postprocessor(prediction)
                if make_serializable_post:
                    return make_serializable(prediction)
                return prediction
            except Exception as e:
                return exception_log_and_respond(e, logger, 'Postprocessing failed', 500)

    # map resource to endpoint
    self.api.add_resource(Predictions, '/predictions')
2.776935
2.798585
0.992264
def create_info_endpoint(self, name, data):
    """Create an endpoint to serve info GET requests.

    :param name: endpoint name; the resource is served at ``/info/<name>``
    :param data: static payload returned by GET (made serializable first)
    """
    # make sure data is serializable
    data = make_serializable(data)

    # generic restful resource to serve static JSON data
    class InfoBase(Resource):
        @staticmethod
        def get():
            return data

    def info_factory(name):
        # subclass with a unique name so flask-restful registers it cleanly
        class NewClass(InfoBase):
            pass
        NewClass.__name__ = "{}_{}".format(name, InfoBase.__name__)
        return NewClass

    path = '/info/{}'.format(name)
    self.api.add_resource(info_factory(name), path)
    # typo fix: 'Regestered' -> 'Registered'
    logger.info('Registered informational resource to {} (available via GET)'.format(path))
    logger.debug('Endpoint {} will now serve the following static data:\n{}'.format(path, data))
5.209114
5.281765
0.986245
def _create_model_info_endpoint(self, path='/info/model'):
    """Create an endpoint to serve model info GET requests.

    :param path: URL path for the resource (default ``/info/model``)
    """
    model = self.model

    # serialize the model's attribute dict for the JSON response
    model_details = {key: make_serializable(value)
                     for key, value in model.__dict__.items()}

    # generic restful resource to serve model information as JSON
    class ModelInfo(Resource):
        @staticmethod
        def get():
            return model_details

    self.api.add_resource(ModelInfo, path)
    # typo fix: 'Regestered' -> 'Registered'
    self.app.logger.info('Registered informational resource to {} (available via GET)'.format(path))
    self.app.logger.debug('Endpoint {} will now serve the following static data:\n{}'.format(path, model_details))
4.936531
4.868094
1.014058
def serve(self, host='127.0.0.1', port=5000):
    """Serve predictions as an API endpoint."""
    from meinheld import server, middleware
    # meinheld handles the socket instead of Flask's builtin server:
    # self.app.run(host=host, port=port)
    server.listen((host, port))
    server.run(middleware.WebSocketMiddleware(self.app))
5.997481
6.628015
0.904868
def get_model(input_dim):
    """Create and compile simple model."""
    net = Sequential()
    net.add(Dense(100, input_dim=input_dim, activation='sigmoid'))  # hidden layer
    net.add(Dense(1))  # single linear output unit
    net.compile(loss='mean_squared_error', optimizer='SGD')
    return net
1.858098
1.748258
1.062828
def validator(input_data):
    """Simple model input validator.

    Ensures the input data array is two dimensional and has the correct
    number of features (taken from the module-level ``data`` object).

    Returns:
        (bool, str or None): validity flag and a failure reason.
    """
    global data
    if input_data.ndim != 2:
        return False, 'Data should have two dimensions.'
    expected_features = data.data.shape[1]
    if input_data.shape[1] != expected_features:
        reason = '{} features required, {} features provided'.format(
            expected_features, input_data.shape[1])
        return False, reason
    # validation passed
    return True, None
3.999215
3.763785
1.062551
with open(chd_file, "rb") as f: header = { "cinefileheader": cine.CINEFILEHEADER(), "bitmapinfoheader": cine.BITMAPINFOHEADER(), "setup": cine.SETUP(), } f.readinto(header["cinefileheader"]) f.readinto(header["bitmapinfoheader"]) f.readinto(header["setup"]) return header
def read_chd_header(chd_file)
read the .chd header file created when Vision Research software saves the images in a file format other than .cine
3.11705
2.924409
1.065873
def fetch_list(version=None):
    """Fetch and write the sorted stdlib module list for a Python version.

    For the given version (or all known versions when *version* is None),
    uses :py:mod:`sphinx.ext.intersphinx`'s ``fetch_inventory`` to parse
    ``http://docs.python.org/<version>/objects.inv``, then writes the
    sorted module names, one per line, to ``<list_dir>/<version>.txt``.

    :param str|None version: a Python version string, or None for all.
    """
    if version is None:
        versions = short_versions
    else:
        versions = [get_canonical_version(version)]

    for version in versions:
        url = "http://docs.python.org/{}/objects.inv".format(version)
        inventory = fetch_inventory(DummyApp(), "", url)
        modules = sorted(inventory.get("py:module").keys())
        with open(os.path.join(list_dir, "{}.txt".format(version)), "w") as f:
            for module in modules:
                f.write(module + "\n")
4.123953
3.126953
1.318841
def stdlib_list(version=None):
    """Given a ``version``, return a ``list`` of names of the Python
    Standard Libraries for that version, read from the bundled inventory
    list files (see :py:mod:`sphinx.ext.intersphinx`).

    :param str|None version: the version string whose list you want
        (e.g. ``"2.7"`` or ``"3.5"``). The running interpreter's version
        is used when None.
    :return: list of standard library module names
    :rtype: list
    """
    if version is None:
        version = '.'.join(str(x) for x in sys.version_info[:2])
    else:
        version = get_canonical_version(version)

    module_list_file = os.path.join(list_dir, "{}.txt".format(version))
    with open(module_list_file) as f:
        stripped = (line.strip() for line in f)
        return [name for name in stripped if name]
3.176702
3.792331
0.837665
def autohash(include=None,                  # type: Union[str, Tuple[str]]
             exclude=None,                  # type: Union[str, Tuple[str]]
             only_constructor_args=False,   # type: bool
             only_public_fields=False,      # type: bool
             cls=DECORATED
             ):
    """A decorator making objects of the class implement __hash__, so they
    can be used correctly in sets, for example. Parameters customize which
    attributes are taken into account in the hash.

    :param include: tuple of attribute names to include (None means all)
    :param exclude: tuple of attribute names to exclude; include should
        then be None
    :param only_constructor_args: if False (default), all fields are hashed,
        even dynamic ones. If True, only constructor arguments are hashed.
        Note this is the opposite default from @autodict.
    :param only_public_fields: only used when only_constructor_args is
        False. If False (default) all fields are hashed; otherwise
        class-private fields are skipped. Opposite default from @autodict.
    :return:
    """
    # delegate to the manual decoration routine
    return autohash_decorate(cls,
                             include=include,
                             exclude=exclude,
                             only_constructor_args=only_constructor_args,
                             only_public_fields=only_public_fields)
2.674799
2.948284
0.907239
def autohash_decorate(cls,                          # type: Type[T]
                      include=None,                 # type: Union[str, Tuple[str]]
                      exclude=None,                 # type: Union[str, Tuple[str]]
                      only_constructor_args=False,  # type: bool
                      only_public_fields=False,     # type: bool
                      ):
    # type: (...) -> Type[T]
    """Generate the methods making objects of *cls* hashable, manually,
    without using the @autohash decorator.

    :param cls: the class on which to execute. Note that it won't be wrapped.
    :param include: tuple of attribute names to include (None means all)
    :param exclude: tuple of attribute names to exclude; include should
        then be None
    :param only_constructor_args: if False (default) all fields are hashed;
        if True only constructor arguments are.
    :param only_public_fields: only used when only_constructor_args is
        False; when True, class-private fields are skipped.
    :return: the same class, modified
    """
    # refuse to combine with incompatible decorators
    _check_known_decorators(cls, '@autohash')

    # inject the generated __hash__ implementation
    _execute_autohash_on_class(cls, include=include, exclude=exclude,
                               only_constructor_args=only_constructor_args,
                               only_public_fields=only_public_fields)
    return cls
4.387878
5.530842
0.793347
def _execute_autohash_on_class(object_type,                  # type: Type[T]
                               include=None,                 # type: Union[str, Tuple[str]]
                               exclude=None,                 # type: Union[str, Tuple[str]]
                               only_constructor_args=False,  # type: bool
                               only_public_fields=False,     # type: bool
                               ):
    """Generate and attach a __hash__ implementation on *object_type* so its
    instances can be used correctly in sets, for example.

    :param object_type: the class on which to execute.
    :param include: tuple of attribute names to include (None means all)
    :param exclude: tuple of attribute names to exclude; include should
        then be None
    :param only_constructor_args: if False (default) all fields are hashed;
        if True only constructor arguments are.
    :param only_public_fields: only used when only_constructor_args is
        False; when True, class-private fields are skipped.
    :return:
    """
    # First check parameters
    validate_include_exclude(include, exclude)

    # Respect an existing hand-written __hash__
    if method_already_there(object_type, '__hash__'):
        return

    if only_constructor_args:
        # a. Find the __init__ constructor signature
        constructor = get_constructor(object_type, allow_inheritance=True)
        s = signature(constructor)

        # b. Collect the selected parameter names.
        # we assume that the order of attributes will always be the same here....
        added = [attr_name for attr_name in s.parameters.keys()
                 if is_attr_selected(attr_name, include=include, exclude=exclude)]

        # c. Hash over the tuple of selected constructor attributes.
        # note: we do NOT prepend a hash of the class - more intuitive without.
        def __hash__(self):
            return hash(tuple(getattr(self, att_name) for att_name in added))

    elif include is None and exclude is None and not only_public_fields:
        # all dynamic fields allowed: hash over every vars() value
        def __hash__(self):
            return hash(tuple(vars(self).values()))

    else:
        # dynamic filtering of fields
        # private_name_prefix = '_' + object_type.__name__ + '_'
        private_name_prefix = '_'

        def __hash__(self):
            to_hash = []
            for att_name, att_value in vars(self).items():
                att_name = possibly_replace_with_property_name(self.__class__, att_name)
                if is_attr_selected(att_name, include=include, exclude=exclude):
                    if not only_public_fields or not att_name.startswith(private_name_prefix):
                        to_hash.append(att_value)
            return hash(tuple(to_hash))

    # install the generated method on the class
    object_type.__hash__ = __hash__
    return
3.681509
3.542531
1.039232
def autoclass(include=None,    # type: Union[str, Tuple[str]]
              exclude=None,    # type: Union[str, Tuple[str]]
              autoargs=True,   # type: bool
              autoprops=True,  # type: bool
              autodict=True,   # type: bool
              autohash=True,   # type: bool
              cls=DECORATED
              ):
    """A decorator to perform @autoargs, @autoprops and @autodict all at
    once with the same include/exclude list.

    :param include: tuple of attribute names to include (None means all)
    :param exclude: tuple of attribute names to exclude; include should
        then be None
    :param autoargs: enable @autoargs on the constructor (default: True)
    :param autoprops: enable @autoprops (default: True)
    :param autodict: enable @autodict (default: True)
    :param autohash: enable @autohash (default: True)
    :return:
    """
    # delegate to the manual decoration routine
    return autoclass_decorate(cls, include=include, exclude=exclude,
                              autoargs=autoargs, autoprops=autoprops,
                              autodict=autodict, autohash=autohash)
2.401993
2.999464
0.800808
def install(cmd, packages):
    """Install all provided packages at once using the given installer.

    :param cmd: installer executable to invoke (e.g. 'pip')
    :param packages: list of package name strings to install
    :return:
    :raises subprocess.CalledProcessError: when the install command fails
    """
    check_cmd(cmd)
    # Bug fix: the original referenced an undefined name `all_pkgs`
    # (NameError at runtime); the joined string must come from `packages`.
    all_pkgs_str = " ".join(packages)
    print("INSTALLING: " + cmd + " install " + all_pkgs_str)
    subprocess.check_call([cmd, 'install'] + packages)
4.630583
5.409034
0.856083
def autodict(include=None,                # type: Union[str, Tuple[str]]
             exclude=None,                # type: Union[str, Tuple[str]]
             only_constructor_args=True,  # type: bool
             only_public_fields=True,     # type: bool
             cls=DECORATED
             ):
    """A decorator to make objects of the class behave like a read-only
    `dict`. It adds collections.Mapping to the class bases, generates
    __len__, __iter__ and __getitem__, adds a static from_dict builder
    (only when only_constructor_args=True), and overrides eq/str/repr when
    not already implemented.

    :param include: tuple of attribute names to include (None means all)
    :param exclude: tuple of attribute names to exclude; include should
        then be None
    :param only_constructor_args: if True (default), only constructor
        arguments are exposed in the dict view (convenient with @autoargs);
        if False, the dict view covers public object fields.
    :param only_public_fields: only used when only_constructor_args is
        False. If False all fields are visible; otherwise (default)
        class-private fields are hidden.
    :return:
    """
    # delegate to the manual decoration routine
    return autodict_decorate(cls, include=include, exclude=exclude,
                             only_constructor_args=only_constructor_args,
                             only_public_fields=only_public_fields)
2.194981
2.702475
0.812211
def autodict_decorate(cls,                         # type: Type[T]
                      include=None,                # type: Union[str, Tuple[str]]
                      exclude=None,                # type: Union[str, Tuple[str]]
                      only_constructor_args=True,  # type: bool
                      only_public_fields=True      # type: bool
                      ):
    # type: (...) -> Type[T]
    """Generate the methods making objects of *cls* behave like a `dict`,
    manually, without using the @autodict decorator.

    :param cls: the class on which to execute. Note that it won't be wrapped.
    :param include: tuple of attribute names to include (None means all)
    :param exclude: tuple of attribute names to exclude; include should
        then be None
    :param only_constructor_args: if True (default) only constructor
        arguments appear in the dict view; otherwise public object fields.
    :param only_public_fields: only used when only_constructor_args is
        False; when True (default), class-private fields are hidden.
    :return: the same class, modified
    """
    # refuse to combine with incompatible decorators
    _check_known_decorators(cls, '@autodict')

    # inject the generated dict-view machinery
    _execute_autodict_on_class(cls, include=include, exclude=exclude,
                               only_constructor_args=only_constructor_args,
                               only_public_fields=only_public_fields)
    return cls
4.440939
5.482401
0.810035
def autodict_override_decorate(func  # type: Callable
                               ):
    # type: (...) -> Callable
    """Mark *func* as an overridden dictionary method (such as __iter__),
    without using the @autodict_override annotation.

    :param func: the function on which to execute. Note that it won't be
        wrapped but simply annotated.
    :return: the same function, annotated
    """
    overridable = {Mapping.__iter__.__name__,
                   Mapping.__getitem__.__name__,
                   Mapping.__len__.__name__}
    if func.__name__ not in overridable:
        raise ValueError('@autodict_override can only be used on one of the three Mapping methods __iter__,'
                         '__getitem__ and __len__. Found: ' + func.__name__)

    # refuse a duplicate annotation, then simply annotate the function
    if hasattr(func, __AUTODICT_OVERRIDE_ANNOTATION):
        raise DuplicateOverrideError('Function is overridden twice : ' + func.__name__)
    setattr(func, __AUTODICT_OVERRIDE_ANNOTATION, True)
    return func
4.607988
4.774953
0.965033
def map_values(f, D):
    """Map each value in the dictionary D to f(value).

    :param f: one-argument callable applied to every value
    :param D: input dictionary (left unmodified)
    :return: a new dict with the same keys and transformed values
    """
    # Compat fix: .iteritems() is Python-2-only; .items() behaves
    # identically here and also works on Python 3.
    return {key: f(val) for key, val in D.items()}
4.417874
3.943075
1.120413
def raw_repr(obj):
    """Produce a representation using the default repr() regardless of
    whether the object provides an implementation of its own.
    """
    if not isproxy(obj):
        return repr(obj)
    # proxies get a synthetic repr naming their remote prime_id
    return '<%s with prime_id=%d>' % (obj.__class__.__name__, obj.prime_id)
7.628779
3.902781
1.954703
def _load_module(module_name, path):
    """A helper function invoked on the server to tell it to import a module.

    NOTE(review): relies on the deprecated ``imp`` module (removed in
    Python 3.12) — assumes an older runtime.
    """
    # TODO: handle the case that the module is already loaded
    try:
        # Prefer a non-builtin, non-frozen, non-special module found on
        # the client's search path
        fd, filename, info = imp.find_module(module_name, path)
    except ImportError:
        # Fall back to builtin / frozen / special modules
        fd, filename, info = imp.find_module(module_name)

    # Import the module using whatever find_module located above
    try:
        return imp.load_module(module_name, fd, filename, info)
    finally:
        if fd is not None:
            fd.close()
4.722891
3.839308
1.230141
def byvalue(proxy):
    """Return a copy of the underlying object for which the argument is a
    proxy.

    :param proxy: a Proxy instance
    :raises TypeError: if *proxy* is not a Proxy (the original used a bare
        ``assert``, which is silently stripped under ``python -O``)
    """
    if not isinstance(proxy, Proxy):
        raise TypeError('byvalue() expects a Proxy, got %s' % type(proxy).__name__)
    return proxy.client.execute(ByValueDelegate(proxy))
13.065154
6.575062
1.987077
def import_isolated(module_name, fromlist=[], level=-1, path=None):
    """Import a module into an isolated context as if with
    "__import__('module_name')".

    NOTE(review): ``level=-1`` is the Python 2 default for implicit
    relative imports; the mutable default ``fromlist=[]`` is never
    mutated here, so it is harmless.
    """
    # register the isolated module first, then re-import it through the
    # normal machinery so package semantics are honoured
    isolated = load_module(module_name, path=path)
    sys.modules[module_name] = isolated
    return __import__(module_name, fromlist=fromlist, level=level)
4.667885
2.58046
1.808935
def state(self, state):
    """Change the state of the client. This is one of the values defined
    in ClientState.Names."""
    # log the transition before mutating so failures are traceable
    logger.debug('client changing to state=%s', ClientState.Names[state])
    self._state = state
10.36564
5.460045
1.898453
def _read_result(self, num_retries):
    """Read an object from a channel, possibly retrying if the attempt is
    interrupted by a signal from the operating system.

    :param num_retries: maximum number of attempts before giving up
    :raises ChannelError: when every attempt was interrupted
    """
    for _attempt in range(num_retries):
        self._assert_alive()
        try:
            return self._result_channel.get()
        except IOError as ex:
            if ex.errno != 4:
                # Something else went wrong - raise the exception as usual
                raise ex
            # errno=4 corresponds to "System call interrupted",
            # which means a signal was recieved before any data
            # was sent. For now I think it's safe to ignore this
            # and continue.
            logger.exception('attempt to read from channel was interrupted by something')
            sys.exc_clear()
    raise ChannelError('failed to read from channel after %d retries' % num_retries)
6.207318
4.707125
1.318707
def terminate(self):
    """Stop the server process and change our state to TERMINATING.
    Only valid if state=READY.

    :raises ClientStateError: when called while a result is pending or a
        termination is already in progress
    """
    logger.debug('client.terminate() called (state=%s)', self.strstate)
    # The two raising branches were duplicated; merged. Also fixes the
    # 'terimate()' typo in both error messages.
    if self.state in (ClientState.WAITING_FOR_RESULT, ClientState.TERMINATING):
        raise ClientStateError('terminate() called while state=' + self.strstate)
    elif self.state in ClientState.TerminatedSet:
        assert not self._server_process.is_alive()
        return
    elif self.state == ClientState.READY:
        # Check that the process itself is still alive
        self._assert_alive()

        # Make sure the SIGCHLD signal handler doesn't throw any exceptions
        self.state = ClientState.TERMINATING

        # Do not call execute() because that function will check
        # whether the process is alive and throw an exception if not
        # TODO: can the queue itself throw exceptions?
        self._delegate_channel.put(FunctionCallDelegate(_raise_terminate))

        # Wait for acknowledgement
        try:
            self._read_result(num_retries=5)
        except ProcessTerminationError:
            pass
        except ChannelError:
            # Was interrupted five times in a row! Ignore for now
            logger.debug('client failed to read sentinel from channel after 5 retries - will terminate anyway')
        self.state = ClientState.TERMINATED_CLEANLY
7.579033
6.621552
1.144601
'''Terminate this client if it has not already terminated.''' if self.state == ClientState.WAITING_FOR_RESULT: # There is an ongoing call to execute() # Not sure what to do here logger.warn('cleanup() called while state is WAITING_FOR_RESULT: ignoring') elif self.state == ClientState.TERMINATING: # terminate() has been called but we have not recieved SIGCHLD yet # Not sure what to do here logger.warn('cleanup() called while state is TERMINATING: ignoring') elif self.state in ClientState.TerminatedSet: # We have already terminated # TODO: should we deal with TERMINATED_ASYNC in some special way? logger.debug('cleanup() called while state is TERMINATING: nothing needs to be done') else: logger.debug('cleanup() called while state is %s: attempting to terminate', self.strstate) try: self.terminate() except ProcessTerminationError as ex: # Terminate can throw a ProcessTerminationError if the # process terminated at some point between the last # execute() and the call to terminate() # For now we just ignore this. pass
def cleanup(self)
Terminate this client if it has not already terminated.
4.921413
4.561202
1.078973
'''Create a process in which the isolated code will be run.''' assert self._client is None logger.debug('IsolationContext[%d] starting', id(self)) # Create the queues request_queue = multiprocessing.Queue() response_queue = multiprocessing.Queue() # Launch the server process server = Server(request_queue, response_queue) # Do not keep a reference to this object! server_process = multiprocessing.Process(target=server.loop) server_process.start() # Create a client to talk to the server self._client = Client(server_process, request_queue, response_queue)
def start(self)
Create a process in which the isolated code will be run.
4.974845
4.120302
1.207398
'''Import a module into this isolation context and return a proxy for it.''' self.ensure_started() if path is None: path = sys.path mod = self.client.call(_load_module, module_name, path) mod.__isolation_context__ = self return mod
def load_module(self, module_name, path=None)
Import a module into this isolation context and return a proxy for it.
6.143056
4.338307
1.416003
if not path.exists(dest_folder): makedirs(dest_folder) # , exist_ok=True) not python 2 compliant if test_stats.success_percentage < 50: color = 'red' elif test_stats.success_percentage < 75: color = 'orange' elif test_stats.success_percentage < 90: color = 'green' else: color = 'brightgreen' left_txt = "tests" # right_txt = "%s%%" % test_stats.success_percentage right_txt = "%s/%s" % (test_stats.success, test_stats.runned) url = 'https://img.shields.io/badge/%s-%s-%s.svg' % (left_txt, quote_plus(right_txt), color) dest_file = path.join(dest_folder, 'junit-badge.svg') print('Generating junit badge from : ' + url) response = requests.get(url, stream=True) with open(dest_file, 'wb') as out_file: response.raw.decode_content = True shutil.copyfileobj(response.raw, out_file) del response
def download_badge(test_stats, # type: TestStats dest_folder='reports/junit' # type: str )
Downloads the badge corresponding to the provided success percentage, from https://img.shields.io. :param test_stats: :param dest_folder: :return:
2.422428
2.456398
0.986171
return autoprops_decorate(cls, include=include, exclude=exclude)
def autoprops(include=None, # type: Union[str, Tuple[str]] exclude=None, # type: Union[str, Tuple[str]] cls=DECORATED)
A decorator to automatically generate all properties getters and setters from the class constructor. * if a @contract annotation exist on the __init__ method, mentioning a contract for a given parameter, the parameter contract will be added on the generated setter method * The user may override the generated getter and/or setter by creating them explicitly in the class and annotating them with @getter_override or @setter_override. Note that the contract will still be dynamically added on the setter, even if the setter already has one (in such case a `UserWarning` will be issued) :param include: a tuple of explicit attribute names to include (None means all) :param exclude: a tuple of explicit attribute names to exclude. In such case, include should be None. :return:
7.28973
13.038162
0.559107
# type: (...) -> Type[T] # first check that we do not conflict with other known decorators _check_known_decorators(cls, '@autoprops') # perform the class mod _execute_autoprops_on_class(cls, include=include, exclude=exclude) # TODO better create a wrapper than modify the class? Probably not # class Autoprops_Wrapper(object): # def __init__(self, *args, **kwargs): # self.wrapped = cls(*args, **kwargs) # # return Autoprops_Wrapper return cls
def autoprops_decorate(cls, # type: Type[T] include=None, # type: Union[str, Tuple[str]] exclude=None # type: Union[str, Tuple[str]] )
To automatically generate all properties getters and setters from the class constructor manually, without using @autoprops decorator. * if a @contract annotation exist on the __init__ method, mentioning a contract for a given parameter, the parameter contract will be added on the generated setter method * The user may override the generated getter and/or setter by creating them explicitly in the class and annotating them with @getter_override or @setter_override. Note that the contract will still be dynamically added on the setter, even if the setter already has one (in such case a `UserWarning` will be issued) :param cls: the class on which to execute. Note that it won't be wrapped. :param include: a tuple of explicit attribute names to include (None means all) :param exclude: a tuple of explicit attribute names to exclude. In such case, include should be None. :return:
5.823493
6.505975
0.895099
# 0. first check parameters validate_include_exclude(include, exclude) # 1. Find the __init__ constructor signature and possible pycontracts @contract constructor = get_constructor(object_type, allow_inheritance=True) s = signature(constructor) # option a) pycontracts contracts_dict = constructor.__contracts__ if hasattr(constructor, '__contracts__') else {} # option b) valid8 validators_dict = constructor.__validators__ if hasattr(constructor, '__validators__') else {} # 2. For each attribute that is not 'self' and is included and not excluded, add the property added = [] for attr_name in s.parameters.keys(): if is_attr_selected(attr_name, include=include, exclude=exclude): added.append(attr_name) # pycontract if attr_name in contracts_dict.keys(): pycontract = contracts_dict[attr_name] else: pycontract = None # valid8 validators: create copies, because we will modify them (changing the validated function ref) if attr_name in validators_dict.keys(): validators = [copy(v) for v in validators_dict[attr_name]] else: validators = None _add_property(object_type, s.parameters[attr_name], pycontract=pycontract, validators=validators) # 3. Finally check that there is no overridden setter or getter that does not correspond to an attribute extra_overrides = getmembers(object_type, predicate=(lambda fun: callable(fun) and (hasattr(fun, __GETTER_OVERRIDE_ANNOTATION) and getattr(fun, __GETTER_OVERRIDE_ANNOTATION) not in added) or (hasattr(fun, __SETTER_OVERRIDE_ANNOTATION)) and getattr(fun, __SETTER_OVERRIDE_ANNOTATION) not in added) ) if len(extra_overrides) > 0: raise AttributeError('Attribute named \'' + extra_overrides[0][0] + '\' was not found in constructor signature.' 'Therefore its getter/setter can not be overridden by function ' + extra_overrides[0][1].__qualname__)
def _execute_autoprops_on_class(object_type, # type: Type[T] include=None, # type: Union[str, Tuple[str]] exclude=None # type: Union[str, Tuple[str]] )
This method will automatically add one getter and one setter for each constructor argument, except for those overridden using autoprops_override_decorate(), @getter_override or @setter_override. It will add a @contract on top of all setters (generated or overridden, if they don't already have one) :param object_type: the class on which to execute. :param include: a tuple of explicit attribute names to include (None means all) :param exclude: a tuple of explicit attribute names to exclude. In such case, include should be None. :return:
4.346601
4.16152
1.044474
property_name = parameter.name # 1. create the private field name , e.g. '_foobar' private_property_name = '_' + property_name # 2. property getter (@property) - create or use overridden getter_fun = _get_getter_fun(object_type, parameter, private_property_name) # 3. property setter (@property_name.setter) - create or use overridden setter_fun, var_name = _get_setter_fun(object_type, parameter, private_property_name) # 4. add the contract to the setter, if any setter_fun_with_possible_contract = setter_fun if pycontract is not None: setter_fun_with_possible_contract = _add_contract_to_setter(setter_fun, var_name, pycontract, property_name) elif validators is not None: setter_fun_with_possible_contract = _add_validators_to_setter(setter_fun, var_name, validators, property_name) # 5. change the function name to make it look nice setter_fun_with_possible_contract.__name__ = property_name setter_fun_with_possible_contract.__module__ = object_type.__module__ setter_fun_with_possible_contract.__qualname__ = object_type.__name__ + '.' + property_name # __annotations__ # __doc__ # __dict__ # 6. Finally add the property to the class # WARNING : property_obj.setter(f) does absolutely nothing :) > we have to assign the result # setattr(object_type, property_name, property_obj.setter(f)) new_prop = property(fget=getter_fun, fset=setter_fun_with_possible_contract) # # specific for enforce: here we might wrap the overriden property setter on which enforce has already written # # something. # if hasattr(setter_fun_with_possible_contract, '__enforcer__'): # new_prop.__enforcer__ = setter_fun_with_possible_contract.__enforcer__ # DESIGN DECISION > although this would probably work, it is probably better to 'force' users to always use the # @autoprops annotation BEFORE any other annotation. This is now done in autoprops_decorate setattr(object_type, property_name, new_prop)
def _add_property(object_type, # type: Type[T] parameter, # type: Parameter pycontract=None, # type: Any validators=None # type: Any )
A method to dynamically add a property to a class with the optional given pycontract or validators. If the property getter and/or setter have been overridden, it is taken into account too. :param object_type: the class on which to execute. :param parameter: :param pycontract: :param validators: :return:
4.170465
4.185939
0.996303
def matches_property_name(fun): return callable(fun) and hasattr(fun, annotation) \ and getattr(fun, annotation) is value return matches_property_name
def _has_annotation(annotation, value)
Returns a function that can be used as a predicate in get_members, that
9.358498
8.163032
1.146449
property_name = parameter.name # -- check overridden getter for this property name overridden_getters = getmembers(object_type, predicate=_has_annotation(__GETTER_OVERRIDE_ANNOTATION, property_name)) if len(overridden_getters) > 0: if len(overridden_getters) > 1: raise DuplicateOverrideError('Getter is overridden more than once for attribute name : ' + property_name) # --use the overridden getter getter_fun = overridden_getters[0][1] # --check its signature s = signature(getter_fun) if not ('self' in s.parameters.keys() and len(s.parameters.keys()) == 1): raise IllegalGetterSignatureException('overridden getter must only have a self parameter, found ' + str(len(s.parameters.items()) - 1) + ' for function ' + str( getter_fun.__qualname__)) # --use the overridden getter ? property_obj = property(getter_fun) else: # -- generate the getter : def autoprops_generated_getter(self): return getattr(self, private_property_name) # -- use the generated getter getter_fun = autoprops_generated_getter try: annotations = getter_fun.__annotations__ except AttributeError: # python 2 pass else: annotations['return'] = parameter.annotation # add type hint to output declaration return getter_fun
def _get_getter_fun(object_type, # type: Type parameter, # type: Parameter private_property_name # type: str )
Utility method to find the overridden getter function for a given property, or generate a new one :param object_type: :param property_name: :param private_property_name: :return:
4.327716
4.337162
0.997822
# the property will have the same name than the constructor argument property_name = parameter.name overridden_setters = getmembers(object_type, _has_annotation(__SETTER_OVERRIDE_ANNOTATION, property_name)) if len(overridden_setters) > 0: # --check that we only have one if len(overridden_setters) > 1: raise DuplicateOverrideError('Setter is overridden more than once for attribute name : %s' % property_name) # --use the overridden setter setter_fun = overridden_setters[0][1] try: # python 2 setter_fun = setter_fun.im_func except AttributeError: pass # --find the parameter name and check the signature s = signature(setter_fun) p = [attribute_name for attribute_name, param in s.parameters.items() if attribute_name is not 'self'] if len(p) != 1: try: qname = setter_fun.__qualname__ except AttributeError: qname = setter_fun.__name__ raise IllegalSetterSignatureException('overridden setter must have only 1 non-self argument, found ' + '%s for function %s' '' % (len(s.parameters.items()) - 1, qname)) var_name = p[0] else: # --create the setter, equivalent of: # ** Dynamically compile a wrapper with correct argument name ** sig = Signature(parameters=[Parameter('self', kind=Parameter.POSITIONAL_OR_KEYWORD), parameter]) @with_signature(sig) def autoprops_generated_setter(self, **kwargs): setattr(self, private_property_name, kwargs.popitem()[1]) setter_fun = autoprops_generated_setter var_name = property_name return setter_fun, var_name
def _get_setter_fun(object_type, # type: Type parameter, # type: Parameter private_property_name # type: str )
Utility method to find the overridden setter function for a given property, or generate a new one :param object_type: :param property_name: :param property_type: :param private_property_name: :return:
4.407262
4.378376
1.006597
return autoprops_override_decorate(f, attribute=attribute, is_getter=True)
def getter_override(attribute=None, # type: str f=DECORATED )
A decorator to indicate an overridden getter for a given attribute. If the attribute name is None, the function name will be used as the attribute name. :param attribute: the attribute name for which the decorated function is an overridden getter :return:
16.32107
34.88512
0.467852
return autoprops_override_decorate(f, attribute=attribute, is_getter=False)
def setter_override(attribute=None, # type: str f=DECORATED )
A decorator to indicate an overridden setter for a given attribute. If the attribute name is None, the function name will be used as the attribute name. The @contract will still be dynamically added. :param attribute: the attribute name for which the decorated function is an overridden setter :return:
17.658104
34.472855
0.512232
# type: (...) -> Callable # Simply annotate the fact that this is a function attr_name = attribute or func.__name__ if is_getter: if hasattr(func, __GETTER_OVERRIDE_ANNOTATION): raise DuplicateOverrideError('Getter is overridden twice for attribute name : ' + attr_name) else: # func.__getter_override__ = attr_name setattr(func, __GETTER_OVERRIDE_ANNOTATION, attr_name) else: if hasattr(func, __SETTER_OVERRIDE_ANNOTATION): raise DuplicateOverrideError('Setter is overridden twice for attribute name : ' + attr_name) else: # func.__setter_override__ = attr_name setattr(func, __SETTER_OVERRIDE_ANNOTATION, attr_name) return func
def autoprops_override_decorate(func, # type: Callable attribute=None, # type: str is_getter=True # type: bool )
Used to decorate a function as an overridden getter or setter, without using the @getter_override or @setter_override annotations. If the overridden setter has no @contract, the contract will still be dynamically added. Note: this should be executed BEFORE @autoprops or autoprops_decorate(). :param func: the function on which to execute. Note that it won't be wrapped but simply annotated. :param attribute: the attribute name. If None, the function name will be used :param is_getter: True for a getter override, False for a setter override. :return:
2.618662
2.749561
0.952393
# 1- AUTHENTICATION if user is not None and secret is None: # using username and password # validate('user', user, instance_of=str) assert isinstance(user, str) # validate('pwd', pwd, instance_of=str) assert isinstance(pwd, str) g = Github(user, pwd) elif user is None and secret is not None: # or using an access token # validate('secret', secret, instance_of=str) assert isinstance(secret, str) g = Github(secret) else: raise ValueError("You should either provide username/password OR an access token") click.echo("Logged in as {user_name}".format(user_name=g.get_user())) # 2- CHANGELOG VALIDATION regex_pattern = "[\s\S]*[\n][#]+[\s]*(?P<title>[\S ]*%s[\S ]*)[\n]+?(?P<body>[\s\S]*?)[\n]*?(\n#|$)" % re.escape(tag) changelog_section = re.compile(regex_pattern) if changelog_file is not None: # validate('changelog_file', changelog_file, custom=os.path.exists, # help_msg="changelog file should be a valid file path") assert os.path.exists(changelog_file), "changelog file should be a valid file path" with open(changelog_file) as f: contents = f.read() match = changelog_section.match(contents).groupdict() if match is None or len(match) != 2: raise ValueError("Unable to find changelog section matching regexp pattern in changelog file.") else: title = match['title'] message = match['body'] else: title = tag message = '' # append footer if doc url is provided message += "\n\nSee [documentation page](%s) for details." % doc_url # 3- REPOSITORY EXPLORATION # validate('repo_slug', repo_slug, instance_of=str, min_len=1, help_msg="repo_slug should be a non-empty string") assert isinstance(repo_slug, str) and len(repo_slug) > 0, "repo_slug should be a non-empty string" repo = g.get_repo(repo_slug) # -- Is there a tag with that name ? try: tag_ref = repo.get_git_ref("tags/" + tag) except UnknownObjectException: raise ValueError("No tag with name %s exists in repository %s" % (tag, repo.name)) # -- Is there already a release with that tag name ? 
click.echo("Checking if release %s already exists in repository %s" % (tag, repo.name)) try: release = repo.get_release(tag) if release is not None: raise ValueError("Release %s already exists in repository %s. Please set overwrite to True if you wish to " "update the release (Not yet supported)" % (tag, repo.name)) except UnknownObjectException: # Release does not exist: we can safely create it. click.echo("Creating release %s on repo: %s" % (tag, repo.name)) click.echo("Release title: '%s'" % title) click.echo("Release message:\n--\n%s\n--\n" % message) repo.create_git_release(tag=tag, name=title, message=message, draft=False, prerelease=False) # add the asset file if needed if data_file is not None: release = None while release is None: release = repo.get_release(tag) release.upload_asset(path=data_file, label=path.split(data_file)[1], content_type="application/gzip")
def create_or_update_release(user, pwd, secret, repo_slug, changelog_file, doc_url, data_file, tag)
Creates or updates (TODO) a github release corresponding to git tag <TAG>.
2.81623
2.807973
1.00294
if include is not None and exclude is not None: raise ValueError("Only one of 'include' or 'exclude' argument should be provided.") validate('include', include, instance_of=(str, Sequence), enforce_not_none=False) validate('exclude', exclude, instance_of=(str, Sequence), enforce_not_none=False)
def validate_include_exclude(include, exclude)
Common validator for include and exclude arguments :param include: :param exclude: :return:
2.998163
3.209198
0.93424
if include is not None and exclude is not None: raise ValueError('Only one of \'include\' or \'exclude\' argument should be provided.') # win time by not doing this # check_var(include, var_name='include', var_types=[str, tuple], enforce_not_none=False) # check_var(exclude, var_name='exclude', var_types=[str, tuple], enforce_not_none=False) if attr_name is 'self': return False if exclude and attr_name in exclude: return False if not include or attr_name in include: return True else: return False
def is_attr_selected(attr_name, # type: str include=None, # type: Union[str, Tuple[str]] exclude=None # type: Union[str, Tuple[str]] )
decide whether an action has to be performed on the attribute or not, based on its name
3.608013
3.46001
1.042775
if allow_inheritance: return typ.__init__ else: # check that the constructor is really defined here if '__init__' in typ.__dict__: return typ.__init__ else: raise Exception('No explicit constructor was found for class ' + str(typ))
def get_constructor(typ, allow_inheritance=False # type: bool )
Utility method to return the unique constructor (__init__) of a type :param typ: a type :param allow_inheritance: if True, the constructor will be returned even if it is not defined in this class (inherited). By default this is set to False: an exception is raised when no constructor is explicitly defined in the class :return: the found constructor
4.292213
3.918195
1.095457
# type: (...) -> bool for member in typ.__dict__.values(): if hasattr(member, '__enforcer__'): raise AutoclassDecorationException('It seems that @runtime_validation decorator was applied to type <' + str(typ) + '> BEFORE ' + calling_decorator + '. This is not supported ' 'as it may lead to counter-intuitive behaviour, please change the order ' 'of the decorators on <' + str(typ) + '>')
def _check_known_decorators(typ, calling_decorator # type: str )
Checks that a given type is not already decorated by known decorators that may cause trouble. If so, it raises an Exception :return:
7.377251
8.327147
0.885928
if this_class_only: return method_name in vars(object_type) # or object_type.__dict__ else: try: method = getattr(object_type, method_name) except AttributeError: return False else: return method is not None and method is not getattr(object, method_name, None)
def method_already_there(object_type, method_name, this_class_only=False)
Returns True if method `method_name` is already implemented by object_type, that is, its implementation differs from the one in `object`. :param object_type: :param method_name: :param this_class_only: :return:
2.701776
2.733407
0.988428
return autoargs_decorate(f, include=include, exclude=exclude)
def autoargs(include=None, # type: Union[str, Tuple[str]] exclude=None, # type: Union[str, Tuple[str]] f=DECORATED )
Defines a decorator with parameters, to automatically assign the inputs of a function to self PRIOR to executing the function. In other words: ``` @autoargs def myfunc(a): print('hello') ``` will create the equivalent of ``` def myfunc(a): self.a = a print('hello') ``` Initial code from http://stackoverflow.com/questions/3652851/what-is-the-best-way-to-do-automatic-attribute-assignment-in-python-and-is-it-a#answer-3653049 :param include: a tuple of attribute names to include in the auto-assignment. If None, all arguments will be included by default :param exclude: a tuple of attribute names to exclude from the auto-assignment. In such case, include should be None :return:
6.422693
9.286619
0.691607
# type: (...) -> Callable # (0) first check parameters validate_include_exclude(include, exclude) # (1) then retrieve function signature # attrs, varargs, varkw, defaults = getargspec(func) func_sig = signature(func) # check that include/exclude dont contain names that are incorrect if include is not None: incorrect = set([include] if isinstance(include, str) else include) - set(func_sig.parameters.keys()) if len(incorrect) > 0: raise ValueError("@autoargs definition exception: include contains '%s' that is/are " "not part of signature for %s" % (incorrect, func)) if exclude is not None: incorrect = set([exclude] if isinstance(exclude, str) else exclude) - set(func_sig.parameters.keys()) if len(incorrect) > 0: raise ValueError("@autoargs definition exception: exclude contains '%s' that is/are " "not part of signature for %s" % (incorrect, func)) # TODO this should be in @autoslots decorator at class level, not here. # (2) Optionally lock the class only for the provided fields # Does not work for the moment. Besides locking fields seems to have issues with pickle serialization # so we'd rather not propose this option. # # See 'attrs' project for this kind of advanced features https://github.com/python-attrs/attrs # # if lock_class_fields: # if signature_varkw: # raise Exception('cant lock field names with variable kwargs') # else: # object_type = get_class_that_defined_method(func) # if include: # fields = include # else: # fields = signature_attrs[1:] # if signature_varargs: # fields.append(signature_varargs) # if exclude: # for a in exclude: # fields.remove(a) # # # right now, doesnot work # _lock_fieldnames_class(object_type, field_names=tuple(fields)) # (3) Finally, create a wrapper around the function so that all attributes included/not excluded are # set to self BEFORE executing the function. 
@wraps(func) def init_wrapper(self, *args, **kwargs): # match the received arguments with the signature to know who is who, and add default values to get a full list bound_values = func_sig.bind(self, *args, **kwargs) apply_defaults(bound_values) # Assign to self the ones that needs to for att_name, att_value in bound_values.arguments.items(): if is_attr_selected(att_name, include=include, exclude=exclude): # value = a normal value, or cur_kwargs as a whole setattr(self, att_name, att_value) # finally execute the constructor function return func(self, *args, **kwargs) # return wrapper return init_wrapper
def autoargs_decorate(func, # type: Callable include=None, # type: Union[str, Tuple[str]] exclude=None # type: Union[str, Tuple[str]] )
Defines a decorator with parameters, to automatically assign the inputs of a function to self PRIOR to executing the function. This is the inline way to apply the decorator ``` myfunc2 = autoargs_decorate(myfunc) ``` See autoargs for details. :param func: the function to wrap :param include: a tuple of attribute names to include in the auto-assignment. If None, all arguments will be included by default :param exclude: a tuple of attribute names to exclude from the auto-assignment. In such case, include should be None :return:
5.089844
5.140944
0.99006
self.__items.append((key, value)) try: dict_getitem(self, key).append(value) except KeyError: dict_setitem(self, key, [value])
def append(self, key, value)
Adds a (name, value) pair, doesn't overwrite the value if it already exists.
3.958762
3.98728
0.992848
if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) iterable = args[0] if args else None if iterable: if isinstance(iterable, Mapping) or hasattr(iterable, 'items'): for key, value in iterable.items(): self.append(key, value) elif hasattr(iterable, 'keys'): for key in iterable.keys(): self.append(key, iterable[key]) else: for key, value in iterable: self.append(key, value) for key, value in kwargs.items(): self.append(key, value)
def extend(self, *args, **kwargs)
Add key value pairs for an iterable.
1.762839
1.641959
1.073619
def check_func(self, key, new_item, instance=0): if key not in self.keys(): raise KeyError("%s not a key in label" % (key)) if not isinstance(new_item, (list, OrderedMultiDict)): raise TypeError("The new item must be a list or PVLModule") if isinstance(new_item, OrderedMultiDict): new_item = list(new_item) return func(self, key, new_item, instance) return check_func
def __insert_wrapper(func)
Make sure the arguments given to the insert methods are correct
4.211362
4.011349
1.049862
if instance == 0: # Index method will return the first occurence of the key index = self.keys().index(key) else: occurrence = -1 for index, k in enumerate(self.keys()): if k == key: occurrence += 1 if occurrence == instance: # Found the key and the correct occurence of the key break if occurrence != instance: # Gone through the entire list of keys and the instance number # given is too high for the number of occurences of the key raise ValueError( ( "Cannot insert before/after the %d " "instance of the key '%s' since there are " "only %d occurences of the key" % ( instance, key, occurrence) )) return index
def _get_index_for_insert(self, key, instance)
Get the index of the key to insert before or after
4.453219
4.228964
1.053028
index = self._get_index_for_insert(key, instance) index = index + 1 if is_after else index self.__items = self.__items[:index] + new_item + self.__items[index:] # Make sure indexing works with new items for new_key, new_value in new_item: if new_key in self: value_list = [val for k, val in self.__items if k == new_key] dict_setitem(self, new_key, value_list) else: dict_setitem(self, new_key, [new_value])
def _insert_item(self, key, new_item, instance, is_after)
Insert a new item before or after another item
2.963186
2.917529
1.015649
self._insert_item(key, new_item, instance, True)
def insert_after(self, key, new_item, instance=0)
Insert an item after a key
4.843205
4.458274
1.086341
self._insert_item(key, new_item, instance, False)
def insert_before(self, key, new_item, instance=0)
Insert an item before a key
4.949448
4.738151
1.044595
buf = self._read_buf pos = self._read_pos end = pos + n if end <= len(buf): # Fast path: the data to read is fully buffered. self._read_pos += n return self._update_pos(buf[pos:end]) # Slow path: read from the stream until enough bytes are read, # or until an EOF occurs or until read() would block. wanted = max(self.buffer_size, n) while len(buf) < end: chunk = self.raw.read(wanted) if not chunk: break buf += chunk self._read_buf = buf[end:] # Save the extra data in the buffer. self._read_pos = 0 return self._update_pos(buf[pos:end])
def read(self, n)
Read n bytes. Returns exactly n bytes of data unless the underlying raw IO stream reaches EOF.
3.533336
3.466053
1.019412
pos = self._read_pos self._read_pos = min(len(self.raw), pos + n) return self.raw[pos:self._read_pos]
def read(self, n)
Read n bytes. Returns exactly n bytes of data unless the underlying raw IO stream reaches EOF.
3.227504
2.922743
1.104272
pos = self._read_pos end = pos + n return self.raw[pos:end]
def peek(self, n)
Returns buffered bytes without advancing the position. The argument indicates a desired minimal number of bytes; we do at most one raw read to satisfy it. We never return more than self.buffer_size.
6.457321
5.767959
1.119516
statements = [] while 1: self.skip_whitespace_or_comment(stream) if has_end(stream): return statements statement = self.parse_statement(stream) if isinstance(statement, EmptyValueAtLine): if len(statements) == 0: self.raise_unexpected(stream) self.skip_whitespace_or_comment(stream) value = self.parse_value(stream) last_statement = statements.pop(-1) fixed_last = ( last_statement[0], statement ) statements.append(fixed_last) statements.append((last_statement[1], value)) else: statements.append(statement)
def parse_block(self, stream, has_end)
PVLModuleContents ::= (Statement | WSC)* EndStatement? AggrObject ::= BeginObjectStmt AggrContents EndObjectStmt AggrGroup ::= BeginGroupStmt AggrContents EndGroupStmt AggrContents := WSC Statement (WSC | Statement)*
3.37974
3.453728
0.978577
self.skip_whitespace_or_comment(stream) self.optional(stream, self.statement_delimiter)
def skip_statement_delimiter(self, stream)
Ensure that a Statement Delimiter consists of one semicolon, optionally preceded by multiple White Spaces and/or Comments, OR one or more Comments and/or White Space sequences. StatementDelim ::= WSC (SemiColon | WhiteSpace | Comment) | EndProvidedOctetSeq
6.323692
4.837947
1.307102
def parse_statement(self, stream):
    """Parse a single statement.

    Grammar::

        Statement ::= AggrGroup | AggrObject | AssignmentStmt

    Returns:
        The parsed statement, or the broken-assignment marker when only
        a bare assignment symbol is found.

    Raises:
        Via ``raise_unexpected`` when nothing recognizable follows.
    """
    # Order matters: groups and objects start with keywords that would
    # otherwise also parse as a plain assignment name.
    dispatch = (
        (self.has_group, self.parse_group),
        (self.has_object, self.parse_object),
        (self.has_assignment, self.parse_assignment),
    )
    for predicate, parse in dispatch:
        if predicate(stream):
            return parse(stream)
    if self.has_assignment_symbol(stream):
        # Bare '=' with no name: report it against the previous line.
        return self.broken_assignment(stream.lineno - 1)
    self.raise_unexpected(stream)
3.294953
3.042736
1.082892
def has_end(self, stream):
    """Return True when the stream is positioned at an end statement.

    Grammar::

        EndStatement ::= EndKeyword
                         (SemiColon | WhiteSpace | Comment | EndProvidedOctetSeq)

    An end keyword only counts if it is followed by EOF, whitespace, a
    comment, or a statement delimiter — otherwise it is just a prefix of
    a longer token.  Plain EOF also ends the block.
    """
    for token in self.end_tokens:
        if self.has_next(token, stream):
            offset = len(token)
            followed_ok = (
                self.has_eof(stream, offset)
                or self.has_whitespace(stream, offset)
                or self.has_comment(stream, offset)
                or self.has_next(self.statement_delimiter, stream, offset)
            )
            if followed_ok:
                return True
    return self.has_eof(stream)
3.213764
2.959201
1.086024
def parse_group(self, stream):
    """Parse a Group aggregation block.

    Grammar::

        BeginGroupStmt ::= BeginGroupKeywd WSC AssignmentSymbol
                           WSC BlockName StatementDelim

    If the paired End Group statement names a block, it must match this
    block's name (checked by ``parse_end_assignment``).

    Returns:
        tuple: ``(name, PVLGroup(statements))``.
    """
    self.expect_in(stream, self.begin_group_tokens)
    self.ensure_assignment(stream)
    block_name = self.next_token(stream)
    self.skip_statement_delimiter(stream)
    body = self.parse_block(stream, self.has_end_group)
    self.expect_in(stream, self.end_group_tokens)
    self.parse_end_assignment(stream, block_name)
    self.skip_statement_delimiter(stream)
    return block_name.decode('utf-8'), PVLGroup(body)
4.761158
4.640353
1.026034
def parse_object(self, stream):
    """Parse an Object aggregation block.

    Grammar::

        BeginObjectStmt ::= BeginObjectKeywd WSC AssignmentSymbol
                            WSC BlockName StatementDelim

    If the paired End Object statement names a block, it must match this
    block's name (checked by ``parse_end_assignment``).

    Returns:
        tuple: ``(name, PVLObject(statements))``.
    """
    self.expect_in(stream, self.begin_object_tokens)
    self.ensure_assignment(stream)
    block_name = self.next_token(stream)
    self.skip_statement_delimiter(stream)
    body = self.parse_block(stream, self.has_end_object)
    self.expect_in(stream, self.end_object_tokens)
    self.parse_end_assignment(stream, block_name)
    self.skip_statement_delimiter(stream)
    return block_name.decode('utf-8'), PVLObject(body)
4.881321
4.752299
1.027149
def parse_assignment(self, stream):
    """Parse one assignment statement.

    Grammar::

        AssignmentStmt ::= Name WSC AssignmentSymbol WSC Value StatementDelim

    If the assignment symbol is immediately followed by a block/stream
    terminator or a statement delimiter, the value is missing and a
    broken-assignment marker is recorded for this line instead.

    Returns:
        tuple: ``(name, value)``.
    """
    lineno = stream.lineno
    name = self.next_token(stream)
    self.ensure_assignment(stream)
    # NOTE: any() over a pre-built tuple evaluates every probe eagerly,
    # matching the original evaluation order of the has_* checks.
    value_missing = any((
        self.has_end_group(stream),
        self.has_end_object(stream),
        self.has_end(stream),
        self.has_next(self.statement_delimiter, stream, 0)))
    if value_missing:
        value = self.broken_assignment(lineno)
        self.skip_whitespace_or_comment(stream)
    else:
        value = self.parse_value(stream)
        self.skip_statement_delimiter(stream)
    return name.decode('utf-8'), value
5.142797
4.927813
1.043627
def parse_value(self, stream):
    """Parse a value, optionally followed by a units expression.

    Grammar::

        Value ::= (SimpleValue | Set | Sequence) WSC UnitsExpression?

    Returns:
        The parsed value; wrapped in ``Units`` when a units expression
        follows it.
    """
    if self.has_sequence(stream):
        parsed = self.parse_sequence(stream)
    elif self.has_set(stream):
        parsed = self.parse_set(stream)
    else:
        parsed = self.parse_simple_value(stream)
    self.skip_whitespace_or_comment(stream)
    if self.has_units(stream):
        return Units(parsed, self.parse_units(stream))
    return parsed
2.707927
2.13478
1.268481
def parse_iterable(self, stream, start, end):
    """Parse a delimited, separator-joined collection of values.

    Grammar::

        Sequence ::= SequenceStart WSC SequenceValue? WSC SequenceEnd
        Set      ::= SetStart WSC SequenceValue? WSC SetEnd
        SequenceValue ::= Value (WSC SeparatorSymbol WSC Value)*

    Args:
        stream: input stream.
        start: opening delimiter token.
        end: closing delimiter token.

    Returns:
        list: the parsed values (empty for ``start end`` with nothing inside).
    """
    self.expect(stream, start)
    self.skip_whitespace_or_comment(stream)
    items = []
    # Empty collection: closing delimiter immediately follows the opener.
    if self.has_next(end, stream):
        self.expect(stream, end)
        return items
    while True:
        self.skip_whitespace_or_comment(stream)
        items.append(self.parse_value(stream))
        self.skip_whitespace_or_comment(stream)
        if self.has_next(end, stream):
            self.expect(stream, end)
            return items
        # ``seporator`` [sic] is the attribute name defined on this class.
        self.expect(stream, self.seporator)
2.46241
2.428566
1.013936
def parse_units(self, stream):
    """Parse a units expression and return its inner text.

    Grammar::

        UnitsExpression ::= UnitsStart WhiteSpace* UnitsValue
                            WhiteSpace* UnitsEnd

    Returns:
        str: the units value with surrounding whitespace stripped.

    Raises:
        Via ``raise_unexpected_eof`` if EOF is hit before the closing token.
    """
    self.expect(stream, self.begin_units)
    # bytearray avoids re-allocating an immutable bytes object per char.
    buf = bytearray()
    while not self.has_next(self.end_units, stream):
        if self.has_eof(stream):
            self.raise_unexpected_eof(stream)
        buf += stream.read(1)
    self.expect(stream, self.end_units)
    return bytes(buf).strip(b''.join(self.whitespace)).decode('utf-8')
3.863584
3.770118
1.024791
def parse_simple_value(self, stream):
    """Parse a scalar value.

    Grammar::

        SimpleValue ::= Integer | FloatingPoint | Exponential | BinaryNum
                      | OctalNum | HexadecimalNum | DateTimeValue
                      | QuotedString | UnquotedString

    The probe order below is significant (e.g. radix numbers before
    decimals).  Falls back to a broken-assignment marker at an end
    statement, and raises on anything unrecognizable.
    """
    # ``unquoated`` [sic] matches the helper names defined on this class.
    candidates = (
        (self.has_quoted_string, self.parse_quoted_string),
        (self.has_binary_number, self.parse_binary_number),
        (self.has_octal_number, self.parse_octal_number),
        (self.has_decimal_number, self.parse_decimal_number),
        (self.has_hex_number, self.parse_hex_number),
        (self.has_unquoated_string, self.parse_unquoated_string),
    )
    for predicate, parse in candidates:
        if predicate(stream):
            return parse(stream)
    if self.has_end(stream):
        return self.broken_assignment(stream.lineno)
    self.raise_unexpected(stream)
2.202741
2.079443
1.059294
def parse_radix(self, radix, chars, stream):
    """Parse a radix-notation number such as ``2#1010#`` or ``16#FF#``.

    Grammar::

        BinaryNum       ::= [+-]? '2'  RadixSymbol [0-1]+      RadixSymbol
        OctalNum        ::= [+-]? '8'  RadixSymbol [0-7]+      RadixSymbol
        HexadecimalNum  ::= [+-]? '16' RadixSymbol [0-9a-zA-Z]+ RadixSymbol

    Args:
        radix (int): the numeric base (2, 8 or 16).
        chars (bytes): the digit characters legal for this base.
        stream: input stream positioned at the (optionally signed) literal.

    Returns:
        int: the signed integer value.

    Raises:
        Via ``raise_unexpected``/``raise_unexpected_eof`` on an illegal
        digit, EOF, or an empty digit sequence.
    """
    digits = b''
    sign = self.parse_sign(stream)
    # ``radix_symbole`` [sic] is the attribute name defined on this class.
    self.expect(stream, b(str(radix)) + self.radix_symbole)
    # A second sign may appear after the opening radix symbol.
    sign *= self.parse_sign(stream)
    while not self.has_next(self.radix_symbole, stream):
        # Fixed: the original bound this to ``next``, shadowing the builtin.
        ch = stream.read(1)
        if not ch:
            self.raise_unexpected_eof(stream)
        if ch not in chars:
            self.raise_unexpected(stream, ch)
        digits += ch
    if not digits:
        self.raise_unexpected(stream, self.radix_symbole)
    self.expect(stream, self.radix_symbole)
    return sign * int(digits, radix)
3.155536
3.163239
0.997565
def varchar(self, field=None):
    """Return a random chunk of text no longer than ``field.max_length``.

    Args:
        field: a model field; must be provided and must expose ``max_length``.

    Returns:
        str: random characters, filtered through ``get_allowed_value``.
    """
    assert field is not None, "The field parameter must be passed to the 'varchar' method."
    max_length = field.max_length

    def source():
        # Pick a length in [1, max_length], then fill with allowed chars.
        length = random.choice(range(1, max_length + 1))
        return "".join(random.choice(general_chars) for _ in xrange(length))

    return self.get_allowed_value(source, field)
5.177127
4.731478
1.094188
def simple_pattern(self, pattern, field=None):
    """Fill the field from a simple pattern.

    In the pattern, ``#`` is replaced with a random digit and ``?`` with
    a random letter (delegated to ``faker.bothify``).

    Args:
        pattern (str): template string.
        field: optional model field used for value filtering.
    """
    def source():
        return self.faker.bothify(pattern)

    return self.get_allowed_value(source, field)
16.451748
10.547418
1.559789
def datetime(self, field=None, val=None):
    """Return a random datetime.

    If ``val`` is given, the result lies within roughly two years either
    side of it; otherwise it is drawn from the whole epoch range.

    Args:
        field: optional model field used for value filtering.
        val (datetime.datetime or None): anchor datetime.
    """
    two_years = 365 * 24 * 3600 * 2

    if val is None:
        def source():
            tzinfo = get_default_timezone() if settings.USE_TZ else None
            return datetime.fromtimestamp(randrange(1, 2100000000), tzinfo)
    else:
        def source():
            tzinfo = get_default_timezone() if settings.USE_TZ else None
            # NOTE(review): strftime("%s") is a platform-dependent (glibc)
            # extension that ignores tzinfo — confirm acceptable here.
            anchor = int(val.strftime("%s"))
            return datetime.fromtimestamp(anchor + randrange(-two_years, two_years), tzinfo)

    return self.get_allowed_value(source, field)
2.947306
2.842916
1.036719
return self.datetime(field=field, val=val).date()
def date(self, field=None, val=None)
Like datetime, but truncated to be a date only
5.644554
4.073215
1.385773
def lorem(self, field=None, val=None):
    """Return lorem-ipsum text.

    When ``val`` is provided, the output has exactly the same total
    length and the same pattern of line breaks as ``val``.  An empty
    ``val`` short-circuits to ``''``.
    """
    if val == '':
        return ''

    if val is None:
        def source():
            return ' '.join(self.faker.sentences())
    else:
        def _fill(length):
            # Accumulate whole sentences, then cut to the exact length.
            text = ""
            while len(text) < length:
                text += ' %s' % self.faker.sentence()
            return text[:length]

        def source():
            # Mirror val's line structure: one filler per original line.
            lines = val.split("\n")
            return "\n".join(_fill(len(line)) for line in lines)

    return self.get_allowed_value(source, field)
5.72912
5.289328
1.083147
def unique_lorem(self, field=None, val=None):
    """Return lorem-ipsum text guaranteed to be unique.

    Appends a per-field monotonically increasing integer suffix to the
    output of :meth:`lorem`; if the field has a ``max_length``, the text
    is trimmed from the front so the unique suffix always survives.
    """
    base = self.lorem(field, val)
    suffix = str(self.unique_suffixes[field])
    candidate = base + suffix
    max_length = getattr(field, 'max_length', None)
    if max_length is not None:
        # Trim from the front: the suffix at the end carries uniqueness.
        candidate = candidate[-max_length:]
    self.unique_suffixes[field] += 1
    return candidate
3.374242
2.982387
1.13139
def alter_object(self, obj):
    """Run every registered replacer over ``obj``'s attributes in place.

    Each entry of ``self.replacers`` is ``(attname, field, replacer)``;
    the replacer receives the current value and its return value is
    written back onto the object.
    """
    for attname, field, replacer in self.replacers:
        old_value = getattr(obj, attname)
        setattr(obj, attname, replacer(self, obj, field, old_value))
4.417346
5.657707
0.780766
def uuid(anon, obj, field, val):
    """Replace the value with a random UUID string (current value ignored)."""
    faker = anon.faker
    return faker.uuid(field=field)
17.296112
13.706557
1.261886
def varchar(anon, obj, field, val):
    """Replace the value with random varchar data sized for ``field``."""
    faker = anon.faker
    return faker.varchar(field=field)
22.895212
15.764924
1.452288
def bool(anon, obj, field, val):
    """Replace the value with a random boolean (True/False)."""
    faker = anon.faker
    return faker.bool(field=field)
20.608763
10.82688
1.903481
def integer(anon, obj, field, val):
    """Replace the value with a random integer (Django IntegerField)."""
    faker = anon.faker
    return faker.integer(field=field)
19.942253
11.217731
1.777744
def positive_integer(anon, obj, field, val):
    """Replace the value with a random positive integer (PositiveIntegerField)."""
    faker = anon.faker
    return faker.positive_integer(field=field)
16.560415
10.419806
1.589321
def small_integer(anon, obj, field, val):
    """Replace the value with a random small integer (SmallIntegerField)."""
    faker = anon.faker
    return faker.small_integer(field=field)
13.121313
9.030035
1.453074
def positive_small_integer(anon, obj, field, val):
    """Replace the value with a random positive small integer (PositiveSmallIntegerField)."""
    faker = anon.faker
    return faker.positive_small_integer(field=field)
11.221386
7.55276
1.485733