code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
---|---|---|---|---|---|
'''
Add lines to ViewList, for further rendering.
:param value: --line that would be added to render list
:type value: str, unicode
:param indent_depth: --value that show indent from left border
:type indent_depth: integer
:return:
'''
indent_depth = indent_depth
self.__view_list.append(self.indent * indent_depth + value, '<openapi>')
|
def write(self, value, indent_depth=0)
|
Add lines to ViewList, for further rendering.
:param value: --line that would be added to render list
:type value: str, unicode
:param indent_depth: --value that show indent from left border
:type indent_depth: integer
:return:
| 11.258512 | 2.669243 | 4.217867 |
'''
Main function for prepare and render OpenAPI specification
:return:
'''
# Loading yaml
self.load_yaml()
# Print paths from schema
section_title = '**API Paths**'
self.write(section_title)
self.write('=' * len(section_title))
self.print_paths()
# Print models
section_title = '**Schemas Description**'
self.write(section_title)
self.write('=' * len(section_title))
self.print_schemas()
# Render by sphinx
node = nodes.section()
node.document = self.state.document
nested_parse_with_titles(self.state, self.__view_list, node)
return node.children
|
def run(self)
|
Main function for prepare and render OpenAPI specification
:return:
| 5.204007 | 4.06233 | 1.28104 |
'''
Cycle for prepare information about paths
:return:
'''
for path_key, path_value in self.paths.items():
# Handler for request in path
self.current_path = path_key
for request_key, request_value in path_value.items():
if request_key == 'parameters':
continue
self.get_main_title(path_key, request_key)
self.get_description(request_value)
self.get_status_code_and_schema_rst(request_value['responses'])
self.get_params(path_value['parameters'], 'param')
self.get_params(request_value['parameters'], 'query')
|
def print_paths(self)
|
Cycle for prepare information about paths
:return:
| 4.939641 | 4.099397 | 1.204968 |
'''
Print all schemas, one by one
:return:
'''
self.indent_depth += 1
for i in self.definitions:
def_name = i.split('/')[-1]
self.write('.. _{}:'.format(def_name))
self.write('')
self.write('{} Schema'.format(def_name))
self.write('{}'.format('`' * (len(def_name) + 7)))
self.write('')
self.write('.. code-block:: json', self.indent_depth)
self.indent_depth += 1
self.write('')
self.definition_rst(def_name)
self.indent_depth -= 1
self.write('')
self.write('')
self.indent_depth -= 1
|
def print_schemas(self)
|
Print all schemas, one by one
:return:
| 3.139249 | 2.794955 | 1.123184 |
'''
Get title, from request type and path
:param path_name: --path for create title
:type path_name: str, unicode
:param request_name: --name of request
:type request_name: str, unicode
:return:
'''
main_title = '.. http:{}:: {}'.format(request_name, path_name)
self.write(main_title)
self.write('')
|
def get_main_title(self, path_name, request_name)
|
Get title, from request type and path
:param path_name: --path for create title
:type path_name: str, unicode
:param request_name: --name of request
:type request_name: str, unicode
:return:
| 5.315701 | 2.623356 | 2.026298 |
'''
Function for prepare information about responses with example, prepare only
responses with status code from `101` to `299`
:param responses: -- dictionary that contains responses, with status code as key
:type responses: dict
:return:
'''
for status_code, response_schema in responses.items():
status_code = int(status_code)
schema = response_schema.get('schema', None)
status = HTTP_STATUS_CODES.get(status_code, None)
if status is None or not (100 < status_code < 300):
continue
self.write('**Example Response**', 1)
self.write('')
self.write('.. code-block:: http', 1)
self.write('')
self.write('HTTP/1.1 {} {}'.format(status_code, status), 2)
self.write('Vary: {}'.format(response_schema['description']), 2)
self.write('Content-Type: application/json', 2)
self.write('')
if schema:
self.schema_handler(schema)
else:
self.write('{}', self.indent_depth)
|
def get_status_code_and_schema_rst(self, responses)
|
Function for prepare information about responses with example, prepare only
responses with status code from `101` to `299`
:param responses: -- dictionary that contains responses, with status code as key
:type responses: dict
:return:
| 3.888865 | 2.377455 | 1.635726 |
'''
Function prepare body of response with examples and create detailed information
about response fields
:param schema: --dictionary with information about answer
:type schema: dict
:return:
'''
dict_for_render = schema.get('properties', dict()).items()
if schema.get('$ref', None):
def_name = schema.get('$ref').split('/')[-1]
dict_for_render = self.definitions[def_name].get('properties', dict()).items()
elif schema.get('properties', None) is None:
return ''
answer_dict = dict()
json_dict = dict()
for opt_name, opt_value in dict_for_render:
var_type = opt_value.get('format', None) or opt_value.get('type', None) or 'object'
json_name = self.indent + ':jsonparameter {} {}:'.format(var_type, opt_name)
json_dict[json_name] = self.get_json_props_for_response(var_type, opt_value)
answer_dict[opt_name] = self.get_response_example(opt_name, var_type, opt_value)
if var_type == 'string':
answer_dict[opt_name] = answer_dict[opt_name].format(opt_name)
self.write('')
for line in json.dumps(answer_dict, indent=4).split('\n'):
self.write(line, self.indent_depth)
self.write('')
for json_param_name, json_param_value in json_dict.items():
desc = '{}{}'.format(
json_param_value['title'], json_param_value['props_str']
) or 'None'
self.write(json_param_name + ' ' + desc)
|
def schema_handler(self, schema)
|
Function prepare body of response with examples and create detailed information
about response fields
:param schema: --dictionary with information about answer
:type schema: dict
:return:
| 3.863075 | 2.947994 | 1.310408 |
'''
Prepare JSON section with detailed information about response
:param var_type: --contains variable type
:type var_type: str, unicode
:param option_value: --dictionary that contains information about property
:type option_value: dict
:return: dictionary that contains, title and all properties of field
:rtype: dict
'''
props = list()
for name, value in option_value.items():
if var_type in ['dynamic', 'select2']:
break
elif name in ['format', 'title', 'type']:
continue
elif isinstance(value, dict) and value.get('$ref', None):
props.append(':ref:`{}`'.format(value['$ref'].split('/')[-1]))
elif '$ref' in name:
props.append(':ref:`{}`'.format(value.split('/')[-1]))
elif var_type == 'autocomplete':
props.append('Example values: ' + ', '.join(value))
else:
props.append('{}={}'.format(name, value))
if len(props):
props_str = '(' + ', '.join(props) + ')'
else:
props_str = ''
return dict(props_str=props_str, title=option_value.get('title', ''))
|
def get_json_props_for_response(self, var_type, option_value)
|
Prepare JSON section with detailed information about response
:param var_type: --contains variable type
:type var_type: str, unicode
:param option_value: --dictionary that contains information about property
:type option_value: dict
:return: dictionary that contains, title and all properties of field
:rtype: dict
| 3.898261 | 2.380895 | 1.637309 |
'''
Depends on type of variable, return string with example
:param opt_name: --option name
:type opt_name: str,unicode
:param var_type: --type of variable
:type var_type: str, unicode
:param opt_values: --dictionary with properties of this variable
:type opt_values: dict
:return: example for `var_type` variable
:rtype: str, unicode
'''
if opt_name == 'previous' and var_type == 'uri':
result = None
elif var_type == 'uri':
params = {i.group(0): 1 for i in self.find_param.finditer(self.current_path)}
result = self.type_dict[var_type].format(self.current_path.format(**params))
if opt_name == 'next':
result += '?limit=1&offset=1'
elif opt_name == 'count' and var_type == 'integer':
result = 2
elif var_type == 'array':
items = opt_values.get('items', dict()).get('$ref', None)
item = 'array_example'
if items:
item = self.get_object_example(items.split('/')[-1])
result = [item]
elif var_type == 'autocomplete':
result = opt_values.get('enum', list())[0]
elif var_type in [None, 'object']:
def_name = opt_values.get('$ref').split('/')[-1]
result = self.get_object_example(def_name)
elif var_type =='select2':
def_name = opt_values['additionalProperties']['model']['$ref'].split('/')[-1]
value_field_name = opt_values['additionalProperties']['value_field']
def_model = self.definitions[def_name].get('properties')
value_field = def_model.get(value_field_name, None)
var_type = value_field.get('format', None) or value_field.get('type', None)
result = self.get_response_example(opt_name, var_type, def_model)
else:
var_type = var_type.replace('-', '_')
result = opt_values.get('default', None) or self.type_dict[var_type]
return result
|
def get_response_example(self, opt_name, var_type, opt_values)
|
Depends on type of variable, return string with example
:param opt_name: --option name
:type opt_name: str,unicode
:param var_type: --type of variable
:type var_type: str, unicode
:param opt_values: --dictionary with properties of this variable
:type opt_values: dict
:return: example for `var_type` variable
:rtype: str, unicode
| 3.131265 | 2.540731 | 1.232427 |
'''
Create example for response, from object structure
:param def_name: --definition name of structure
:type def_name: str, unicode
:return: example of object
:rtype: dict
'''
def_model = self.definitions[def_name]
example = dict()
for opt_name, opt_value in def_model.get('properties', dict()).items():
var_type = opt_value.get('format', None) or opt_value.get('type', None)
example[opt_name] = self.get_response_example(opt_name, var_type, opt_value)
if var_type == 'string':
example[opt_name] = example[opt_name].format(opt_name)
return example
|
def get_object_example(self, def_name)
|
Create example for response, from object structure
:param def_name: --definition name of structure
:type def_name: str, unicode
:return: example of object
:rtype: dict
| 3.601798 | 2.25289 | 1.598746 |
'''
Prepare and write information about definition
:param definition: --name of definition that would be prepared for render
:type definition: str, unicode
:param spec_path: --path to definitions
:type spec_path: str, unicode
:return:
'''
spec_path = spec_path or self.models_path
definitions = self.spec[spec_path]
definition_property = definitions[definition]['properties'].copy()
if not definition_property:
self.write('{}', self.indent_depth)
return
self.indent_depth += 1
definition_property = self.find_nested_models(definition_property, definitions)
json_str = json.dumps(definition_property, indent=4)
for line in json_str.split('\n'):
self.write(line, self.indent_depth)
self.indent_depth -= 1
|
def definition_rst(self, definition, spec_path=None)
|
Prepare and write information about definition
:param definition: --name of definition that would be prepared for render
:type definition: str, unicode
:param spec_path: --path to definitions
:type spec_path: str, unicode
:return:
| 4.188241 | 2.663834 | 1.57226 |
'''
Prepare dictionary with reference to another definitions, create one dictionary
that contains full information about model, with all nested reference
:param model: --dictionary that contains information about model
:type model: dict
:param definitions: --dictionary that contains copy of all definitions
:type definitions: dict
:return: dictionary with all nested reference
:rtype: dict
'''
for key, value in model.items():
if isinstance(value, dict):
model[key] = self.find_nested_models(value, definitions)
elif key == '$ref':
def_name = value.split('/')[-1]
def_property = definitions[def_name]['properties']
return self.find_nested_models(def_property, definitions)
return model
|
def find_nested_models(self, model, definitions)
|
Prepare dictionary with reference to another definitions, create one dictionary
that contains full information about model, with all nested reference
:param model: --dictionary that contains information about model
:type model: dict
:param definitions: --dictionary that contains copy of all definitions
:type definitions: dict
:return: dictionary with all nested reference
:rtype: dict
| 4.145543 | 1.682195 | 2.464365 |
'''
Prepare and add for further render parameters.
:param params: --dictionary with parameters
:type params: dict
:param name_request: --type of the parameters
:type name_request: str, unicode
:return:
'''
self.write('')
for elem in params:
request_type = elem['type'] if elem.get('type', None) else 'schema'
name = elem['name']
if elem.get('required', None):
name += '(required)'
schema = elem.get('schema', None)
name = ':{} {} {}:'.format(name_request, request_type, name)
if schema:
definition = schema['$ref'].split('/')[-1]
self.write(name + ' :ref:`{}`'.format(definition), 1)
self.write('')
else:
desc = elem.get('description', '')
self.write(name)
self.write('{}'.format(desc), self.indent_depth + 1)
self.write('')
|
def get_params(self, params, name_request)
|
Prepare and add for further render parameters.
:param params: --dictionary with parameters
:type params: dict
:param name_request: --type of the parameters
:type name_request: str, unicode
:return:
| 4.439259 | 3.072016 | 1.445064 |
translation.activate(trans)
config = loader.get_template(name)
result = config.render(data).replace('\r', '')
translation.deactivate()
return result
|
def get_render(name, data, trans='en')
|
Render string based on template
:param name: -- full template name
:type name: str,unicode
:param data: -- dict of rendered vars
:type data: dict
:param trans: -- translation for render. Default 'en'.
:type trans: str,unicode
:return: -- rendered string
:rtype: str,unicode
| 3.692657 | 6.471908 | 0.570567 |
result = self.fd.write(wr_string)
self.fd.flush()
return result
|
def write(self, wr_string)
|
Write to file and flush
:param wr_string: -- writable string
:type wr_string: str
:return: None
:rtype: None
| 4.029923 | 4.2512 | 0.94795 |
# pylint: disable=access-member-before-definition
if hasattr(cls, '__django_settings__'):
return getattr(cls.__django_settings__, name, default)
from django.conf import settings
cls.__django_settings__ = settings
return cls.get_django_settings(name)
|
def get_django_settings(cls, name, default=None)
|
Get params from Django settings.
:param name: name of param
:type name: str,unicode
:param default: default value of param
:type default: object
:return: Param from Django settings or default.
| 3.188616 | 3.560957 | 0.895438 |
if self.working_handler is not None:
t = Thread(target=self._handle_process, args=(proc, stream))
t.start()
out = getattr(proc, stream)
try:
for line in iter(out.readline, ""):
yield line.rstrip()
finally:
out.close()
|
def _unbuffered(self, proc, stream='stdout')
|
Unbuffered output handler.
:type proc: subprocess.Popen
:type stream: six.text_types
:return:
| 3.433229 | 4.092425 | 0.838923 |
self.output = ""
env = os.environ.copy()
env.update(self.env)
if six.PY2: # nocv
# Ugly hack because python 2.7.
if self._stdout == self.DEVNULL:
self._stdout = open(os.devnull, 'w+b')
if self._stderr == self.DEVNULL:
self._stderr = open(os.devnull, 'w+b')
proc = subprocess.Popen(
cmd, stdout=self._stdout, stderr=self._stderr,
bufsize=0, universal_newlines=True,
cwd=cwd, env=env,
close_fds=ON_POSIX
)
for line in self._unbuffered(proc):
self.line_handler(line)
return_code = proc.poll()
if return_code:
logger.error(self.output)
raise subprocess.CalledProcessError(
return_code, cmd, output=str(self.output)
)
return self.output
|
def execute(self, cmd, cwd)
|
Execute commands and output this
:param cmd: -- list of cmd command and arguments
:type cmd: list
:param cwd: -- workdir for executions
:type cwd: str,unicode
:return: -- string with full output
:rtype: str
| 2.781478 | 2.774767 | 1.002419 |
try:
backend = self.get_backend_handler_path(name)
if backend is None:
raise ex.VSTUtilsException("Backend is 'None'.") # pragma: no cover
return self._get_baskend(backend)
except KeyError or ImportError:
msg = "{} ({})".format(name, self.err_message) if self.err_message else name
raise ex.UnknownTypeException(msg)
|
def backend(self, name)
|
Get backend class
:param name: -- name of backend type
:type name: str
:return: class of backend
:rtype: class,module,object
| 8.192863 | 8.301223 | 0.986946 |
return self[name](obj, **self.opts(name))
|
def get_object(self, name, obj)
|
:param name: -- string name of backend
:param name: str
:param obj: -- model object
:type obj: django.db.models.Model
:return: backend object
:rtype: object
| 18.511028 | 24.338507 | 0.760565 |
regexp = name
options = self.opts(regexp)
options.update(kwargs)
args = options.pop('view_args', argv)
csrf_enable = self.get_backend_data(regexp).get('CSRF_ENABLE', True)
if regexp in self.settings_urls:
regexp = r'^{}'.format(self.get_django_settings(regexp)[1:])
view = self[name].as_view()
if not csrf_enable:
view = csrf_exempt(view)
return url(regexp, view, *args, **options)
|
def get_object(self, name, *argv, **kwargs)
|
Get url object tuple for url
:param name: url regexp from
:type name: str
:param argv: overrided args
:param kwargs: overrided kwargs
:return: url object
:rtype: django.conf.urls.url
| 6.802655 | 6.633102 | 1.025562 |
# pylint: disable=unused-argument
'''
Copy instance with deps.
'''
instance = self.copy_instance(self.get_object())
serializer = self.get_serializer(instance, data=request.data, partial=True)
serializer.is_valid()
serializer.save()
return Response(serializer.data, status.HTTP_201_CREATED).resp
|
def copy(self, request, **kwargs)
|
Copy instance with deps.
| 3.468217 | 2.74442 | 1.263734 |
'''
Django overloaded method for add cyfunction.
'''
def create_method(name, method):
def manager_method(self, *args, **kwargs):
return getattr(self.get_queryset(), name)(*args, **kwargs)
manager_method.__name__ = method.__name__
manager_method.__doc__ = method.__doc__
return manager_method
orig_method = models.Manager._get_queryset_methods
new_methods = orig_method(queryset_class)
inspect_func = inspect.isfunction
for name, method in inspect.getmembers(queryset_class, predicate=inspect_func):
# Only copy missing methods.
if hasattr(cls, name) or name in new_methods:
continue
queryset_only = getattr(method, 'queryset_only', None)
if queryset_only or (queryset_only is None and name.startswith('_')):
continue
# Copy the method onto the manager.
new_methods[name] = create_method(name, method)
return new_methods
|
def _get_queryset_methods(cls, queryset_class)
|
Django overloaded method for add cyfunction.
| 3.865033 | 2.924276 | 1.321706 |
'''
Active Directory auth function
:param ad: LDAP connection string ('ldap://server')
:param username: username with domain ('[email protected]')
:param password: auth password
:return: ldap connection or None if error
'''
result = None
conn = ldap.initialize(ad)
conn.protocol_version = 3
conn.set_option(ldap.OPT_REFERRALS, 0)
user = self.__prepare_user_with_domain(username)
self.logger.debug("Trying to auth with user '{}' to {}".format(user, ad))
try:
conn.simple_bind_s(user, password)
result = conn
self.username, self.password = username, password
self.logger.debug("Successfull login as {}".format(username))
except ldap.INVALID_CREDENTIALS:
result = False
self.logger.debug(traceback.format_exc())
self.logger.debug("Invalid ldap-creds.")
except Exception as ex: # nocv
self.logger.debug(traceback.format_exc())
self.logger.debug("Unknown error: {}".format(str(ex)))
return result
|
def __authenticate(self, ad, username, password)
|
Active Directory auth function
:param ad: LDAP connection string ('ldap://server')
:param username: username with domain ('[email protected]')
:param password: auth password
:return: ldap connection or None if error
| 3.124301 | 2.406589 | 1.298228 |
'''
Indicates that object auth worked
:return: True or False
'''
if isinstance(self.__conn, ldap.ldapobject.LDAPObject) or self.__conn:
return True
return False
|
def isAuth(self)
|
Indicates that object auth worked
:return: True or False
| 13.411896 | 4.762015 | 2.816433 |
# pylint: disable=unused-argument
'''
Prepare ENV for web-application
:param default_settings: minimal needed settings for run app
:type default_settings: dict
:param kwargs: other overrided settings
:rtype: None
'''
for key, value in default_settings.items():
os.environ.setdefault(key, value)
os.environ.update(kwargs)
if six.PY2: # nocv
warnings.warn(
'Python 2.7 is deprecated and will dropped in 2.0, use Python >3.5',
DeprecationWarning
)
|
def prepare_environment(default_settings=_default_settings, **kwargs)
|
Prepare ENV for web-application
:param default_settings: minimal needed settings for run app
:type default_settings: dict
:param kwargs: other overrided settings
:rtype: None
| 4.570334 | 2.962051 | 1.542963 |
# pylint: disable=unused-variable
'''
Main function to executes from cmd. Emulates django-admin.py execution.
:param kwargs: overrided env-settings
:rtype: None
'''
from django.core.management import execute_from_command_line
prepare_environment(**kwargs)
args = list(sys.argv)
args[0] = os.getenv("VST_CTL_SCRIPT", sys.argv[0])
execute_from_command_line(args or sys.argv)
|
def cmd_execution(*args, **kwargs)
|
Main function to executes from cmd. Emulates django-admin.py execution.
:param kwargs: overrided env-settings
:rtype: None
| 6.197649 | 2.81277 | 2.203397 |
def get_celery_app(name=None, **kwargs): # nocv
# pylint: disable=import-error
'''
Function to return celery-app. Works only if celery installed.
:param name: Application name
:param kwargs: overrided env-settings
:return: Celery-app object
'''
from celery import Celery
prepare_environment(**kwargs)
name = name or os.getenv("VST_PROJECT")
celery_app = Celery(name)
celery_app.config_from_object('django.conf:settings', namespace='CELERY')
celery_app.autodiscover_tasks()
return celery_app
|
Function to return celery-app. Works only if celery installed.
:param name: Application name
:param kwargs: overrided env-settings
:return: Celery-app object
| null | null | null |
|
'''
Register a model with the registry.
Arguments:
*model* can be either a model class or a string that contains the model's
app label and class name separated by a dot, e.g. ``"app.ModelClass"``.
*autofixture* is the :mod:`AutoFixture` subclass that shall be used to
generated instances of *model*.
By default :func:`register` will raise :exc:`ValueError` if the given
*model* is already registered. You can overwrite the registered *model* if
you pass ``True`` to the *overwrite* argument.
The :exc:`ValueError` that is usually raised if a model is already
registered can be suppressed by passing ``True`` to the *fail_silently*
argument.
'''
from .compat import get_model
if isinstance(model, string_types):
model = get_model(*model.split('.', 1))
if not overwrite and model in REGISTRY:
if fail_silently:
return
raise ValueError(
u'%s.%s is already registered. You can overwrite the registered '
u'autofixture by providing the `overwrite` argument.' % (
model._meta.app_label,
model._meta.object_name,
))
REGISTRY[model] = autofixture
|
def register(model, autofixture, overwrite=False, fail_silently=False)
|
Register a model with the registry.
Arguments:
*model* can be either a model class or a string that contains the model's
app label and class name separated by a dot, e.g. ``"app.ModelClass"``.
*autofixture* is the :mod:`AutoFixture` subclass that shall be used to
generated instances of *model*.
By default :func:`register` will raise :exc:`ValueError` if the given
*model* is already registered. You can overwrite the registered *model* if
you pass ``True`` to the *overwrite* argument.
The :exc:`ValueError` that is usually raised if a model is already
registered can be suppressed by passing ``True`` to the *fail_silently*
argument.
| 3.392699 | 1.585652 | 2.139624 |
'''
Remove one or more models from the autofixture registry.
'''
from django.db import models
from .compat import get_model
if issubclass(model_or_iterable, models.Model):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if isinstance(model, string_types):
model = get_model(*model.split('.', 1))
try:
del REGISTRY[model]
except KeyError:
if fail_silently:
continue
raise ValueError(
u'The model %s.%s is not registered.' % (
model._meta.app_label,
model._meta.object_name,
))
|
def unregister(model_or_iterable, fail_silently=False)
|
Remove one or more models from the autofixture registry.
| 2.449415 | 2.157935 | 1.135073 |
'''
Get an autofixture instance for the passed in *model* using either an
appropriate autofixture that was :ref:`registered <registry>` or fall back
to the default :class:`AutoFixture` class. *model* can be a model class or
its string representation (e.g. ``"app.ModelClass"``).
All positional and keyword arguments are passed to the autofixture
constructor.
'''
from .compat import get_model
if isinstance(model, string_types):
model = get_model(*model.split('.', 1))
if model in REGISTRY:
return REGISTRY[model](model, *args, **kwargs)
else:
return AutoFixture(model, *args, **kwargs)
|
def get(model, *args, **kwargs)
|
Get an autofixture instance for the passed in *model* using either an
appropriate autofixture that was :ref:`registered <registry>` or fall back
to the default :class:`AutoFixture` class. *model* can be a model class or
its string representation (e.g. ``"app.ModelClass"``).
All positional and keyword arguments are passed to the autofixture
constructor.
| 5.923831 | 1.699402 | 3.485833 |
'''
Create *count* instances of *model* using either an appropriate
autofixture that was :ref:`registered <registry>` or fall back to the
default :class:`AutoFixture` class. *model* can be a model class or its
string representation (e.g. ``"app.ModelClass"``).
All positional and keyword arguments are passed to the autofixture
constructor. It is demonstrated in the example below which will create ten
superusers::
import autofixture
admins = autofixture.create('auth.User', 10, field_values={'is_superuser': True})
.. note:: See :ref:`AutoFixture` for more information.
:func:`create` will return a list of the created objects.
'''
from .compat import get_model
if isinstance(model, string_types):
model = get_model(*model.split('.', 1))
if model in REGISTRY:
autofixture_class = REGISTRY[model]
else:
autofixture_class = AutoFixture
# Get keyword arguments that the create_one method accepts and pass them
# into create_one instead of AutoFixture.__init__
argnames = set(getargnames(autofixture_class.create_one))
argnames -= set(['self'])
create_kwargs = {}
for argname in argnames:
if argname in kwargs:
create_kwargs[argname] = kwargs.pop(argname)
autofixture = autofixture_class(model, *args, **kwargs)
return autofixture.create(count, **create_kwargs)
|
def create(model, count, *args, **kwargs)
|
Create *count* instances of *model* using either an appropriate
autofixture that was :ref:`registered <registry>` or fall back to the
default :class:`AutoFixture` class. *model* can be a model class or its
string representation (e.g. ``"app.ModelClass"``).
All positional and keyword arguments are passed to the autofixture
constructor. It is demonstrated in the example below which will create ten
superusers::
import autofixture
admins = autofixture.create('auth.User', 10, field_values={'is_superuser': True})
.. note:: See :ref:`AutoFixture` for more information.
:func:`create` will return a list of the created objects.
| 4.176296 | 1.812736 | 2.303863 |
'''
Auto-discover INSTALLED_APPS autofixtures.py and tests.py modules and fail
silently when not present. This forces an import on them to register any
autofixture bits they may want.
'''
from .compat import importlib
# Bail out if autodiscover didn't finish loading from a previous call so
# that we avoid running autodiscover again when the URLconf is loaded by
# the exception handler to resolve the handler500 view. This prevents an
# autofixtures.py module with errors from re-registering models and raising a
# spurious AlreadyRegistered exception (see #8245).
global LOADING
if LOADING:
return
LOADING = True
app_paths = {}
# For each app, we need to look for an autofixture.py inside that app's
# package. We can't use os.path here -- recall that modules may be
# imported different ways (think zip files) -- so we need to get
# the app's __path__ and look for autofixture.py on that path.
# Step 1: find out the app's __path__ Import errors here will (and
# should) bubble up, but a missing __path__ (which is legal, but weird)
# fails silently -- apps that do weird things with __path__ might
# need to roll their own autofixture registration.
import imp
try:
from django.apps import apps
for app_config in apps.get_app_configs():
app_paths[app_config.name] = [app_config.path]
except ImportError:
# Django < 1.7
from django.conf import settings
for app in settings.INSTALLED_APPS:
mod = importlib.import_module(app)
try:
app_paths[app] = mod.__path__
except AttributeError:
continue
for app, app_path in app_paths.items():
# Step 2: use imp.find_module to find the app's autofixtures.py. For some
# reason imp.find_module raises ImportError if the app can't be found
# but doesn't actually try to import the module. So skip this app if
# its autofixtures.py doesn't exist
try:
file, _, _ = imp.find_module('autofixtures', app_path)
except ImportError:
continue
else:
if file:
file.close()
# Step 3: import the app's autofixtures file. If this has errors we want them
# to bubble up.
try:
importlib.import_module("%s.autofixtures" % app)
except Exception as e:
warnings.warn(u'Error while importing %s.autofixtures: %r' %
(app, e))
for app, app_path in app_paths.items():
try:
file, _, _ = imp.find_module('tests', app_path)
except ImportError:
continue
else:
if file:
file.close()
try:
importlib.import_module("%s.tests" % app)
except Exception as e:
warnings.warn(u'Error while importing %s.tests: %r' %
(app, e))
# autodiscover was successful, reset loading flag.
LOADING = False
|
def autodiscover()
|
Auto-discover INSTALLED_APPS autofixtures.py and tests.py modules and fail
silently when not present. This forces an import on them to register any
autofixture bits they may want.
| 3.628424 | 3.304337 | 1.098079 |
size = (width, height)
text = text if text else '{0}x{1}'.format(width, height)
try:
font = ImageFont.truetype(font, size=fontsize, encoding=encoding)
except IOError:
font = ImageFont.load_default()
result_img = Image.new(mode, size, bg_color)
text_size = font.getsize(text)
text_img = Image.new("RGBA", size, bg_color)
#position for the text:
left = size[0] / 2 - text_size[0] / 2
top = size[1] / 2 - text_size[1] / 2
drawing = ImageDraw.Draw(text_img)
drawing.text((left, top),
text,
font=font,
fill=fg_color)
txt_img = ImageOps.fit(text_img, size, method=Image.BICUBIC, centering=(0.5, 0.5))
result_img.paste(txt_img)
file_obj = io.BytesIO()
txt_img.save(file_obj, fmt)
return file_obj.getvalue()
|
def get_placeholder_image(width, height, name=None, fg_color=get_color('black'),
bg_color=get_color('grey'), text=None, font=u'Verdana.ttf',
fontsize=42, encoding=u'unic', mode='RGBA', fmt=u'PNG')
|
Little spin-off from https://github.com/Visgean/python-placeholder
that not saves an image and instead returns it.
| 2.094577 | 2.084401 | 1.004882 |
'''
Checks if the field is the automatically created OneToOneField used by
django multi-table inheritance
'''
return (
isinstance(field, related.OneToOneField) and
field.primary_key and
issubclass(field.model, get_remote_field_to(field))
)
|
def is_inheritance_parent(self, field)
|
Checks if the field is the automatically created OneToOneField used by
django multi-table inheritance
| 9.935149 | 3.832776 | 2.592155 |
def get_value(self, field):
    """Return a random value that can be assigned to *field*.

    Generators are created lazily and cached per field. Returns
    ``self.IGNORE_FIELD`` when no generator is available for the field.
    """
    if field not in self._field_generators:
        self._field_generators[field] = self.get_generator(field)
    generator = self._field_generators[field]
    if generator is None:
        return self.IGNORE_FIELD
    return generator()
def check_constraints(self, instance):
    """Return field names of *instance* that need recalculation.

    Each registered constraint is applied; fields reported by an
    :exc:`~constraints.InvalidConstraint` are collected and returned.
    """
    recalc_fields = []
    for constraint in self.constraints:
        try:
            constraint(self.model, instance)
        except constraints.InvalidConstraint as e:
            recalc_fields.extend(e.fields)
    return recalc_fields
def create_one(self, commit=True):
    """Create and return one model instance.

    If *commit* is ``False`` the instance will not be saved and
    many-to-many relations will not be processed.

    Subclasses that override ``create_one`` can specify arbitrary keyword
    arguments. They will be passed through by the
    :meth:`autofixture.base.AutoFixture.create` method and the helper
    functions :func:`autofixture.create` and :func:`autofixture.create_one`.

    :raises CreateInstanceError: if constraints are not satisfied after
        ``self.tries`` attempts.
    """
    tries = self.tries
    instance = self.model()
    process = instance._meta.fields
    # Keep re-filling fields until all constraints hold or we run out of tries.
    while process and tries > 0:
        for field in process:
            self.process_field(instance, field)
        process = self.check_constraints(instance)
        tries -= 1
    if tries == 0:
        raise CreateInstanceError(
            u'Cannot solve constraints for "%s", tried %d times. '
            u'Please check value generators or model constraints. '
            u'At least the following fields are involved: %s' % (
                '%s.%s' % (
                    self.model._meta.app_label,
                    self.model._meta.object_name),
                self.tries,
                ', '.join([field.name for field in process]),
            ))
    instance = self.pre_process_instance(instance)
    if commit:
        instance.save()
        # To handle the particular case of GenericRelation:
        # in Django pre-1.6 it appears in .many_to_many.
        many_to_many = [f for f in instance._meta.many_to_many
                        if not isinstance(f, get_GenericRelation())]
        for field in many_to_many:
            self.process_m2m(instance, field)
    signals.instance_created.send(
        sender=self,
        model=self.model,
        instance=instance,
        committed=commit)
    post_process_kwargs = {}
    if 'commit' in getargnames(self.post_process_instance):
        post_process_kwargs['commit'] = commit
    else:
        warnings.warn(
            "Subclasses of AutoFixture need to provide a `commit` "
            "argument for post_process_instance methods", DeprecationWarning)
    return self.post_process_instance(instance, **post_process_kwargs)
def create(self, count=1, commit=True, **kwargs):
    """Create and return *count* model instances.

    If *commit* is ``False`` the instances will not be saved and
    many-to-many relations will not be processed.

    Internally calls :meth:`create_one` for each instance.

    :raises CreateInstanceError: if constraints are not satisfied.
    """
    return [self.create_one(commit=commit, **kwargs) for _ in range(count)]
def get_release(package):
    """Return package version as listed in ``__version__`` in ``__init__.py``."""
    init_path = os.path.join(PROJECT_PATH, package, '__init__.py')
    # Use a context manager so the file handle is closed deterministically.
    with open(init_path) as f:
        init_py = f.read()
    return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
def opendocs(where='index', how='default'):
    """Rebuild documentation and open it in your browser.

    Use *how* to specify how it should be opened:

    ``d`` or ``default``: open in new tab or new window, using the default
    method of your browser.
    ``t`` or ``tab``: open documentation in new tab.
    ``n``, ``w`` or ``window``: open documentation in new window.
    """
    import webbrowser
    docs_dir = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'docs')
    index = os.path.join(docs_dir, '_build/html/%s.html' % where)
    builddocs('html')
    url = 'file://%s' % os.path.abspath(index)
    if how in ('d', 'default'):
        webbrowser.open(url)
    elif how in ('t', 'tab'):
        webbrowser.open_new_tab(url)
    elif how in ('n', 'w', 'window'):
        webbrowser.open_new(url)
def main():
    """Build an ``ItemTable`` from three sample items and print its HTML.

    Demonstrates per-column th/td classes and data attributes; the output
    is a plain ``<table>`` element (not prettified).
    """
    items = [Item('Name1', 'Description1'),
             Item('Name2', 'Description2'),
             Item('Name3', 'Description3')]
    table = ItemTable(items)
    # or {{ table }} in jinja
    print(table.__html__())
def main():
    """Build an ``ItemTable`` whose rows contain nested sub-tables and
    print its HTML.

    Each item carries a list of ``SubItem`` rows rendered as an inner
    ``<table>`` in the third column; output is not prettified.
    """
    items = [Item('Name1', 'Description1', [SubItem('r1sr1c1', 'r1sr1c2'),
                                            SubItem('r1sr2c1', 'r1sr2c2')]),
             Item('Name2', 'Description2', [SubItem('r2sr1c1', 'r2sr1c2'),
                                            SubItem('r2sr2c1', 'r2sr2c2')]),
             ]
    table = ItemTable(items)
    # or {{ table }} in jinja
    print(table.__html__())
def main():
    """Create a table class dynamically with ``create_table`` and print
    the rendered HTML for three dict rows.
    """
    TableCls = create_table()\
        .add_column('name', Col('Name'))\
        .add_column('description', Col('Description'))
    items = [dict(name='Name1', description='Description1'),
             dict(name='Name2', description='Description2'),
             dict(name='Name3', description='Description3')]
    table = TableCls(items)
    print(table.__html__())
def create_table(name=str('_Table'), base=Table, options=None):
    """Create and return a new table class.

    :param name: name for the generated class.
    :param base: base class, or an iterable of base classes.
    :param options: optional class-body dict passed to the metaclass.
    """
    try:
        base = tuple(base)
    except TypeError:
        # Then assume that what we have is a single class, so make it
        # into a 1-tuple.
        base = (base,)
    return TableMeta(name, base, options or {})
def td_contents(self, item, attr_list):
    """Given an item and an attr list, return the contents of the td.

    This method is a likely candidate to override when extending the
    Col class, which is done in LinkCol and ButtonCol. Override this
    method if you need to get some extra data from the item.

    Note that the output of this function is NOT escaped.
    """
    return self.td_format(self.from_attr_list(item, attr_list))
def open(filename, mode="rb",
         format=None, check=-1, preset=None, filters=None,
         encoding=None, errors=None, newline=None):
    """Open an LZMA-compressed file in binary or text mode.

    *filename* can be either an actual file name (given as a str or bytes
    object), in which case the named file is opened, or it can be an
    existing file object to read from or write to.

    The *mode* argument can be "r", "rb" (default), "w", "wb", "a", or
    "ab" for binary mode, or "rt", "wt" or "at" for text mode.

    For binary mode this is equivalent to the ``LZMAFile`` constructor;
    the text-mode-only arguments must not be provided. For text mode the
    ``LZMAFile`` is wrapped in an :class:`io.TextIOWrapper`.
    """
    if "t" in mode:
        if "b" in mode:
            raise ValueError("Invalid mode: %r" % (mode,))
    else:
        # Text-mode-only arguments are rejected in binary mode.
        if encoding is not None:
            raise ValueError("Argument 'encoding' not supported in binary mode")
        if errors is not None:
            raise ValueError("Argument 'errors' not supported in binary mode")
        if newline is not None:
            raise ValueError("Argument 'newline' not supported in binary mode")
    lz_mode = mode.replace("t", "")
    binary_file = LZMAFile(filename, lz_mode, format=format, check=check,
                           preset=preset, filters=filters)
    if "t" in mode:
        return io.TextIOWrapper(binary_file, encoding, errors, newline)
    else:
        return binary_file
def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
    """Compress a block of data.

    Refer to LZMACompressor's docstring for a description of the
    optional arguments *format*, *check*, *preset* and *filters*.

    For incremental compression, use an LZMACompressor object instead.
    """
    comp = LZMACompressor(format, check, preset, filters)
    return comp.compress(data) + comp.flush()
def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
    """Decompress a block of data.

    Refer to LZMADecompressor's docstring for a description of the
    optional arguments *format*, *memlimit* and *filters*.

    For incremental decompression, use an LZMADecompressor object instead.
    """
    results = []
    # Concatenated streams are handled by decompressing repeatedly until
    # the input is exhausted.
    while True:
        decomp = LZMADecompressor(format, memlimit, filters)
        try:
            res = decomp.decompress(data)
        except LZMAError:
            if results:
                break  # Leftover data is not a valid LZMA/XZ stream; ignore it.
            else:
                raise  # Error on the first iteration; bail out.
        results.append(res)
        if not decomp.eof:
            raise LZMAError("Compressed data ended before the "
                            "end-of-stream marker was reached")
        data = decomp.unused_data
        if not data:
            break
    return b"".join(results)
def close(self):
    """Flush and close the file.

    May be called more than once without error. Once the file is
    closed, any other operation on it will raise a ValueError.
    """
    if self._mode == _MODE_CLOSED:
        return
    try:
        if self._mode in (_MODE_READ, _MODE_READ_EOF):
            self._decompressor = None
            self._buffer = None
        elif self._mode == _MODE_WRITE:
            # Flush any pending compressed output before closing.
            self._fp.write(self._compressor.flush())
            self._compressor = None
    finally:
        try:
            if self._closefp:
                self._fp.close()
        finally:
            self._fp = None
            self._closefp = False
            self._mode = _MODE_CLOSED
def peek(self, size=-1):
    """Return buffered data without advancing the file position.

    Always returns at least one byte of data, unless at EOF.
    The exact number of bytes returned is unspecified (the *size*
    argument is ignored).
    """
    self._check_can_read()
    if self._mode == _MODE_READ_EOF or not self._fill_buffer():
        return b""
    return self._buffer
def read(self, size=-1):
    """Read up to *size* uncompressed bytes from the file.

    If *size* is negative or omitted, read until EOF is reached.
    Returns ``b""`` if the file is already at EOF.
    """
    self._check_can_read()
    if size is None:
        # This is not needed on Python 3 where the comparison to zero
        # will fail with a TypeError.
        raise TypeError("Read size should be an integer, not None")
    if self._mode == _MODE_READ_EOF or size == 0:
        return b""
    elif size < 0:
        return self._read_all()
    else:
        return self._read_block(size)
def read1(self, size=-1):
    """Read up to *size* uncompressed bytes, while trying to avoid
    making multiple reads from the underlying stream.

    Returns ``b""`` if the file is at EOF.
    """
    # Usually, read1() calls _fp.read() at most once. However, sometimes
    # this does not give enough data for the decompressor to make progress.
    # In this case we make multiple reads, to avoid returning b"".
    self._check_can_read()
    if size is None:
        # This is not needed on Python 3 where the comparison to zero
        # will fail with a TypeError.
        raise TypeError("Read size should be an integer, not None")
    if (size == 0 or self._mode == _MODE_READ_EOF or
            not self._fill_buffer()):
        return b""
    if 0 < size < len(self._buffer):
        data = self._buffer[:size]
        self._buffer = self._buffer[size:]
    else:
        data = self._buffer
        self._buffer = None
    self._pos += len(data)
    return data
def write(self, data):
    """Write a bytes object to the file.

    Returns the number of uncompressed bytes written, which is always
    ``len(data)``. Note that due to buffering, the file on disk may not
    reflect the data written until close() is called.
    """
    self._check_can_write()
    compressed = self._compressor.compress(data)
    self._fp.write(compressed)
    self._pos += len(data)
    return len(data)
def seek(self, offset, whence=0):
    """Change the file position.

    The new position is specified by *offset*, relative to the position
    indicated by *whence*. Possible values for *whence* are:

        0: start of stream (default): offset must not be negative
        1: current stream position
        2: end of stream; offset must not be positive

    Returns the new file position.

    Note that seeking is emulated, so depending on the parameters, this
    operation may be extremely slow.
    """
    self._check_can_seek()
    # Recalculate offset as an absolute file position.
    if whence == 0:
        pass
    elif whence == 1:
        offset = self._pos + offset
    elif whence == 2:
        # Seeking relative to EOF - we need to know the file's size.
        if self._size < 0:
            self._read_all(return_data=False)
        offset = self._size + offset
    else:
        raise ValueError("Invalid value for whence: {}".format(whence))
    # Make it so that offset is the number of bytes to skip forward.
    if offset is None:
        # This is not needed on Python 3 where the comparison to self._pos
        # will fail with a TypeError.
        raise TypeError("Seek offset should be an integer, not None")
    if offset < self._pos:
        self._rewind()
    else:
        offset -= self._pos
    # Read and discard data until we reach the desired position.
    if self._mode != _MODE_READ_EOF:
        self._read_block(offset, return_data=False)
    return self._pos
def create(cls, user_id, github_id=None, name=None, **kwargs):
    """Create and return a repository row inside a nested transaction.

    :param user_id: owner's user identifier.
    :param github_id: GitHub repository identifier.
    :param name: GitHub repository full name.
    """
    # NOTE(review): signature takes ``cls`` -- presumably decorated with
    # @classmethod at the class body.
    with db.session.begin_nested():
        obj = cls(user_id=user_id, github_id=github_id, name=name,
                  **kwargs)
        db.session.add(obj)
    return obj
def get(cls, user_id, github_id=None, name=None, check_owner=True):
    """Return a repository.

    :param integer user_id: User identifier.
    :param integer github_id: GitHub repository identifier.
    :param str name: GitHub repository full name.
    :returns: The repository object.
    :raises: :py:exc:`~sqlalchemy.orm.exc.NoResultFound`: if the repository
        doesn't exist.
    :raises: :py:exc:`~sqlalchemy.orm.exc.MultipleResultsFound`: if
        multiple repositories with the specified GitHub id and/or name
        exist.
    :raises: :py:exc:`RepositoryAccessError`: if the user is not the owner
        of the repository.
    """
    repo = cls.query.filter((Repository.github_id == github_id) |
                            (Repository.name == name)).one()
    if (check_owner and repo and repo.user_id and
            repo.user_id != int(user_id)):
        raise RepositoryAccessError(
            u'User {user} cannot access repository {repo}({repo_id}).'
            .format(user=user_id, repo=name, repo_id=github_id)
        )
    return repo
def enable(cls, user_id, github_id, name, hook):
    """Enable webhooks for a repository.

    If the repository does not exist it will create one.

    :param user_id: User identifier.
    :param github_id: GitHub repository identifier.
    :param name: Fully qualified name of the repository.
    :param hook: GitHub hook identifier.
    """
    try:
        repo = cls.get(user_id, github_id=github_id, name=name)
    except NoResultFound:
        repo = cls.create(user_id=user_id, github_id=github_id, name=name)
    repo.hook = hook
    repo.user_id = user_id
    return repo
def disable(cls, user_id, github_id, name):
    """Disable webhooks for a repository.

    Clears the hook and ownership on the stored repository row.

    :param user_id: User identifier.
    :param github_id: GitHub id of the repository.
    :param name: Fully qualified name of the repository.
    """
    repo = cls.get(user_id, github_id=github_id, name=name)
    repo.hook = None
    repo.user_id = None
    return repo
def create(cls, event):
    """Create a new Release model from a GitHub webhook *event*.

    :raises ReleaseAlreadyReceivedError: if the release id was seen before.
    :raises RepositoryDisabledError: if the repository has no active hook.
    """
    # Check if the release has already been received
    release_id = event.payload['release']['id']
    existing_release = Release.query.filter_by(
        release_id=release_id,
    ).first()
    if existing_release:
        raise ReleaseAlreadyReceivedError(
            u'{release} has already been received.'
            .format(release=existing_release)
        )
    # Create the Release
    repo_id = event.payload['repository']['id']
    repo = Repository.get(user_id=event.user_id, github_id=repo_id)
    if repo.enabled:
        with db.session.begin_nested():
            release = cls(
                release_id=release_id,
                tag=event.payload['release']['tag_name'],
                repository=repo,
                event=event,
                status=ReleaseStatus.RECEIVED,
            )
            db.session.add(release)
        return release
    else:
        current_app.logger.warning(
            u'Release creation attempt on disabled {repo}.'
            .format(repo=repo)
        )
        raise RepositoryDisabledError(
            u'{repo} is not enabled for webhooks.'.format(repo=repo)
        )
def record(self):
    """Return the :class:`Record` for this model, or ``None`` when no
    record metadata is attached.

    NOTE(review): presumably used as a @property on the model class.
    """
    if self.recordmetadata:
        return Record(self.recordmetadata.json, model=self.recordmetadata)
    return None
def get_badge_image_url(pid, ext='svg'):
    """Return the badge URL for a persistent identifier (e.g. a DOI).

    :param pid: persistent identifier; its type and value become the
        badge title and value.
    :param ext: image format extension (default ``svg``).
    """
    return url_for('invenio_formatter_badges.badge',
                   title=pid.pid_type, value=pid.pid_value, ext=ext)
def index_old(user_id, repo_name):
    """Redirect to the badge of the latest release of a GitHub repository.

    Aborts with 404 when the repository has no published release.
    """
    pid = get_pid_of_latest_release_or_404(name=repo_name)
    return redirect(get_badge_image_url(pid))
def disconnect_github(access_token, repo_hooks):
    """Uninstall GitHub webhooks and revoke the OAuth token.

    :param access_token: GitHub OAuth token used for the final cleanup.
    :param repo_hooks: iterable of ``(repo_id, hook_id)`` pairs to delete.
    """
    # Note at this point the remote account and all associated data have
    # already been deleted. The celery task is passed the access_token to make
    # some last cleanup and afterwards delete itself remotely.
    import github3
    from .api import GitHubAPI
    try:
        gh = github3.login(token=access_token)
        for repo_id, repo_hook in repo_hooks:
            ghrepo = gh.repository_with_id(repo_id)
            if ghrepo:
                hook = ghrepo.hook(repo_hook)
                if hook and hook.delete():
                    info_msg = u'Deleted hook {hook} from {repo}'.format(
                        hook=hook.id, repo=ghrepo.full_name)
                    current_app.logger.info(info_msg)
        # If we finished our clean-up successfully, we can revoke the token
        GitHubAPI.revoke_token(access_token)
    except Exception as exc:
        # Retry in case GitHub may be down...
        disconnect_github.retry(exc=exc)
def sync_hooks(user_id, repositories):
    """Sync repository hooks for a user (celery task).

    :param user_id: user whose hooks should be synchronized.
    :param repositories: iterable of GitHub repository ids.
    """
    from .api import GitHubAPI
    try:
        gh = GitHubAPI(user_id=user_id)
        for repo_id in repositories:
            try:
                with db.session.begin_nested():
                    gh.sync_repo_hook(repo_id)
                # We commit per repository, because while the task is running
                # the user might enable/disable a hook.
                db.session.commit()
            except RepositoryAccessError as e:
                current_app.logger.warning(e.message, exc_info=True)
            except NoResultFound:
                pass  # Repository not in DB yet
    except Exception as exc:
        sync_hooks.retry(exc=exc)
def process_release(release_id, verify_sender=False):
    """Process a received Release: publish it and record the outcome.

    :param release_id: id of a Release in RECEIVED or FAILED state.
    :param verify_sender: when ``True``, validate the webhook sender first.
    :raises InvalidSenderError: if sender verification fails.
    """
    from invenio_db import db
    from invenio_rest.errors import RESTException
    from .errors import InvalidSenderError
    from .models import Release, ReleaseStatus
    from .proxies import current_github
    release_model = Release.query.filter(
        Release.release_id == release_id,
        Release.status.in_([ReleaseStatus.RECEIVED, ReleaseStatus.FAILED]),
    ).one()
    release_model.status = ReleaseStatus.PROCESSING
    db.session.commit()
    release = current_github.release_api_class(release_model)
    if verify_sender and not release.verify_sender():
        raise InvalidSenderError(
            u'Invalid sender for event {event} for user {user}'
            .format(event=release.event.id, user=release.event.user_id)
        )

    def _get_err_obj(msg):
        # Attach the Sentry event id when available so failures are traceable.
        err = {'errors': msg}
        if hasattr(g, 'sentry_event_id'):
            err['error_id'] = str(g.sentry_event_id)
        return err

    try:
        release.publish()
        release.model.status = ReleaseStatus.PUBLISHED
    except RESTException as rest_ex:
        release.model.errors = json.loads(rest_ex.get_body())
        release.model.status = ReleaseStatus.FAILED
        current_app.logger.exception(
            u'Error while processing {release}'.format(release=release.model))
    except CustomGitHubMetadataError as e:
        release.model.errors = _get_err_obj(str(e))
        release.model.status = ReleaseStatus.FAILED
        current_app.logger.exception(
            u'Error while processing {release}'.format(release=release.model))
    except Exception:
        release.model.errors = _get_err_obj('Unknown error occured.')
        release.model.status = ReleaseStatus.FAILED
        current_app.logger.exception(
            u'Error while processing {release}'.format(release=release.model))
    finally:
        # Always persist the final status, even on failure.
        db.session.commit()
def naturaltime(val):
    """Return a humanized, relative form of *val* (e.g. "3 hours ago").

    Accepts either a datetime (assumed UTC) or a parseable timestamp string.
    """
    val = val.replace(tzinfo=pytz.utc) \
        if isinstance(val, datetime) else parse(val)
    now = datetime.utcnow().replace(tzinfo=pytz.utc)
    return humanize.naturaltime(now - val)
def index():
    """Display the list of the current user's GitHub repositories."""
    github = GitHubAPI(user_id=current_user.id)
    token = github.session_token
    ctx = dict(connected=False)
    if token:
        # The user is authenticated and the token we have is still valid.
        if github.account.extra_data.get('login') is None:
            github.init_account()
            db.session.commit()
        # Sync if needed
        if request.method == 'POST' or github.check_sync():
            # When we're in an XHR request, we want to synchronously sync hooks
            github.sync(async_hooks=(not request.is_xhr))
            db.session.commit()
        # Generate the repositories view object
        extra_data = github.account.extra_data
        repos = extra_data['repos']
        if repos:
            # 'Enhance' our repos dict, from our database model
            db_repos = Repository.query.filter(
                Repository.github_id.in_([int(k) for k in repos.keys()]),
            ).all()
            for repo in db_repos:
                repos[str(repo.github_id)]['instance'] = repo
                repos[str(repo.github_id)]['latest'] = GitHubRelease(
                    repo.latest_release())
        last_sync = humanize.naturaltime(
            (utcnow() - parse_timestamp(extra_data['last_sync'])))
        ctx.update({
            'connected': True,
            'repos': sorted(repos.items(), key=lambda x: x[1]['full_name']),
            'last_sync': last_sync,
        })
    return render_template(current_app.config['GITHUB_TEMPLATE_INDEX'], **ctx)
def repository(name):
    """Display the selected repository and its releases.

    Aborts with 403 when the user is not authenticated, the repository is
    not in the user's synced repos, or access is denied.
    """
    user_id = current_user.id
    github = GitHubAPI(user_id=user_id)
    token = github.session_token
    if token:
        repos = github.account.extra_data.get('repos', [])
        repo = next((repo for repo_id, repo in repos.items()
                     if repo.get('full_name') == name), {})
        if not repo:
            abort(403)
        try:
            # NOTE: Here we do not check for repository ownership, since it
            # might have changed even though the user might have made releases
            # in the past.
            repo_instance = Repository.get(user_id=user_id,
                                           github_id=repo['id'],
                                           check_owner=False)
        except RepositoryAccessError:
            abort(403)
        except NoResultFound:
            repo_instance = Repository(name=repo['full_name'],
                                       github_id=repo['id'])
        releases = [
            current_github.release_api_class(r) for r in (
                repo_instance.releases.order_by(db.desc(Release.created)).all()
                if repo_instance.id else []
            )
        ]
        return render_template(
            current_app.config['GITHUB_TEMPLATE_VIEW'],
            repo=repo_instance,
            releases=releases,
            serializer=current_github.record_serializer,
        )
    abort(403)
def hook():
    """Install (POST) or remove (DELETE) a GitHub webhook for a repository.

    The repository id is read from the JSON request body. Returns 201/204
    on success; aborts with 400/403/404 on failure.
    """
    repo_id = request.json['id']
    github = GitHubAPI(user_id=current_user.id)
    repos = github.account.extra_data['repos']
    if repo_id not in repos:
        abort(404)
    if request.method == 'DELETE':
        try:
            if github.remove_hook(repo_id, repos[repo_id]['full_name']):
                db.session.commit()
                return '', 204
            else:
                abort(400)
        except Exception:
            abort(403)
    elif request.method == 'POST':
        try:
            if github.create_hook(repo_id, repos[repo_id]['full_name']):
                db.session.commit()
                return '', 201
            else:
                abort(400)
        except Exception:
            abort(403)
    else:
        abort(400)
def hook_action(action, repo_id):
    """Enable or disable a repository webhook and redirect to the index.

    :param action: either ``'enable'`` or ``'disable'``.
    :param repo_id: GitHub repository identifier.
    """
    github = GitHubAPI(user_id=current_user.id)
    repos = github.account.extra_data['repos']
    if repo_id not in repos:
        abort(404)
    if action == 'disable':
        if github.remove_hook(repo_id, repos[repo_id]['full_name']):
            db.session.commit()
            return redirect(url_for('.index'))
        else:
            abort(400)
    elif action == 'enable':
        if github.create_hook(repo_id, repos[repo_id]['full_name']):
            db.session.commit()
            return redirect(url_for('.index'))
        else:
            abort(400)
    else:
        abort(400)
def release_api_class(self):
    """Return the configured GitHub Release API class.

    Resolves ``GITHUB_RELEASE_CLASS`` (import path or class) and asserts
    it subclasses :class:`GitHubRelease`.
    """
    cls = current_app.config['GITHUB_RELEASE_CLASS']
    if isinstance(cls, string_types):
        cls = import_string(cls)
    assert issubclass(cls, GitHubRelease)
    return cls
def record_serializer(self):
    """Return the configured record serializer.

    Resolves ``GITHUB_RECORD_SERIALIZER`` when given as an import path.
    """
    imp = current_app.config['GITHUB_RECORD_SERIALIZER']
    if isinstance(imp, string_types):
        return import_string(imp)
    return imp
def init_app(self, app):
    """Flask application initialization.

    Registers the extension and hooks signal handlers on first request.
    """
    self.init_config(app)
    app.extensions['invenio-github'] = self

    @app.before_first_request
    def connect_signals():
        """Connect OAuth/DB signals once the app is serving requests."""
        from invenio_oauthclient.models import RemoteAccount
        from invenio_oauthclient.signals import account_setup_committed
        from .api import GitHubAPI
        from .handlers import account_post_init
        account_setup_committed.connect(
            account_post_init,
            sender=GitHubAPI.remote._get_current_object()
        )

        @event.listens_for(RemoteAccount, 'before_delete')
        def receive_before_delete(mapper, connection, target):
            # NOTE(review): handler body truncated in the source dump --
            # restore the original cleanup logic before use.
            pass
def access_token(self):
    """Return the OAuth access token.

    Falls back to a request token when no user is associated.
    NOTE(review): presumably used as a @property.
    """
    if self.user_id:
        return RemoteToken.get(
            self.user_id, self.remote.consumer_key
        ).access_token
    return self.remote.get_request_token()[0]
def session_token(self):
    """Return the OAuth session token for the current user, or ``None``.

    NOTE(review): presumably used as a @property.
    """
    session_token = None
    if self.user_id is not None:
        session_token = token_getter(self.remote)
    if session_token:
        return RemoteToken.get(
            self.user_id, self.remote.consumer_key,
            access_token=session_token[0]
        )
    return None
def webhook_url(self):
    """Return the URL to be used by a GitHub webhook.

    :raises RuntimeError: when ``GITHUB_WEBHOOK_RECEIVER_URL`` is unset
        while a webhook token exists.
    """
    webhook_token = ProviderToken.query.filter_by(
        id=self.account.extra_data['tokens']['webhook']
    ).first()
    if webhook_token:
        wh_url = current_app.config.get('GITHUB_WEBHOOK_RECEIVER_URL')
        if wh_url:
            return wh_url.format(token=webhook_token.access_token)
        else:
            raise RuntimeError('You must set GITHUB_WEBHOOK_RECEIVER_URL.')
def init_account(self):
    """Set up a new GitHub account: create a webhook token, seed the
    account's extra data, and run an initial sync (without hook checks).
    """
    ghuser = self.api.me()
    # Setup local access tokens to be used by the webhooks
    hook_token = ProviderToken.create_personal(
        'github-webhook',
        self.user_id,
        scopes=['webhooks:event'],
        is_internal=True,
    )
    # Initial structure of extra data
    self.account.extra_data = dict(
        id=ghuser.id,
        login=ghuser.login,
        name=ghuser.name,
        tokens=dict(
            webhook=hook_token.id,
        ),
        repos=dict(),
        last_sync=iso_utcnow(),
    )
    db.session.add(self.account)
    # Sync data from GitHub, but don't check repository hooks yet.
    self.sync(hooks=False)
def sync(self, hooks=True, async_hooks=True):
    """Synchronize user repositories.

    :param bool hooks: True for syncing hooks.
    :param bool async_hooks: True for sending of an asynchronous task to
        sync hooks.

    .. note::

        Syncing happens from GitHub's direction only. This means that we
        consider the information on GitHub as valid, and we overwrite our
        own state based on this information.
    """
    active_repos = {}
    github_repos = {repo.id: repo for repo in self.api.repositories()
                    if repo.permissions['admin']}
    for gh_repo_id, gh_repo in github_repos.items():
        active_repos[gh_repo_id] = {
            'id': gh_repo_id,
            'full_name': gh_repo.full_name,
            'description': gh_repo.description,
        }
    if hooks:
        self._sync_hooks(list(active_repos.keys()),
                         asynchronous=async_hooks)
    # Update changed names for repositories stored in DB
    db_repos = Repository.query.filter(
        Repository.user_id == self.user_id,
        Repository.github_id.in_(github_repos.keys())
    )
    for repo in db_repos:
        gh_repo = github_repos.get(repo.github_id)
        if gh_repo and repo.name != gh_repo.full_name:
            repo.name = gh_repo.full_name
            db.session.add(repo)
    # Remove ownership from repositories that the user has no longer
    # 'admin' permissions, or have been deleted.
    Repository.query.filter(
        Repository.user_id == self.user_id,
        ~Repository.github_id.in_(github_repos.keys())
    ).update(dict(user_id=None, hook=None), synchronize_session=False)
    # Update repos and last sync
    self.account.extra_data.update(dict(
        repos=active_repos,
        last_sync=iso_utcnow(),
    ))
    self.account.extra_data.changed()
    db.session.add(self.account)
def _sync_hooks(self, repos, asynchronous=True):
    """Sync hooks for *repos*, inline or via a background task.

    :param repos: iterable of GitHub repository ids.
    :param asynchronous: when ``True``, delegate to the ``sync_hooks`` task.
    """
    if not asynchronous:
        for repo_id in repos:
            try:
                with db.session.begin_nested():
                    self.sync_repo_hook(repo_id)
                db.session.commit()
            except RepositoryAccessError as e:
                current_app.logger.warning(e.message, exc_info=True)
            except NoResultFound:
                pass  # Repository not in DB yet
    else:
        # FIXME: We have to commit, in order to have all necessary data?
        db.session.commit()
        sync_hooks.delay(self.user_id, repos)
def sync_repo_hook(self, repo_id):
    """Sync a GitHub repo's hook with the locally stored repo."""
    # Get the hook that we may have set in the past
    gh_repo = self.api.repository_with_id(repo_id)
    hooks = (hook.id for hook in gh_repo.hooks()
             if hook.config.get('url', '') == self.webhook_url)
    hook_id = next(hooks, None)
    # If hook on GitHub exists, get or create corresponding db object and
    # enable the hook. Otherwise remove the old hook information.
    if hook_id:
        Repository.enable(user_id=self.user_id,
                          github_id=gh_repo.id,
                          name=gh_repo.full_name,
                          hook=hook_id)
    else:
        Repository.disable(user_id=self.user_id,
                           github_id=gh_repo.id,
                           name=gh_repo.full_name)
def check_sync(self):
    """Return ``True`` if a sync is required based on the last sync date.

    When ``GITHUB_REFRESH_TIMEDELTA`` is unset, every call reports that a
    refresh is due.
    """
    # If refresh interval is not specified, we should refresh every time.
    expiration = utcnow()
    refresh_td = current_app.config.get('GITHUB_REFRESH_TIMEDELTA')
    if refresh_td:
        expiration -= refresh_td
    last_sync = parse_timestamp(self.account.extra_data['last_sync'])
    return last_sync < expiration
def create_hook(self, repo_id, repo_name):
    """Create repository hook.

    :param repo_id: GitHub id of the repository.
    :param str repo_name: full name of the repository (``owner/repo``).
    :returns: ``True`` if a hook is installed and enabled, else ``False``.
    """
    config = dict(
        url=self.webhook_url,
        content_type='json',
        secret=current_app.config['GITHUB_SHARED_SECRET'],
        insecure_ssl='1' if current_app.config['GITHUB_INSECURE_SSL']
                     else '0',
    )
    ghrepo = self.api.repository_with_id(repo_id)
    if ghrepo:
        # Fix: initialize ``hook`` so the ``finally`` clause below cannot
        # raise UnboundLocalError when ``create_hook`` fails with an error
        # other than "hook already exists".
        hook = None
        try:
            hook = ghrepo.create_hook(
                'web',  # GitHub identifier for webhook service
                config,
                events=['release'],
            )
        except github3.GitHubError as e:
            # Check if hook is already installed
            hook_errors = (m for m in e.errors
                           if m['code'] == 'custom' and
                           m['resource'] == 'Hook')
            if next(hook_errors, None):
                hooks = (h for h in ghrepo.hooks()
                         if h.config.get('url', '') == config['url'])
                hook = next(hooks, None)
                if hook:
                    hook.edit(config=config, events=['release'])
        finally:
            if hook:
                Repository.enable(user_id=self.user_id,
                                  github_id=repo_id,
                                  name=repo_name,
                                  hook=hook.id)
                return True
    return False
def remove_hook(self, repo_id, name):
    """Remove repository hook.

    :param repo_id: GitHub id of the repository.
    :param str name: full name of the repository.
    :returns: ``True`` when no hook remains installed, else ``False``.
    """
    ghrepo = self.api.repository_with_id(repo_id)
    if not ghrepo:
        return False
    matching = (h for h in ghrepo.hooks()
                if h.config.get('url', '') == self.webhook_url)
    hook = next(matching, None)
    # Nothing to delete, or deletion succeeded: disable the local repo.
    if not hook or hook.delete():
        Repository.disable(user_id=self.user_id,
                           github_id=repo_id,
                           name=name)
        return True
    return False
def _dev_api(cls):
    """Get a developer instance for GitHub API access."""
    client = github3.GitHub()
    client.set_client_id(cls.remote.consumer_key,
                         cls.remote.consumer_secret)
    return client
def deposit_class(self):
    """Return a class implementing `publish` method."""
    deposit_cls = current_app.config['GITHUB_DEPOSIT_CLASS']
    # Configuration may hold either a class or a dotted import path.
    if isinstance(deposit_cls, string_types):
        deposit_cls = import_string(deposit_cls)
    assert isinstance(deposit_cls, type)
    return deposit_cls
def repo_model(self):
    """Return repository model from database."""
    query = Repository.query.filter_by(
        user_id=self.event.user_id,
        github_id=self.repository['id'],
    )
    # ``one()`` raises if the repository is missing or ambiguous.
    return query.one()
# Prefer "<repo full name>: <release name>" when the webhook payload
# carries a release name; otherwise fall back to the locally stored
# repository name plus the release tag.
if self.event:
    if self.release['name']:
        return u'{0}: {1}'.format(
            self.repository['full_name'], self.release['name']
        )
    return u'{0} {1}'.format(self.repo_model.name, self.model.tag)
|
def title(self)
|
Extract title from a release.
| 5.498758 | 4.941775 | 1.112709 |
def description(self):
    """Extract description from a release.

    Prefers the release body (rendered as Markdown), then the repository
    description, then a fixed placeholder.
    """
    body = self.release.get('body')
    if body:
        return markdown(body)
    repo_description = self.repository.get('description')
    if repo_description:
        return repo_description
    return 'No description provided.'
def related_identifiers(self):
    """Yield related identifiers."""
    # Link the record back to the tagged tree on GitHub.
    tree_url = u'https://github.com/{0}/tree/{1}'.format(
        self.repository['full_name'], self.release['tag_name']
    )
    yield dict(identifier=tree_url, relation='isSupplementTo')
def defaults(self):
    """Return default metadata for the release record."""
    return {
        'access_right': 'open',
        'description': self.description,
        'license': 'other-open',
        # Keep only the date part of the ISO8601 timestamp.
        'publication_date': self.release['published_at'][:10],
        'related_identifiers': list(self.related_identifiers),
        'version': self.version,
        'title': self.title,
        'upload_type': 'software',
    }
def extra_metadata(self):
    """Get extra metadata for file in repository."""
    owner_login = self.repository['owner']['login']
    return get_extra_metadata(
        self.gh.api,
        owner_login,
        self.repository['name'],
        self.release['tag_name'],
    )
def files(self):
    """Extract files to download from GitHub payload.

    :yields: a single ``(filename, zipball_url)`` pair for the release
        archive.
    :raises AssertionError: if GitHub does not answer the HEAD request
        with a 302 redirect to the archive.
    """
    tag_name = self.release['tag_name']
    repo_name = self.repository['full_name']
    zipball_url = self.release['zipball_url']
    filename = u'{name}-{tag}.zip'.format(name=repo_name, tag=tag_name)

    response = self.gh.api.session.head(zipball_url)
    # Explicit raise (instead of ``assert``) so this external-response
    # check survives ``python -O``; exception type is kept for callers.
    if response.status_code != 302:
        raise AssertionError(
            u'Could not retrieve archive from GitHub: {0}'.format(
                zipball_url))

    yield filename, zipball_url
def metadata(self):
    """Return extracted metadata (defaults overridden by extra metadata)."""
    merged = dict(self.defaults)
    merged.update(self.extra_metadata)
    return merged
def pid(self):
    """Get PID object for the Release record.

    Returns ``None`` unless the release is published and has a record.
    """
    if self.model.status != ReleaseStatus.PUBLISHED or not self.record:
        return None
    fetcher_name = current_app.config.get('GITHUB_PID_FETCHER')
    fetcher = current_pidstore.fetchers[fetcher_name]
    return fetcher(self.record.id, self.record)
# Create and publish the deposit inside a nested transaction so a
# failure while building it rolls everything back together.
with db.session.begin_nested():
    deposit = self.deposit_class.create(self.metadata)
    # The webhook event's user becomes creator and owner of the deposit.
    deposit['_deposit']['created_by'] = self.event.user_id
    deposit['_deposit']['owners'] = [self.event.user_id]
    # Fetch the deposit files
    for key, url in self.files:
        # Stream the GitHub archive straight into the deposit files.
        deposit.files[key] = self.gh.api.session.get(
            url, stream=True).raw
    deposit.publish()
    recid, record = deposit.fetch_published()
    # Link the published record metadata back to the Release model.
    self.model.recordmetadata = record.model
|
def publish(self)
|
Publish GitHub release as record.
| 7.714606 | 7.078098 | 1.089926 |
def run(self, event):
    """Process an event.

    :param event: incoming webhook event with a GitHub ``payload``.

    .. note::

        We should only do basic server side operation here, since we send
        the rest of the processing to a Celery task which will be mainly
        accessing the GitHub API.
    """
    repo_id = event.payload['repository']['id']

    # Ping event - update the ping timestamp of the repository
    if 'hook_id' in event.payload and 'zen' in event.payload:
        repository = Repository.query.filter_by(
            github_id=repo_id
        ).one()
        repository.ping = datetime.utcnow()
        db.session.commit()
        return

    # Release event
    if 'release' in event.payload and \
            event.payload.get('action') == 'published':
        try:
            release = Release.create(event)
            db.session.commit()

            # FIXME: If we want to skip the processing, we should do it
            # here (eg. We're in the middle of a migration).
            # if current_app.config['GITHUB_PROCESS_RELEASES']:
            process_release.delay(
                release.release_id,
                verify_sender=self.verify_sender
            )
        except (ReleaseAlreadyReceivedError, RepositoryDisabledError) as e:
            event.response_code = 409
            event.response = dict(message=str(e), status=409)
        except RepositoryAccessError as e:
            # Fix: was ``event.response = 403`` which got immediately
            # overwritten below; mirror the 409 branch above.
            event.response_code = 403
            event.response = dict(message=str(e), status=403)
def parse_timestamp(x):
    """Parse ISO8601 formatted timestamp.

    Naive timestamps are assumed to be in UTC.
    """
    parsed = dateutil.parser.parse(x)
    if parsed.tzinfo is not None:
        return parsed
    return parsed.replace(tzinfo=pytz.utc)
def get_extra_metadata(gh, owner, repo_name, ref):
    """Get the metadata file.

    :returns: parsed JSON metadata, or ``{}`` if the file is absent.
    :raises CustomGitHubMetadataError: if the file is not valid JSON.
    """
    try:
        content = gh.repository(owner, repo_name).file_contents(
            path=current_app.config['GITHUB_METADATA_FILE'], ref=ref
        )
        if not content:
            # File does not exists in the given ref
            return {}
        return json.loads(content.decoded.decode('utf-8'))
    except ValueError:
        # ``json.loads`` failed: the metadata file is malformed.
        raise CustomGitHubMetadataError(
            u'Metadata file "{file}" is not valid JSON.'
            .format(file=current_app.config['GITHUB_METADATA_FILE'])
        )
def get_owner(gh, owner):
    """Get owner of repository as a creator.

    Best-effort: returns ``None`` on any API failure.
    """
    try:
        user = gh.user(owner)
        return [dict(
            name=user.name or user.login,
            affiliation=user.company or '',
        )]
    except Exception:
        return None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.