code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
if resource.exists(): self.resources[resource] = self.timekeeper.get_indicator(resource) else: self.resources[resource] = None
def add_resource(self, resource)
Add a resource to the list of interesting resources
6.787057
6.580491
1.031391
path = resource.real_path # on dos, mtime does not change for a folder when files are added if os.name != 'posix' and os.path.isdir(path): return (os.path.getmtime(path), len(os.listdir(path)), os.path.getsize(path)) return (os.path.getmtime(path), os.path.getsize(path))
def get_indicator(self, resource)
Return the modification time and size of a `Resource`.
4.261868
3.782553
1.126717
project_path = path_relative_to_project_root(project, path) if project_path is None: project_path = rope.base.project._realpath(path) project = rope.base.project.get_no_project() if type is None: return project.get_resource(project_path) if type == 'file': return project.get_file(project_path) if type == 'folder': return project.get_folder(project_path) return None
def path_to_resource(project, path, type=None)
Get the resource at path You only need to specify `type` if `path` does not exist. It can be either 'file' or 'folder'. If the type is `None` it is assumed that the resource already exists. Note that this function uses `Project.get_resource()`, `Project.get_file()`, and `Project.get_folder()` methods.
2.865122
2.864053
1.000373
resource = path_to_resource(project, path) if resource is None: return for observer in list(project.observers): observer.resource_changed(resource) if project.pycore.automatic_soa: rope.base.pycore.perform_soa_on_changed_scopes(project, resource, old_content)
def report_change(project, path, old_content)
Report that the contents of file at `path` was changed The new contents of file is retrieved by reading the file.
6.371296
7.09141
0.898453
resources = project.get_python_files() job_set = task_handle.create_jobset('Analyzing Modules', len(resources)) for resource in resources: job_set.started_job(resource.path) analyze_module(project, resource) job_set.finished_job()
def analyze_modules(project, task_handle=taskhandle.NullTaskHandle())
Perform static object analysis on all python files in the project Note that this might be really time consuming.
4.067086
3.850672
1.056202
return pyobjectsdef.PyModule(project.pycore, code, resource, force_errors=force_errors)
def get_string_module(project, code, resource=None, force_errors=False)
Returns a `PyObject` object for the given code If `force_errors` is `True`, `exceptions.ModuleSyntaxError` is raised if module has syntax errors. This overrides ``ignore_syntax_errors`` project config.
11.765722
12.560221
0.936745
if resources is None: resources = self.project.get_python_files() changes = ChangeSet('Introduce factory method <%s>' % factory_name) job_set = task_handle.create_jobset('Collecting Changes', len(resources)) self._change_module(resources, changes, factory_name, global_factory, job_set) return changes
def get_changes(self, factory_name, global_factory=False, resources=None, task_handle=taskhandle.NullTaskHandle())
Get the changes this refactoring makes `factory_name` indicates the name of the factory function to be added. If `global_factory` is `True` the factory will be global otherwise a static method is added to the class. `resources` can be a list of `rope.base.resource.File`\s that this refactoring should be applied on; if `None` all python files in the project are searched.
6.782833
6.302292
1.076249
for p in self.PEP0484_TYPE_COMMENT_PATTERNS: match = p.search(code) if match: return [match.group(1)]
def _search_type_in_type_comment(self, code)
For more info see: https://www.python.org/dev/peps/pep-0484/#type-comments >>> AssignmentProvider()._search_type_in_type_comment('type: int') ['int']
4.278099
4.239247
1.009165
for import_stmt in self.imports[:]: import_info = import_stmt.import_info if import_info.is_empty() or import_stmt.readonly: continue if len(import_info.names_and_aliases) > 1: for name_and_alias in import_info.names_and_aliases: if hasattr(import_info, "module_name"): new_import = importinfo.FromImport( import_info.module_name, import_info.level, [name_and_alias]) else: new_import = importinfo.NormalImport([name_and_alias]) self.add_import(new_import) import_stmt.empty_import()
def force_single_imports(self)
force a single import per statement
3.049039
2.922744
1.043211
visitor = actions.RemovePyNameVisitor(self.project, self.pymodule, pyname, self._current_folder()) for import_stmt in self.imports: import_stmt.accept(visitor)
def remove_pyname(self, pyname)
Removes pyname when imported in ``from mod import x``
8.528584
7.497482
1.137526
args = list(call_node.args) args.extend(call_node.keywords) called = call_node.func # XXX: Handle constructors if _is_method_call(primary, pyfunction) and \ isinstance(called, ast.Attribute): args.insert(0, called.value) return Arguments(args, scope)
def create_arguments(primary, pyfunction, call_node, scope)
A factory for creating `Arguments`
4.352844
4.418801
0.985073
if key in self.callbacks: self.callbacks[key](value) else: self.prefs[key] = value
def set(self, key, value)
Set the value of `key` preference to `value`.
3.927809
3.288719
1.194328
if not key in self.prefs: self.prefs[key] = [] self.prefs[key].append(value)
def add(self, key, value)
Add an entry to a list preference Add `value` to the list of entries for the `key` preference.
3.268875
2.437669
1.340984
min_indents = find_minimum_indents(code) return indent_lines(code, new_indents - min_indents)
def fix_indentation(code, new_indents)
Change the indentation of `code` to `new_indents`
4.132587
3.837839
1.0768
# FIXME scope = pyfunction.get_scope() pymodule = pyfunction.get_module() start, end = get_body_region(pyfunction) return fix_indentation(pymodule.source_code[start:end], 0)
def get_body(pyfunction)
Return unindented function body
5.923436
5.522739
1.072554
scope = defined.get_scope() pymodule = defined.get_module() lines = pymodule.lines node = defined.get_ast() start_line = node.lineno if defined.get_doc() is None: start_line = node.body[0].lineno elif len(node.body) > 1: start_line = node.body[1].lineno start = lines.get_line_start(start_line) scope_start = pymodule.logical_lines.logical_line_in(scope.start) if scope_start[1] >= start_line: # a one-liner! # XXX: what if colon appears in a string start = pymodule.source_code.index(':', start) + 1 while pymodule.source_code[start].isspace(): start += 1 end = min(lines.get_line_end(scope.end) + 1, len(pymodule.source_code)) return start, end
def get_body_region(defined)
Return the start and end offsets of function body
3.999575
3.896141
1.026548
try: self.current_change = changes changes.do(change.create_job_set(task_handle, changes)) finally: self.current_change = None if self._is_change_interesting(changes): self.undo_list.append(changes) self._remove_extra_items() del self.redo_list[:]
def do(self, changes, task_handle=taskhandle.NullTaskHandle())
Perform the change and add it to the `self.undo_list` Note that uninteresting changes (changes to ignored files) will not be appended to `self.undo_list`.
6.632203
5.758754
1.151673
if not self._undo_list: raise exceptions.HistoryError('Undo list is empty') if change is None: change = self.undo_list[-1] dependencies = self._find_dependencies(self.undo_list, change) self._move_front(self.undo_list, dependencies) self._perform_undos(len(dependencies), task_handle) result = self.redo_list[-len(dependencies):] if drop: del self.redo_list[-len(dependencies):] return result
def undo(self, change=None, drop=False, task_handle=taskhandle.NullTaskHandle())
Redo done changes from the history When `change` is `None`, the last done change will be undone. If change is not `None` it should be an item from `self.undo_list`; this change and all changes that depend on it will be undone. In both cases the list of undone changes will be returned. If `drop` is `True`, the undone change will not be appended to the redo list.
3.490494
3.263222
1.069646
if not self.redo_list: raise exceptions.HistoryError('Redo list is empty') if change is None: change = self.redo_list[-1] dependencies = self._find_dependencies(self.redo_list, change) self._move_front(self.redo_list, dependencies) self._perform_redos(len(dependencies), task_handle) return self.undo_list[-len(dependencies):]
def redo(self, change=None, task_handle=taskhandle.NullTaskHandle())
Redo undone changes from the history When `change` is `None`, the last undone change will be redone. If change is not `None` it should be an item from `self.redo_list`; this change and all changes that depend on it will be redone. In both cases the list of redone changes will be returned.
3.941018
3.49465
1.127729
return PyModule(self, code, resource, force_errors=force_errors)
def get_string_module(self, code, resource=None, force_errors=False)
Returns a `PyObject` object for the given code If `force_errors` is `True`, `exceptions.ModuleSyntaxError` is raised if module has syntax errors. This overrides ``ignore_syntax_errors`` project config.
5.604776
6.186847
0.905918
return rope.base.libutils.get_string_scope(code, resource)
def get_string_scope(self, code, resource=None)
Returns a `Scope` object for the given code
6.316445
7.023154
0.899374
perform_doa = self.project.prefs.get('perform_doi', True) perform_doa = self.project.prefs.get('perform_doa', perform_doa) receiver = self.object_info.doa_data_received if not perform_doa: receiver = None runner = rope.base.oi.doa.PythonFileRunner( self, resource, args, stdin, stdout, receiver) runner.add_finishing_observer(self.module_cache.forget_all_data) runner.run() return runner
def run_module(self, resource, args=None, stdin=None, stdout=None)
Run `resource` module Returns a `rope.base.oi.doa.PythonFileRunner` object for controlling the process.
7.33523
4.790462
1.531215
if followed_calls is None: followed_calls = self.project.prefs.get('soa_followed_calls', 0) pymodule = self.resource_to_pyobject(resource) self.module_cache.forget_all_data() rope.base.oi.soa.analyze_module( self, pymodule, should_analyze, search_subscopes, followed_calls)
def analyze_module(self, resource, should_analyze=lambda py: True, search_subscopes=lambda py: True, followed_calls=None)
Analyze `resource` module for static object inference This function forces rope to analyze this module to collect information about function calls. `should_analyze` is a function that is called with a `PyDefinedObject` argument. If it returns `True` the element is analyzed. If it is `None` or returns `False` the element is not analyzed. `search_subscopes` is like `should_analyze`; The difference is that if it returns `False` the sub-scopes are all ignored. That is it is assumed that `should_analyze` returns `False` for all of its subscopes. `followed_calls` override the value of ``soa_followed_calls`` project config.
5.746325
4.455275
1.28978
left, right = self._get_changed(start, end) if left < right: return True return False
def is_changed(self, start, end)
Tell whether any of start till end lines have changed The end points are inclusive and indices start from 1.
4.946855
5.088139
0.972233
left, right = self._get_changed(start, end) if left < right: del self.lines[left:right] return left < right
def consume_changes(self, start, end)
Clear the changed status of lines from start till end
5.814809
4.988348
1.165678
if name not in self.get_names(): raise exceptions.NameNotFoundError('name %s not found' % name) return self.get_names()[name]
def get_name(self, name)
Return name `PyName` defined in this scope
3.843183
3.575405
1.074894
# XXX: breaking if gave up! use generators result = [] for module in self.names: for global_name in self.names[module]: if global_name.startswith(starting): result.append((global_name, module)) return result
def import_assist(self, starting)
Return a list of ``(name, module)`` tuples This function tries to find modules that have a global name that starts with `starting`.
7.304668
6.078548
1.201713
result = [] for module in self.names: if name in self.names[module]: result.append(module) return result
def get_modules(self, name)
Return the list of modules that have global `name`
3.220141
2.657063
1.211917
result = set() for module in self.names: result.update(set(self.names[module])) return result
def get_all_names(self)
Return the list of all cached global names
4.797442
4.269058
1.123771
result = [] for module in self.names: if name in self.names[module]: try: pymodule = self.project.get_module(module) if name in pymodule: pyname = pymodule[name] module, lineno = pyname.get_definition_location() if module is not None: resource = module.get_module().get_resource() if resource is not None and lineno is not None: result.append((resource, lineno)) except exceptions.ModuleNotFoundError: pass return result
def get_name_locations(self, name)
Return a list of ``(resource, lineno)`` tuples
3.519802
3.128608
1.125038
if resources is None: resources = self.project.get_python_files() job_set = task_handle.create_jobset( 'Generatig autoimport cache', len(resources)) for file in resources: job_set.started_job('Working on <%s>' % file.path) self.update_resource(file, underlined) job_set.finished_job()
def generate_cache(self, resources=None, underlined=None, task_handle=taskhandle.NullTaskHandle())
Generate global name cache for project files If `resources` is a list of `rope.base.resource.File`\s, only those files are searched; otherwise all python modules in the project are cached.
5.243079
4.594368
1.141197
job_set = task_handle.create_jobset( 'Generatig autoimport cache for modules', len(modules)) for modname in modules: job_set.started_job('Working on <%s>' % modname) if modname.endswith('.*'): mod = self.project.find_module(modname[:-2]) if mod: for sub in submodules(mod): self.update_resource(sub, underlined) else: self.update_module(modname, underlined) job_set.finished_job()
def generate_modules_cache(self, modules, underlined=None, task_handle=taskhandle.NullTaskHandle())
Generate global name cache for modules listed in `modules`
4.448325
4.446663
1.000374
match = re.search(r'^(def|class)\s+', code) if match is not None: code = code[:match.start()] try: pymodule = libutils.get_string_module(self.project, code) except exceptions.ModuleSyntaxError: return 1 testmodname = '__rope_testmodule_rope' importinfo = importutils.NormalImport(((testmodname, None),)) module_imports = importutils.get_module_imports(self.project, pymodule) module_imports.add_import(importinfo) code = module_imports.get_changed_source() offset = code.index(testmodname) lineno = code.count('\n', 0, offset) + 1 return lineno
def find_insertion_line(self, code)
Guess at what line the new import should be inserted
4.966973
4.854114
1.02325
try: pymodule = self.project.get_pymodule(resource) modname = self._module_name(resource) self._add_names(pymodule, modname, underlined) except exceptions.ModuleSyntaxError: pass
def update_resource(self, resource, underlined=None)
Update the cache for global names in `resource`
6.422346
5.680969
1.130502
try: pymodule = self.project.get_module(modname) self._add_names(pymodule, modname, underlined) except exceptions.ModuleNotFoundError: pass
def update_module(self, modname, underlined=None)
Update the cache for global names in `modname` module `modname` is the name of a module.
5.027065
5.086607
0.988294
finder = similarfinder.RawSimilarFinder(code) matches = list(finder.get_matches(pattern)) ast = patchedast.get_patched_ast(code) lines = codeanalyze.SourceLinesAdapter(code) template = similarfinder.CodeTemplate(goal) computer = _ChangeComputer(code, ast, lines, template, matches) result = computer.get_changed() if result is None: return code return result
def replace(code, pattern, goal)
used by other refactorings
8.854532
8.447388
1.048198
if checks is not None: warnings.warn( 'The use of checks parameter is deprecated; ' 'use the args parameter of the constructor instead.', DeprecationWarning, stacklevel=2) for name, value in checks.items(): self.args[name] = similarfinder._pydefined_to_str(value) if imports is not None: warnings.warn( 'The use of imports parameter is deprecated; ' 'use imports parameter of the constructor, instead.', DeprecationWarning, stacklevel=2) self.imports = imports changes = change.ChangeSet('Restructuring <%s> to <%s>' % (self.pattern, self.goal)) if resources is not None: files = [resource for resource in resources if libutils.is_python_file(self.project, resource)] else: files = self.project.get_python_files() job_set = task_handle.create_jobset('Collecting Changes', len(files)) for resource in files: job_set.started_job(resource.path) pymodule = self.project.get_pymodule(resource) finder = similarfinder.SimilarFinder(pymodule, wildcards=self.wildcards) matches = list(finder.get_matches(self.pattern, self.args)) computer = self._compute_changes(matches, pymodule) result = computer.get_changed() if result is not None: imported_source = self._add_imports(resource, result, self.imports) changes.add_change(change.ChangeContents(resource, imported_source)) job_set.finished_job() return changes
def get_changes(self, checks=None, imports=None, resources=None, task_handle=taskhandle.NullTaskHandle())
Get the changes needed by this restructuring `resources` can be a list of `rope.base.resources.File`\s to apply the restructuring on. If `None`, the restructuring will be applied to all python files. `checks` argument has been deprecated. Use the `args` argument of the constructor. The usage of:: strchecks = {'obj1.type': 'mod.A', 'obj2': 'mod.B', 'obj3.object': 'mod.C'} checks = restructuring.make_checks(strchecks) can be replaced with:: args = {'obj1': 'type=mod.A', 'obj2': 'name=mod.B', 'obj3': 'object=mod.C'} where obj1, obj2 and obj3 are wildcard names that appear in restructuring pattern.
3.947387
3.616872
1.091381
checks = {} for key, value in string_checks.items(): is_pyname = not key.endswith('.object') and \ not key.endswith('.type') evaluated = self._evaluate(value, is_pyname=is_pyname) if evaluated is not None: checks[key] = evaluated return checks
def make_checks(self, string_checks)
Convert str to str dicts to str to PyObject dicts This function is here to ease writing a UI.
4.215074
4.289494
0.982651
info = _ExtractInfo( self.project, self.resource, self.start_offset, self.end_offset, extracted_name, variable=self.kind == 'variable', similar=similar, make_global=global_) new_contents = _ExtractPerformer(info).extract() changes = ChangeSet('Extract %s <%s>' % (self.kind, extracted_name)) changes.add_change(ChangeContents(self.resource, new_contents)) return changes
def get_changes(self, extracted_name, similar=False, global_=False)
Get the changes this refactoring makes :parameters: - `similar`: if `True`, similar expressions/statements are also replaced. - `global_`: if `True`, the extracted method/variable will be global.
6.262946
6.248012
1.00239
if self._returned is None: node = _parse_text(self.extracted) self._returned = usefunction._returns_last(node) return self._returned
def returned(self)
Does the extracted piece contain return statement
14.378291
11.564509
1.243312
visitor = _NodeNameCollector() ast.walk(node, visitor) return visitor.names
def get_name_levels(node)
Return a list of ``(name, level)`` tuples for assigned names The `level` is `None` for simple assignments and is a list of numbers for tuple assignments for example in:: a, (b, c) = x The levels for for `a` is ``[0]``, for `b` is ``[1, 0]`` and for `c` is ``[1, 1]``.
8.778299
12.297425
0.713832
msg = None code = self.code tries = 0 while True: try: if tries == 0 and self.resource is not None and \ self.resource.read() == code: return self.project.get_pymodule(self.resource, force_errors=True) return libutils.get_string_module( self.project, code, resource=self.resource, force_errors=True) except exceptions.ModuleSyntaxError as e: if msg is None: msg = '%s:%s %s' % (e.filename, e.lineno, e.message_) if tries < self.maxfixes: tries += 1 self.commenter.comment(e.lineno) code = '\n'.join(self.commenter.lines) else: raise exceptions.ModuleSyntaxError( e.filename, e.lineno, 'Failed to fix error: {0}'.format(msg))
def get_pymodule(self)
Get a `PyModule`
4.271845
4.256234
1.003668
def call(self, job_set=taskhandle.NullJobSet()): job_set.started_job(str(self)) function(self) job_set.finished_job() return call
def _handle_job_set(function)
A decorator for handling `taskhandle.JobSet`\s A decorator for handling `taskhandle.JobSet`\s for `do` and `undo` methods of `Change`\s.
7.34405
7.850827
0.935449
if isinstance(change, ChangeSet): result = 0 for child in change.changes: result += count_changes(child) return result return 1
def count_changes(change)
Counts the number of basic changes a `Change` will make
3.093306
3.113383
0.993551
try: pyclass, attr_name = get_class_with_attr_name(pyname) except TypeError: return else: for super_pyclass in get_mro(pyclass)[1:]: if attr_name in super_pyclass: return super_pyclass[attr_name]
def get_super_assignment(pyname)
:type pyname: rope.base.pynamesdef.AssignedName :type: rope.base.pynamesdef.AssignedName
3.782596
3.760025
1.006003
lineno = get_lineno_for_node(pyname.assignments[0].ast_node) holding_scope = pyname.module.get_scope().get_inner_scope_for_line(lineno) pyobject = holding_scope.pyobject if isinstance(pyobject, PyClass): pyclass = pyobject elif (isinstance(pyobject, PyFunction) and isinstance(pyobject.parent, PyClass)): pyclass = pyobject.parent else: return for name, attr in pyclass.get_attributes().items(): if attr is pyname: return (pyclass, name)
def get_class_with_attr_name(pyname)
:type pyname: rope.base.pynamesdef.AssignedName :return: rope.base.pyobjectsdef.PyClass, str :rtype: tuple
3.427252
3.291016
1.041396
if '.' not in type_name: try: return pyobject.get_module().get_scope().get_name(type_name).get_object() except Exception: pass else: mod_name, attr_name = type_name.rsplit('.', 1) try: mod_finder = ScopeNameFinder(pyobject.get_module()) mod = mod_finder._find_module(mod_name).get_object() return mod.get_attribute(attr_name).get_object() except Exception: pass
def resolve_type(type_name, pyobject)
:type type_name: str :type pyobject: rope.base.pyobjects.PyDefinedObject | rope.base.pyobjects.PyObject :rtype: rope.base.pyobjects.PyDefinedObject | rope.base.pyobjects.PyObject or None
2.684484
2.510695
1.06922
_analyze_node(pycore, pymodule, should_analyze, search_subscopes, followed_calls)
def analyze_module(pycore, pymodule, should_analyze, search_subscopes, followed_calls)
Analyze `pymodule` for static object inference Analyzes scopes for collecting object information. The analysis starts from inner scopes.
2.926702
4.186815
0.699028
function_changer = _FunctionChangers(self.pyname.get_object(), self._definfo(), changers) return self._change_calls(function_changer, in_hierarchy, resources, task_handle)
def get_changes(self, changers, in_hierarchy=False, resources=None, task_handle=taskhandle.NullTaskHandle())
Get changes caused by this refactoring `changers` is a list of `_ArgumentChanger`\s. If `in_hierarchy` is `True` the changers are applyed to all matching methods in the class hierarchy. `resources` can be a list of `rope.base.resource.File`\s that should be searched for occurrences; if `None` all python files in the project are searched.
9.082399
8.771086
1.035493
stack = changestack.ChangeStack(self.project, 'Fixing module names') jobset = task_handle.create_jobset('Fixing module names', self._count_fixes(fixer) + 1) try: while True: for resource in self._tobe_fixed(fixer): jobset.started_job(resource.path) renamer = rename.Rename(self.project, resource) changes = renamer.get_changes(fixer(self._name(resource))) stack.push(changes) jobset.finished_job() break else: break finally: jobset.started_job('Reverting to original state') stack.pop_all() jobset.finished_job() return stack.merged()
def get_changes(self, fixer=str.lower, task_handle=taskhandle.NullTaskHandle())
Fix module names `fixer` is a function that takes and returns a `str`. Given the name of a module, it should return the fixed name.
4.388052
4.725575
0.928575
object_info = pyfunction.pycore.object_info result = object_info.get_exact_returned(pyfunction, args) if result is not None: return result result = _infer_returned(pyfunction, args) if result is not None: if args and pyfunction.get_module().get_resource() is not None: params = args.get_arguments( pyfunction.get_param_names(special_args=False)) object_info.function_called(pyfunction, params, result) return result result = object_info.get_returned(pyfunction, args) if result is not None: return result hint_return = get_type_hinting_factory(pyfunction.pycore.project).make_return_provider() type_ = hint_return(pyfunction) if type_ is not None: return rope.base.pyobjects.PyObject(type_)
def infer_returned_object(pyfunction, args)
Infer the `PyObject` this `PyFunction` returns after calling
4.399436
4.520641
0.973188
object_info = pyfunction.pycore.object_info result = object_info.get_parameter_objects(pyfunction) if result is None: result = _parameter_objects(pyfunction) _handle_first_parameter(pyfunction, result) return result
def infer_parameter_objects(pyfunction)
Infer the `PyObject`\s of parameters of this `PyFunction`
4.58126
5.21726
0.878097
if "cpython" in name: return os.path.splitext(os.path.splitext(name)[0])[0] # XXX: Special handling for Fedora python2 distribution # See: https://github.com/python-rope/rope/issues/211 if name == "timemodule.so": return "time" return os.path.splitext(name)[0]
def normalize_so_name(name)
Handle different types of python installations
5.055377
4.639252
1.089697
if len(a) != len(b): return False # Computes the bitwise difference of all characters in the two strings # before returning whether or not they are equal. difference = 0 for (a_char, b_char) in zip(a, b): difference |= ord(a_char) ^ ord(b_char) return difference == 0
def _compat_compare_digest(a, b)
Implementation of hmac.compare_digest for python < 2.7.7. This function uses an approach designed to prevent timing analysis by avoiding content-based short circuiting behaviour, making it appropriate for cryptography.
3.031298
3.161319
0.958871
env = dict(os.environ) file_path = self.file.real_path path_folders = self.pycore.project.get_source_folders() + \ self.pycore.project.get_python_path_folders() env['PYTHONPATH'] = os.pathsep.join(folder.real_path for folder in path_folders) runmod_path = self.pycore.project.find_module('rope.base.oi.runmod').real_path self.receiver = None self._init_data_receiving() send_info = '-' if self.receiver: send_info = self.receiver.get_send_info() args = [sys.executable, runmod_path, send_info, self.pycore.project.address, self.file.real_path] if self.analyze_data is None: del args[1:4] if self.args is not None: args.extend(self.args) self.process = subprocess.Popen( executable=sys.executable, args=args, env=env, cwd=os.path.split(file_path)[0], stdin=self.stdin, stdout=self.stdout, stderr=self.stdout, close_fds=os.name != 'nt')
def run(self)
Execute the process
3.879384
3.780312
1.026207
self.process.wait() if self.analyze_data: self.receiving_thread.join()
def wait_process(self)
Wait for the process to finish
10.027712
8.396036
1.194339
if self.process.poll() is not None: return try: if hasattr(self.process, 'terminate'): self.process.terminate() elif os.name != 'nt': os.kill(self.process.pid, 9) else: import ctypes handle = int(self.process._handle) ctypes.windll.kernel32.TerminateProcess(handle, -1) except OSError: pass
def kill_process(self)
Stop the process
2.391668
2.364515
1.011484
if end is None: end = len(self.source) for match in self._get_matched_asts(code): match_start, match_end = match.get_region() if start <= match_start and match_end <= end: if skip is not None and (skip[0] < match_end and skip[1] > match_start): continue yield match
def get_matches(self, code, start=0, end=None, skip=None)
Search for `code` in source and return a list of `Match`\es `code` can contain wildcards. ``${name}`` matches normal names and ``${?name} can match any expression. You can use `Match.get_ast()` for getting the node that has matched a given pattern.
3.030524
3.140315
0.965038
children = ast.get_children(node) return [child for child in children if not isinstance(child, ast.expr_context)]
def _get_children(self, node)
Return not `ast.expr_context` children of `node`
4.694181
2.808916
1.671172
pymodule = pydefined.get_module() module = module_imports.ModuleImports(project, pymodule) if pymodule == pydefined: return [stmt.import_info for stmt in module.imports] return module.get_used_imports(pydefined)
def get_imports(project, pydefined)
A shortcut for getting the `ImportInfo`\s used in a scope
5.954219
5.315474
1.120167
module_name = libutils.modname(resource) names = [] if isinstance(name, list): names = [(imported, None) for imported in name] else: names = [(name, None), ] return FromImport(module_name, 0, tuple(names))
def get_from_import(self, resource, name)
The from import statement for `name` in `resource`
6.565216
5.535651
1.185988
pynames_ = set([pyname]) filters = [] if only_calls: filters.append(CallsFilter()) if not imports: filters.append(NoImportsFilter()) if not keywords: filters.append(NoKeywordsFilter()) if isinstance(instance, pynames.ParameterName): for pyobject in instance.get_objects(): try: pynames_.add(pyobject[name]) except exceptions.AttributeNotFoundError: pass for pyname in pynames_: filters.append(PyNameFilter(pyname)) if in_hierarchy: filters.append(InHierarchyFilter(pyname)) if unsure: filters.append(UnsureFilter(unsure)) return Finder(project, name, filters=filters, docs=docs)
def create_finder(project, name, pyname, only_calls=False, imports=True, unsure=None, docs=False, instance=None, in_hierarchy=False, keywords=True)
A factory for `Finder` Based on the arguments it creates a list of filters. `instance` argument is needed only when you want implicit interfaces to be considered.
2.878651
2.949743
0.975899
if expected is None or pyname is None: return False if expected == pyname: return True if type(expected) not in (pynames.ImportedModule, pynames.ImportedName) \ and type(pyname) not in \ (pynames.ImportedModule, pynames.ImportedName): return False return expected.get_definition_location() == \ pyname.get_definition_location() and \ expected.get_object() == pyname.get_object()
def same_pyname(expected, pyname)
Check whether `expected` and `pyname` are the same
2.792813
2.756153
1.013301
if pyname is None: return True if unbound and not isinstance(pyname, pynames.UnboundName): return False if pyname.get_object() == pyobjects.get_unknown(): return True
def unsure_pyname(pyname, unbound=True)
Return `True` if we don't know what this name references
4.61104
4.208746
1.095585
tools = _OccurrenceToolsCreator(self.project, resource=resource, pymodule=pymodule, docs=self.docs) for offset in self._textual_finder.find_offsets(tools.source_code): occurrence = Occurrence(tools, offset) for filter in self.filters: result = filter(occurrence) if result is None: continue if result: yield occurrence break
def find_occurrences(self, resource=None, pymodule=None)
Generate `Occurrence` instances
6.179321
5.867699
1.053108
generate = eval('Generate' + kind.title()) return generate(project, resource, offset)
def create_generate(kind, project, resource, offset)
A factory for creating `Generate` objects `kind` can be 'variable', 'function', 'class', 'module' or 'package'.
5.528105
7.768207
0.711632
if sourcefolder is None: sourcefolder = project.root packages = name.split('.') parent = sourcefolder for package in packages[:-1]: parent = parent.get_child(package) return parent.create_file(packages[-1] + '.py')
def create_module(project, name, sourcefolder=None)
Creates a module and returns a `rope.base.resources.File`
3.297382
2.866417
1.15035
def create_package(project, name, sourcefolder=None):
    """Creates a package and returns a `rope.base.resources.Folder`.

    `name` is a dotted package name; all intermediate packages must
    already exist under `sourcefolder` (the project root by default).
    An empty ``__init__.py`` is created inside the new package.
    """
    if sourcefolder is None:
        sourcefolder = project.root
    packages = name.split('.')
    parent = sourcefolder
    # Walk down to the folder that will contain the new package.
    for package in packages[:-1]:
        parent = parent.get_child(package)
    made_packages = parent.create_folder(packages[-1])
    made_packages.create_file('__init__.py')
    return made_packages
3.32173
2.944386
1.128157
def rename_in_module(occurrences_finder, new_name, resource=None,
                     pymodule=None, replace_primary=False, region=None,
                     reads=True, writes=True):
    """Returns the changed source or `None` if there is no changes.

    Either `resource` or `pymodule` supplies the source.  `region`, if
    given, is a ``(start, end)`` offset pair limiting which occurrences
    are renamed; `reads`/`writes` restrict renaming to read or write
    occurrences respectively.
    """
    if resource is not None:
        source_code = resource.read()
    else:
        source_code = pymodule.source_code
    change_collector = codeanalyze.ChangeCollector(source_code)
    for occurrence in occurrences_finder.find_occurrences(resource,
                                                          pymodule):
        if replace_primary and occurrence.is_a_fixed_primary():
            continue
        if replace_primary:
            start, end = occurrence.get_primary_range()
        else:
            start, end = occurrence.get_word_range()
        # Skip reads when only writes were requested, and vice versa.
        if (not reads and not occurrence.is_written()) or \
                (not writes and occurrence.is_written()):
            continue
        if region is None or region[0] <= start < region[1]:
            change_collector.add_change(start, end, new_name)
    return change_collector.get_changed()
2.701856
2.585357
1.045061
def get_changes(self, new_name, in_file=None, in_hierarchy=False,
                unsure=None, docs=False, resources=None,
                task_handle=taskhandle.NullTaskHandle()):
    """Get the changes needed for this refactoring.

    Parameters:

    - `in_hierarchy`: when renaming a method this keyword forces to
      rename all matching methods in the hierarchy
    - `docs`: when `True` rename refactoring will rename occurrences
      in comments and strings where the name is visible.  Setting it
      will make renames faster, too.
    - `unsure`: decides what to do about unsure occurrences.  If
      `None`, they are ignored.  Otherwise `unsure` is called with an
      instance of `occurrence.Occurrence` as parameter.  If it returns
      `True`, the occurrence is considered to be a match.
    - `resources` can be a list of `rope.base.resources.File`\\s to
      apply this refactoring on.  If `None`, the restructuring will be
      applied to all python files.
    - `in_file`: this argument has been deprecated; use `resources`
      instead.
    """
    if unsure in (True, False):
        # Backward compatibility: a bare boolean is wrapped in a
        # constant function.
        warnings.warn(
            'unsure parameter should be a function that returns '
            'True or False', DeprecationWarning, stacklevel=2)
        def unsure_func(value=unsure):
            return value
        unsure = unsure_func
    if in_file is not None:
        warnings.warn(
            '`in_file` argument has been deprecated; use `resources` '
            'instead. ', DeprecationWarning, stacklevel=2)
        if in_file:
            resources = [self.resource]
    if _is_local(self.old_pyname):
        # Local names can only be referenced from their own module.
        resources = [self.resource]
    if resources is None:
        resources = self.project.get_python_files()
    changes = ChangeSet('Renaming <%s> to <%s>' %
                        (self.old_name, new_name))
    finder = occurrences.create_finder(
        self.project, self.old_name, self.old_pyname, unsure=unsure,
        docs=docs, instance=self.old_instance,
        in_hierarchy=in_hierarchy and self.is_method())
    job_set = task_handle.create_jobset('Collecting Changes',
                                        len(resources))
    for file_ in resources:
        job_set.started_job(file_.path)
        new_content = rename_in_module(finder, new_name, resource=file_)
        if new_content is not None:
            changes.add_change(ChangeContents(file_, new_content))
        job_set.finished_job()
    if self._is_renaming_a_module():
        resource = self.old_pyname.get_object().get_resource()
        if self._is_allowed_to_move(resources, resource):
            self._rename_module(resource, new_name, changes)
    return changes
3.833748
3.541389
1.082555
def real_code(source):
    """Simplify `source` for analysis.

    It replaces:

    * comments with spaces
    * strs with a new str filled with spaces
    * implicit and explicit continuations with spaces
    * tabs with spaces; semicolons become newlines

    The resulting code is a lot easier to analyze if we are interested
    only in offsets, since every replacement preserves string length
    (except the final semicolon/continuation cleanup).
    """
    collector = codeanalyze.ChangeCollector(source)
    for start, end in ignored_regions(source):
        if source[start] == '#':
            replacement = ' ' * (end - start)
        else:
            # Keep the surrounding quotes so tokenization stays valid.
            replacement = '"%s"' % (' ' * (end - start - 2))
        collector.add_change(start, end, replacement)
    source = collector.get_changed() or source
    collector = codeanalyze.ChangeCollector(source)
    parens = 0
    for match in _parens.finditer(source):
        i = match.start()
        c = match.group()
        if c in '({[':
            parens += 1
        if c in ')}]':
            parens -= 1
        if c == '\n' and parens > 0:
            # A newline inside brackets is an implicit continuation.
            collector.add_change(i, i + 1, ' ')
    source = collector.get_changed() or source
    return source.replace('\\\n', ' ').replace('\t', ' ').replace(';', '\n')
3.184174
3.126747
1.018366
def ignored_regions(source):
    """Return ignored regions like strings and comments in `source`.

    Returns a list of ``(start, end)`` offset pairs, one per match of
    the module-level ``_str`` pattern.
    """
    return [(match.start(), match.end()) for match in _str.finditer(source)]
7.281733
5.717357
1.273619
def move(self, new_location):
    """Move resource to `new_location`."""
    self._perform_change(change.MoveResource(self, new_location),
                         'Moving <%s> to <%s>' % (self.path, new_location))
7.810082
5.670108
1.377413
def get_children(self):
    """Return the (non-ignored) children of this folder.

    Returns an empty list if the underlying directory cannot be listed
    (e.g. it was removed, or permissions deny access).  Entries that
    cannot be resolved to resources or that the project ignores are
    skipped.
    """
    try:
        children = os.listdir(self.real_path)
    except OSError:
        return []
    result = []
    for name in children:
        try:
            child = self.get_child(name)
        except exceptions.ResourceNotFoundError:
            continue
        if not self.project.is_ignored(child):
            # Reuse the already-resolved child instead of calling
            # self.get_child(name) a second time (the original did the
            # lookup twice per entry).
            result.append(child)
    return result
3.136865
3.043945
1.030526
def _create_indexes(self):
    """Ensures the proper fields are indexed."""
    # WARNING: The collection will be locked during the index
    # creation. If the collection has a large number of
    # documents in it, the operation can take a long time.

    # TODO: The creation of indexes can be moved to a Django
    # management command or equivalent. There is also an option to
    # run the indexing on the background, without locking.

    # NOTE(review): ensure_index is deprecated in modern pymongo in
    # favor of create_index -- confirm the pymongo version this
    # project pins before upgrading.
    self.collection.ensure_index([('time', pymongo.DESCENDING)])
    self.collection.ensure_index('name')
7.56594
7.055594
1.072332
def send(self, event):
    """Insert the event in to the Mongo collection."""
    try:
        self.collection.insert(event, manipulate=False)
    except (PyMongoError, BSONError):
        # The event will be lost in case of a connection error or any
        # error that occurs when trying to insert the event into Mongo.
        # pymongo will re-connect/re-authenticate automatically
        # during the next event.
        msg = 'Error inserting to MongoDB event tracker backend'
        log.exception(msg)
8.777101
8.212517
1.068747
def create_backends_from_settings(self):
    """Create backends from the "EVENT_TRACKING_BACKENDS" Django setting.

    Expects the setting to point to a dictionary of backend engine
    configurations.  Example::

        EVENT_TRACKING_BACKENDS = {
            'default': {
                'ENGINE': 'some.arbitrary.Backend',
                'OPTIONS': {
                    'endpoint': 'http://something/event'
                }
            },
            'another_engine': {
                'ENGINE': 'some.arbitrary.OtherBackend',
                'OPTIONS': {
                    'user': 'foo'
                }
            },
        }
    """
    config = getattr(settings, DJANGO_BACKEND_SETTING_NAME, {})
    backends = self.instantiate_objects(config)
    return backends
9.063165
13.017545
0.696227
def instantiate_objects(self, node):
    """Recursively traverse a structure, instantiating configured objects.

    Any dictionary containing the special "ENGINE" key is replaced by
    an instance of the named class, constructed with the contents of
    its sibling "OPTIONS" dictionary as keyword arguments (see
    `instantiate_from_dict`).  Other dictionaries and lists are
    traversed recursively; all remaining values pass through unchanged.
    """
    result = node
    if isinstance(node, dict):
        if 'ENGINE' in node:
            result = self.instantiate_from_dict(node)
        else:
            result = {}
            for key, value in six.iteritems(node):
                result[key] = self.instantiate_objects(value)
    elif isinstance(node, list):
        result = []
        for child in node:
            result.append(self.instantiate_objects(child))
    return result
2.344957
2.226634
1.05314
def instantiate_from_dict(self, values):
    """Construct an object from a configuration dictionary.

    `values` must contain an "ENGINE" key holding the full dotted path
    to the class, and may contain an "OPTIONS" dictionary whose entries
    are passed to the constructor as keyword arguments (after
    recursively instantiating any nested "ENGINE" configurations).

    :raises ValueError: if the class cannot be imported or found.
    """
    name = values['ENGINE']
    options = values.get('OPTIONS', {})

    # Parse the name into module path and class name.
    parts = name.split('.')
    module_name = '.'.join(parts[:-1])
    class_name = parts[-1]

    # Get the class
    try:
        module = import_module(module_name)
        cls = getattr(module, class_name)
    except (ValueError, AttributeError, TypeError, ImportError):
        raise ValueError('Cannot find class %s' % name)

    # Nested configurations inside OPTIONS are instantiated first.
    options = self.instantiate_objects(options)

    return cls(**options)
3.022021
2.618738
1.153999
def create_processors_from_settings(self):
    """Create processors from the "EVENT_TRACKING_PROCESSORS" setting.

    Expects the Django setting to point to a list of engine
    configurations.  Example::

        EVENT_TRACKING_PROCESSORS = [
            {
                'ENGINE': 'some.arbitrary.Processor'
            },
            {
                'ENGINE': 'some.arbitrary.OtherProcessor',
                'OPTIONS': {
                    'user': 'foo'
                }
            },
        ]
    """
    config = getattr(settings, DJANGO_PROCESSOR_SETTING_NAME, [])
    processors = self.instantiate_objects(config)
    return processors
9.604125
12.589981
0.762839
def register_backend(self, name, backend):
    """Register a new backend that will be called for each processed event.

    Note that backends are called in the order that they are
    registered.  The backend must expose a callable ``send`` method.

    :raises ValueError: if `backend` has no callable ``send`` method.
    """
    send_method = getattr(backend, 'send', None)
    if not callable(send_method):
        raise ValueError('Backend %s does not have a callable "send" method.' % backend.__class__.__name__)
    self.backends[name] = backend
3.14987
3.38757
0.929832
def register_processor(self, processor):
    """Register a new processor.

    Note that processors are called in the order that they are
    registered.

    :raises ValueError: if `processor` is not callable.
    """
    if callable(processor):
        self.processors.append(processor)
    else:
        raise ValueError('Processor %s is not callable.' % processor.__class__.__name__)
3.28817
3.834548
0.857512
def send(self, event):
    """Process the event with all registered processors, then emit it.

    A processor raising `EventEmissionExit` silently cancels emission.
    NOTE(review): other exceptions propagate from `process_event` here;
    any "swallow all exceptions" behavior presumably lives inside
    `process_event`/`send_to_backends` -- confirm against those methods.
    """
    try:
        processed_event = self.process_event(event)
    except EventEmissionExit:
        return
    else:
        self.send_to_backends(processed_event)
5.9119
4.634384
1.27566
def send_to_backends(self, event):
    """Sends the event to all registered backends.

    Logs and swallows all `Exception` so one failing backend cannot
    prevent delivery to the others.
    """
    for name, backend in six.iteritems(self.backends):
        try:
            backend.send(event)
        except Exception:  # pylint: disable=broad-except
            LOG.exception(
                'Unable to send event to backend: %s', name
            )
2.925888
2.678205
1.092481
def emit(self, name=None, data=None):
    """Emit an event annotated with the UTC time when this function was called.

    `name` is a unique identification string for an event that has
    already been registered.  `data` is a dictionary mapping field
    names to the value to include in the event.  Note that all values
    provided must be serializable.
    """
    event = {
        'name': name or UNKNOWN_EVENT_TYPE,
        'timestamp': datetime.now(UTC),
        'data': data or {},
        # Union of all currently-entered contexts.
        'context': self.resolve_context()
    }
    self.routing_backend.send(event)
6.242596
7.357834
0.848429
def resolve_context(self):
    """Create a new dictionary that corresponds to the union of all of
    the contexts that have been entered but not exited at this point.

    Keys from later-entered contexts override earlier ones.
    """
    return {
        key: value
        for entered in self.located_context.values()
        for key, value in entered.items()
    }
9.44639
6.716627
1.406419
def context(self, name, ctx):
    """Execute the block with the given context applied.

    This manager ensures that the context is removed even if an
    exception is raised within the context.
    """
    self.enter_context(name, ctx)
    try:
        yield
    finally:
        # Always unwind, even on exceptions raised inside the block.
        self.exit_context(name)
3.351721
2.833139
1.183041
def send(self, event):
    """Use the segment.com python API to send the event to segment.com.

    Silently does nothing when the analytics library is unavailable or
    when the event lacks a name or a user id (both required by Segment).
    """
    if analytics is None:
        return

    context = event.get('context', {})
    user_id = context.get('user_id')
    name = event.get('name')
    if name is None or user_id is None:
        return

    segment_context = {}

    ga_client_id = context.get('client_id')
    if ga_client_id is not None:
        segment_context['Google Analytics'] = {
            'clientId': ga_client_id
        }
    ip_address = context.get('ip')
    if ip_address is not None:
        segment_context['ip'] = ip_address
    user_agent = context.get('agent')
    if user_agent is not None:
        segment_context['userAgent'] = user_agent

    path = context.get('path')
    referer = context.get('referer')
    page = context.get('page')

    if path and not page:
        # Try to put together a url from host and path, hardcoding the
        # schema.  (Segment doesn't care about the schema for GA, but
        # will extract the host and path from the url.)
        host = context.get('host')
        if host:
            parts = ("https", host, path, "", "")
            page = urlunsplit(parts)

    if path is not None or referer is not None or page is not None:
        segment_context['page'] = {}
        if path is not None:
            segment_context['page']['path'] = path
        if referer is not None:
            segment_context['page']['referrer'] = referer
        if page is not None:
            segment_context['page']['url'] = page

    analytics.track(
        user_id,
        name,
        event,
        context=segment_context
    )
2.75384
2.691389
1.023204
def get(self):
    """Return a reference to a thread-specific context dictionary.

    Lazily creates the thread-local storage holder on first use, and a
    fresh ``OrderedDict`` the first time each thread accesses it.
    """
    local_data = self.thread_local_data
    if not local_data:
        local_data = threading.local()
        self.thread_local_data = local_data
    try:
        return local_data.context
    except AttributeError:
        local_data.context = OrderedDict()
        return local_data.context
2.707274
2.156302
1.255517
def send(self, event):
    """Send the event to the standard python logger.

    Events whose JSON serialization exceeds `max_event_size` characters
    are silently dropped.
    """
    event_str = json.dumps(event, cls=DateTimeJSONEncoder)

    # TODO: do something smarter than simply dropping the event on
    # the floor.
    if self.max_event_size is None or len(event_str) <= self.max_event_size:
        self.log(event_str)
4.229748
4.104653
1.030476
def default(self, obj):  # lint-amnesty, pylint: disable=arguments-differ, method-hidden
    """Serialize datetime and date objects to ISO format strings.

    Naive datetimes are assumed to be UTC; aware datetimes are
    converted to UTC before formatting.  Everything else defers to the
    base encoder.
    """
    if isinstance(obj, datetime):
        if obj.tzinfo is None:
            # Localize to UTC naive datetime objects
            obj = UTC.localize(obj)  # pylint: disable=no-value-for-parameter
        else:
            # Convert to UTC datetime objects from other timezones
            obj = obj.astimezone(UTC)
        return obj.isoformat()
    elif isinstance(obj, date):
        return obj.isoformat()

    return super(DateTimeJSONEncoder, self).default(obj)
3.52541
3.032558
1.16252
def get_html_theme_path():
    """Return list of HTML theme paths.

    The single entry is the absolute path of the directory containing
    this module.
    """
    return [os.path.abspath(os.path.dirname(__file__))]
2.580065
2.372744
1.087376
def setup(app):
    """Setup.

    Registers the bundled 'bootstrap' theme with Sphinx when the
    running Sphinx version supports theme registration.
    """
    # add_html_theme is new in Sphinx 1.6+
    if hasattr(app, 'add_html_theme'):
        theme_path = get_html_theme_path()[0]
        app.add_html_theme('bootstrap', os.path.join(theme_path, 'bootstrap'))
2.94684
2.79892
1.052849
def _dist_wrapper():
    """Add temporary distribution build files (and then clean up)."""
    try:
        # Copy select *.rst files to *.txt for build.
        for rst_file, txt_file in zip(SDIST_RST_FILES, SDIST_TXT_FILES):
            local("cp %s %s" % (rst_file, txt_file))

        # Perform action.
        yield
    finally:
        # Clean up temp *.txt files.
        for rst_file in SDIST_TXT_FILES:
            local("rm -f %s" % rst_file, capture=False)
4.22212
3.729074
1.132217
def get_rev(tag=True):
    """Get build revision.

    @param tag  Use git tag instead of hash?
    """
    # The "True" string comparison accommodates fabric passing CLI
    # arguments as strings.
    rev_cmd = "git describe --always --tag" if tag in (True, "True") else \
        "git rev-parse HEAD"
    return local(rev_cmd, capture=True).strip()
4.31791
5.007511
0.862287
def zip_bundle(tag=True):
    """Create zip file upload bundles.

    @param tag  Use git tag instead of hash?
        NOTE(review): currently unused -- the revision is taken from
        ``__version__`` and the git lookup is commented out.
    """
    #rev = get_rev(tag)
    rev = __version__

    print("Cleaning old build files.")
    clean()
    local("mkdir -p build")

    print("Bundling new files.")
    with lcd("sphinx_bootstrap_theme/bootstrap"):
        local("zip -r ../../build/bootstrap.zip .")

    dest = os.path.abspath(os.path.join(DL_DIR, rev))
    with lcd("build"):
        local("mkdir -p %s" % dest)
        local("cp bootstrap.zip %s" % dest)

    print("Verifying contents.")
    local("unzip -l bootstrap.zip")
5.049347
5.525303
0.913859
def read_file(name):
    """Read file `name` (without extension) to string.

    Looks for ``<name>.txt`` then ``<name>.rst`` next to this module
    and returns the contents of the first one that exists, or '' if
    neither does.
    """
    base_dir = os.path.dirname(__file__)
    for extension in ('txt', 'rst'):
        candidate = os.path.join(base_dir, '.'.join((name, extension)))
        if os.path.exists(candidate):
            with open(candidate, 'rt') as handle:
                return handle.read()
    return ''
2.542456
2.467466
1.030392
def save_model(self, request, obj, form, change):
    """Saves the message for the recipient and looks in the form
    instance for other possible recipients.  Prevents duplication by
    excluding the original recipient from the list of optional
    recipients.

    When changing an existing message and choosing optional recipients,
    the message is effectively resent to those users.
    """
    obj.save()

    if notification:
        # Getting the appropriate notice labels for the sender and
        # recipients.
        if obj.parent_msg is None:
            sender_label = 'messages_sent'
            recipients_label = 'messages_received'
        else:
            sender_label = 'messages_replied'
            recipients_label = 'messages_reply_received'

        # Notification for the sender.
        notification.send([obj.sender], sender_label, {'message': obj,})

    if form.cleaned_data['group'] == 'all':
        # send to all users
        recipients = User.objects.exclude(pk=obj.recipient.pk)
    else:
        # send to a group of users
        recipients = []
        group = form.cleaned_data['group']
        if group:
            group = Group.objects.get(pk=group)
            recipients.extend(
                list(group.user_set.exclude(pk=obj.recipient.pk)))

    # create messages for all found recipients
    for user in recipients:
        # Clearing the pk makes save() insert a fresh row per recipient.
        obj.pk = None
        obj.recipient = user
        obj.save()

        if notification:
            # Notification for the recipient.
            notification.send([user], recipients_label, {'message' : obj,})
3.465595
3.265798
1.061179
def do_print_inbox_count(parser, token):
    """A templatetag to show the unread-count for a logged in user.

    Returns the number of unread messages in the user's inbox.
    Usage::

        {% load inbox %}
        {% inbox_count %}

        {# or assign the value to a variable: #}

        {% inbox_count as my_var %}
        {{ my_var }}
    """
    bits = token.contents.split()
    if len(bits) > 1:
        # Only the two-argument form "inbox_count as var" is allowed.
        if len(bits) != 3:
            raise TemplateSyntaxError("inbox_count tag takes either no arguments or exactly two arguments")
        if bits[1] != 'as':
            raise TemplateSyntaxError("first argument to inbox_count tag must be 'as'")
        return InboxOutput(bits[2])
    else:
        return InboxOutput()
2.33478
2.679082
0.871485
def inbox_count_for(user):
    """Return the number of unread messages for the given user.

    Does not mark them as seen.  Messages the recipient deleted are
    excluded.
    """
    return Message.objects.filter(recipient=user, read_at__isnull=True,
                                  recipient_deleted_at__isnull=True).count()
3.478419
3.742561
0.929422