code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
date = self._confevent.get('confdate', {})
if len(date) > 0:
    start = {k: int(v) for k, v in date['startdate'].items()}
    end = {k: int(v) for k, v in date['enddate'].items()}
    return ((start['@year'], start['@month'], start['@day']),
            (end['@year'], end['@month'], end['@day']))
else:
    return ((None, None, None), (None, None, None))
def confdate(self)
Date range of the conference the abstract belongs to, represented by two tuples in the form (YYYY, MM, DD).
2.561013
2.250582
1.137934
sponsors = chained_get(self._confevent, ['confsponsors', 'confsponsor'], [])
if len(sponsors) == 0:
    return None
if isinstance(sponsors, list):
    return [s['$'] for s in sponsors]
return sponsors
def confsponsor(self)
Sponsor(s) of the conference the abstract belongs to.
5.525982
4.69786
1.176277
items = listify(chained_get(self._head, ['source', 'contributor-group'], []))
out = []
fields = 'given_name initials surname indexed_name role'
pers = namedtuple('Contributor', fields)
for item in items:
    entry = item.get('contributor', {})
    new = pers(indexed_name=entry.get('ce:indexed-name'),
               role=entry.get('@role'),
               surname=entry.get('ce:surname'),
               given_name=entry.get('ce:given-name'),
               initials=entry.get('ce:initials'))
    out.append(new)
return out or None
def contributor_group(self)
List of namedtuples representing contributors compiled by Scopus, in the form (given_name, initials, surname, indexed_name, role).
4.762222
3.763628
1.265328
fields = 'surname initials organization country city_group'
auth = namedtuple('Correspondence', fields)
corr = self._head.get('correspondence')
if corr is None:
    return None
aff = corr.get('affiliation', {})
try:
    org = aff['organization']
    if isinstance(org, dict):
        try:
            org = org['$']
        except TypeError:  # Multiple names given
            org = [d['$'] for d in org]
except KeyError:
    org = None
return auth(surname=corr.get('person', {}).get('ce:surname'),
            initials=corr.get('person', {}).get('ce:initials'),
            organization=org,
            country=aff.get('country'),
            city_group=aff.get('city-group'))
def correspondence(self)
namedtuple representing the author to whom correspondence should be addressed, in the form (surname, initials, organization, country, city_group).
4.181311
3.114815
1.342395
path = ['item', 'xocs:meta', 'xocs:funding-list', 'xocs:funding']
funds = listify(chained_get(self._json, path, []))
out = []
fund = namedtuple('Funding', 'agency string id acronym country')
for item in funds:
    new = fund(agency=item.get('xocs:funding-agency'),
               string=item.get('xocs:funding-agency-matched-string'),
               id=item.get('xocs:funding-agency-id'),
               acronym=item.get('xocs:funding-agency-acronym'),
               country=item.get('xocs:funding-agency-country'))
    out.append(new)
return out or None
def funding(self)
List of namedtuples with parsed funding information, in the form (agency, string, id, acronym, country).
3.756857
3.3573
1.119011
isbns = listify(chained_get(self._head, ['source', 'isbn'], []))
if len(isbns) == 0:
    return None
else:
    return tuple((i['$'] for i in isbns))
def isbn(self)
ISBNs belonging to publicationName as a tuple of varying length (e.g. ISBN-10 or ISBN-13).
7.942719
6.668188
1.191136
try:
    terms = listify(self._json.get("idxterms", {}).get('mainterm', []))
except AttributeError:  # idxterms is empty
    return None
try:
    return [d['$'] for d in terms]
except AttributeError:
    return None
def idxterms(self)
List of index terms.
7.685631
7.482924
1.027089
# Return information from FULL view, fall back to other views
full = chained_get(self._head, ['source', 'publisher', 'publishername'])
if full is None:
    return self._json['coredata'].get('dc:publisher')
else:
    return full
def publisher(self)
Name of the publisher of the abstract. Note: Information provided in the FULL view of the article might be more complete.
18.503328
13.093613
1.413157
path = ['enhancement', 'sequencebanks', 'sequencebank']
items = listify(chained_get(self._head, path, []))
bank = namedtuple('Sequencebank', 'name sequence_number type')
out = []
for item in items:
    numbers = listify(item['sequence-number'])
    for number in numbers:
        new = bank(name=item['@name'],
                   sequence_number=number['$'],
                   type=number['@type'])
        out.append(new)
return out or None
def sequencebank(self)
List of namedtuples representing biological entities defined or mentioned in the text, in the form (name, sequence_number, type).
6.173831
5.336598
1.156885
area = namedtuple('Area', 'area abbreviation code')
path = ['subject-areas', 'subject-area']
out = [area(area=item['$'], abbreviation=item['@abbrev'], code=item['@code'])
       for item in listify(chained_get(self._json, path, []))]
return out or None
def subject_areas(self)
List of namedtuples containing subject areas of the article in the form (area, abbreviation, code). Note: Requires the FULL view of the article.
6.706419
6.622284
1.012705
if self.aggregationType != 'Journal':
    raise ValueError('Only Journal articles supported.')
# Item key
year = self.coverDate[0:4]
first = self.title.split()[0].title()
last = self.title.split()[-1].title()
key = ''.join([self.authors[0].surname, year, first, last])
# Authors
authors = ' and '.join(["{} {}".format(a.given_name, a.surname)
                        for a in self.authors])
# Pages
if self.pageRange:
    pages = self.pageRange
elif self.startingPage:
    pages = '{}-{}'.format(self.startingPage, self.endingPage)
else:
    pages = '-'
# All information
bib = "@article{{{key},\n author = {{{auth}}},\n title = "\
      "{{{{{title}}}}},\n journal = {{{jour}}},\n year = "\
      "{{{year}}},\n volume = {{{vol}}},\n number = {{{number}}},"\
      "\n pages = {{{pages}}}".format(
          key=key, auth=authors, title=self.title, year=year,
          jour=self.publicationName, vol=self.volume,
          number=self.issueIdentifier, pages=pages)
# DOI
if self.doi:
    bib += ",\n doi = {{{}}}".format(self.doi)
bib += "}"
return bib
def get_bibtex(self)
Bibliographic entry in BibTeX format.

Raises
------
ValueError
    If the item's aggregationType is not Journal.
2.97895
2.724535
1.093379
# Author links
au_link = ('<a href="https://www.scopus.com/authid/detail.url'
           '?origin=AuthorProfile&authorId={0}">{1}</a>')
if len(self.authors) > 1:
    authors = u', '.join([au_link.format(a.auid, a.given_name + ' ' + a.surname)
                          for a in self.authors[0:-1]])
    authors += (u' and ' +
                au_link.format(self.authors[-1].auid,
                               (str(self.authors[-1].given_name) + ' ' +
                                str(self.authors[-1].surname))))
else:
    a = self.authors[0]
    authors = au_link.format(a.auid, a.given_name + ' ' + a.surname)
title = u'<a href="{}">{}</a>'.format(self.scopus_link, self.title)
if self.volume and self.issueIdentifier:
    volissue = u'<b>{}({})</b>'.format(self.volume, self.issueIdentifier)
elif self.volume:
    volissue = u'<b>{}</b>'.format(self.volume)
else:
    volissue = 'no volume'
jlink = '<a href="https://www.scopus.com/source/sourceInfo.url'\
        '?sourceId={}">{}</a>'.format(self.source_id, self.publicationName)
pages = _parse_pages(self, unicode=True)
s = "{auth}, {title}, {jour}, {volissue}, {pages}, ({year}).".format(
    auth=authors, title=title, jour=jlink, volissue=volissue,
    pages=pages, year=self.coverDate[:4])
if self.doi:
    s += ' <a href="https://doi.org/{0}">doi:{0}</a>.'.format(self.doi)
return s
def get_html(self)
Bibliographic entry in html format.
2.807149
2.714015
1.034316
if len(self.authors) > 1:
    authors = _list_authors(self.authors)
else:
    a = self.authors[0]  # single author: take the only entry
    authors = ' '.join([a.given_name, a.surname])
if self.volume and self.issueIdentifier:
    volissue = '\\textbf{{{}({})}}'.format(self.volume, self.issueIdentifier)
elif self.volume:
    volissue = '\\textbf{{{}}}'.format(self.volume)
else:
    volissue = 'no volume'
pages = _parse_pages(self)
s = '{auth}, \\textit{{{title}}}, {jour}, {vol}, {pages} ({year}).'.format(
    auth=authors, title=self.title, jour=self.publicationName,
    vol=volissue, pages=pages, year=self.coverDate[:4])
if self.doi is not None:
    s += ' \\href{{https://doi.org/{0}}}{{doi:{0}}}, '.format(self.doi)
s += '\\href{{{0}}}{{scopus:{1}}}.'.format(self.scopus_link, self.eid)
return s
def get_latex(self)
Bibliographic entry in LaTeX format.
3.126633
2.970201
1.052667
if self.aggregationType != 'Journal':
    raise ValueError('Only Journal articles supported.')
# Basic information
ris = "TY - JOUR\nTI - {title}\nJO - {jour}\nVL - {vol}\n"\
      "DA - {date}\nPY - {year}\nSP - {pages}\n".format(
          title=self.title, jour=self.publicationName, vol=self.volume,
          date=self.coverDate, year=self.coverDate[0:4],
          pages=self.pageRange)
# Authors
for au in self.authors:
    ris += 'AU - {}\n'.format(au.indexed_name)
# DOI
if self.doi is not None:
    ris += 'DO - {0}\nUR - https://doi.org/{0}\n'.format(self.doi)
# Issue
if self.issueIdentifier is not None:
    ris += 'IS - {}\n'.format(self.issueIdentifier)
ris += 'ER - \n\n'
return ris
def get_ris(self)
Bibliographic entry in RIS (Research Information System Format) format for journal articles.

Raises
------
ValueError
    If the item's aggregationType is not Journal.
3.3485
2.92574
1.144497
cursor = "cursor" in params
if not cursor:
    start = params["start"]
if n == 0:
    return ""
_json = res.get('search-results', {}).get('entry', [])
# Download the remaining information in chunks
while n > 0:
    n -= params["count"]
    if cursor:
        pointer = res['search-results']['cursor'].get('@next')
        params.update({'cursor': pointer})
    else:
        start += params["count"]
        params.update({'start': start})
    res = download(url=URL[api], params=params, accept="json", **kwds).json()
    _json.extend(res.get('search-results', {}).get('entry', []))
return _json
def _parse(res, params, n, api, **kwds)
Auxiliary function to download results and parse json.
4.492766
4.283396
1.048879
file_exists = exists(CONFIG_FILE)
if not file_exists:
    # Set directories
    config.add_section('Directories')
    defaults = [
        ('AbstractRetrieval', expanduser('~/.scopus/abstract_retrieval')),
        ('AffiliationSearch', expanduser('~/.scopus/affiliation_search')),
        ('AuthorRetrieval', expanduser('~/.scopus/author_retrieval')),
        ('AuthorSearch', expanduser('~/.scopus/author_search')),
        ('CitationOverview', expanduser('~/.scopus/citation_overview')),
        ('ContentAffiliationRetrieval',
         expanduser('~/.scopus/affiliation_retrieval')),
        ('ScopusSearch', expanduser('~/.scopus/scopus_search'))
    ]
    for key, value in defaults:
        config.set('Directories', key, value)
        if not exists(value):
            makedirs(value)
    # Set authentication
    config.add_section('Authentication')
    prompt_key = "Please enter your API Key, obtained from "\
                 "http://dev.elsevier.com/myapikey.html: \n"
    if py3:
        key = input(prompt_key)
    else:
        key = raw_input(prompt_key)
    config.set('Authentication', 'APIKey', key)
    prompt_token = "API Keys are sufficient for most users. If you "\
                   "have to use Authtoken authentication, please enter "\
                   "the token, otherwise press Enter: \n"
    if py3:
        token = input(prompt_token)
    else:
        token = raw_input(prompt_token)
    if len(token) > 0:
        config.set('Authentication', 'InstToken', token)
    # Write out
    with open(CONFIG_FILE, 'w') as f:
        config.write(f)
else:
    text = "Configuration file already exists at {}; the process to "\
           "create the file has been aborted. Please open the file and "\
           "edit the entries manually.".format(CONFIG_FILE)
    raise FileExistsError(text)
def create_config()
Initiates the process to generate a configuration file.
3.136842
3.070779
1.021513
out = []
order = 'eid surname initials givenname affiliation documents '\
        'affiliation_id city country areas'
auth = namedtuple('Author', order)
for item in self._json:
    name = item.get('preferred-name', {})
    aff = item.get('affiliation-current', {})
    fields = item.get('subject-area',
                      [{'@abbrev': '', '@frequency': ''}])
    if isinstance(fields, dict):
        fields = [fields]
    areas = ["{} ({})".format(d.get('@abbrev', ''), d.get('@frequency', ''))
             for d in fields]
    new = auth(eid=item['eid'], initials=name.get('initials'),
               surname=name.get('surname'), areas="; ".join(areas),
               givenname=name.get('given-name'),
               documents=item.get('document-count', '0'),
               affiliation=aff.get('affiliation-name'),
               affiliation_id=aff.get('affiliation-id'),
               city=aff.get('affiliation-city'),
               country=aff.get('affiliation-country'))
    out.append(new)
return out or None
def authors(self)
A list of namedtuples storing author information, where each namedtuple corresponds to one author. The information in each namedtuple is (eid surname initials givenname documents affiliation affiliation_id city country areas). All entries are strings or None. Areas combines abbreviated subject areas followed by the number of documents in this subject.
3.534884
2.767868
1.277114
try:
    return "".join(container.find(xpath, ns).itertext())
except AttributeError:
    return None
def get_encoded_text(container, xpath)
Return text for element at xpath in the container xml if it is there.

Parameters
----------
container : xml.etree.ElementTree.Element
    The element to be searched in.
xpath : str
    The path to be looked for.

Returns
-------
result : str
4.506857
6.597882
0.683076
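A usage sketch of the function above, with a hypothetical document and an empty namespace map `ns` (the real module supplies its own):

import xml.etree.ElementTree as ET

ns = {}  # stand-in for the module-level namespace map

def get_encoded_text(container, xpath):
    try:
        return "".join(container.find(xpath, ns).itertext())
    except AttributeError:
        return None

root = ET.fromstring('<doc><title>An <i>italic</i> title</title></doc>')
assert get_encoded_text(root, 'title') == 'An italic title'
assert get_encoded_text(root, 'missing') is None  # missing element -> None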
config = cosmic_ray.commands.new_config()
config_str = serialize_config(config)
with open(args['<config-file>'], mode='wt') as handle:
    handle.write(config_str)
return ExitCode.OK
def handle_new_config(args)
usage: cosmic-ray new-config <config-file> Create a new config file.
5.653048
4.560646
1.239528
config_file = args['<config-file>']
config = load_config(config_file)
modules = set(cosmic_ray.modules.find_modules(Path(config['module-path'])))
log.info('Modules discovered: %s', [m for m in modules])
db_name = get_db_name(args['<session-file>'])
with use_db(db_name) as database:
    cosmic_ray.commands.init(modules, database, config)
return ExitCode.OK
def handle_init(args)
usage: cosmic-ray init <config-file> <session-file> Initialize a mutation testing session from a configuration. This primarily creates a session - a database of "work to be done" - which describes all of the mutations and test runs that need to be executed for a full mutation testing run. The configuration specifies the top-level module to mutate, the tests to run, and how to run them. This command doesn't actually run any tests. Instead, it scans the modules-under-test and simply generates the work order which can be executed with other commands. The `session-file` is the filename for the database in which the work order will be stored.
4.798303
4.866719
0.985942
session_file = get_db_name(args['<session-file>'])
with use_db(session_file) as database:
    config = database.get_config()
    print(serialize_config(config))
return ExitCode.OK
def handle_config(args)
usage: cosmic-ray config <session-file> Show the configuration for a session.
6.330183
6.012747
1.052794
session_file = get_db_name(args.get('<session-file>'))
cosmic_ray.commands.execute(session_file)
return ExitCode.OK
def handle_exec(args)
usage: cosmic-ray exec <session-file> Perform the remaining work to be done in the specified session. This requires that the rest of your mutation testing infrastructure (e.g. worker processes) are already running.
15.262448
9.486357
1.608884
session_file = get_db_name(args['<session-file>'])
with use_db(session_file, WorkDB.Mode.open) as database:
    for work_item, result in database.completed_work_items:
        print(json.dumps((work_item, result), cls=WorkItemJsonEncoder))
    for work_item in database.pending_work_items:
        print(json.dumps((work_item, None), cls=WorkItemJsonEncoder))
return ExitCode.OK
def handle_dump(args)
usage: cosmic-ray dump <session-file> JSON dump of session data. This output is typically run through other programs to produce reports. Each line of output is a list with two elements: a WorkItem and a WorkResult, both JSON-serialized. The WorkResult can be null, indicating a WorkItem with no results.
4.784995
4.354215
1.098934
assert args
print('\n'.join(cosmic_ray.plugins.operator_names()))
return ExitCode.OK
def handle_operators(args)
usage: {program} operators List the available operator plugins.
26.532423
18.39152
1.442644
assert args
print('\n'.join(cosmic_ray.plugins.execution_engine_names()))
return ExitCode.OK
def handle_execution_engines(args)
usage: {program} execution-engines List the available execution-engine plugins.
20.002863
15.760411
1.269184
assert args
print('\n'.join(cosmic_ray.plugins.interceptor_names()))
return ExitCode.OK
def handle_interceptors(args)
usage: {program} interceptors List the available interceptor plugins.
25.278656
23.008574
1.098662
python_version = args['--python-version']
if python_version is None:
    python_version = "{}.{}".format(sys.version_info.major,
                                    sys.version_info.minor)
apply_mutation(
    Path(args['<module-path>']),
    cosmic_ray.plugins.get_operator(args['<operator>'])(python_version),
    int(args['<occurrence>']))
return ExitCode.OK
def handle_apply(args)
usage: {program} apply <module-path> <operator> <occurrence> Apply the specified mutation to the files on disk. This is primarily a debugging tool. options: --python-version=VERSION Python major.minor version (e.g. 3.6) of the code being mutated.
5.213764
3.495201
1.491692
config = load_config(args.get('<config-file>'))
with open(os.devnull, 'w') as devnull:
    with redirect_stdout(sys.stdout if args['--keep-stdout'] else devnull):
        work_item = cosmic_ray.worker.worker(
            Path(args['<module-path>']),
            config.python_version,
            args['<operator>'],
            int(args['<occurrence>']),
            config.test_command,
            None)
sys.stdout.write(json.dumps(work_item, cls=WorkItemJsonEncoder))
return ExitCode.OK
def handle_worker(args)
usage: {program} worker [options] <module-path> <operator> <occurrence> [<config-file>] Run a worker process which performs a single mutation and test run. Each worker does a minimal, isolated chunk of work: it mutates the <occurrence>-th instance of <operator> in <module-path>, runs the test suite defined in the configuration, prints the results, and exits. Normally you won't run this directly. Rather, it will be launched by an execution engine. However, it can be useful to run this on its own for testing and debugging purposes. options: --keep-stdout Do not squelch stdout
6.240316
5.26868
1.184417
signal.signal(
    signal.SIGINT,
    lambda *args: sys.exit(_SIGNAL_EXIT_CODE_BASE + signal.SIGINT))

if hasattr(signal, 'SIGINFO'):
    signal.signal(
        getattr(signal, 'SIGINFO'),
        lambda *args: report_progress(sys.stderr))

try:
    return docopt_subcommands.main(
        commands=dsc,
        argv=argv,
        doc_template=DOC_TEMPLATE,
        exit_at_end=False)
except docopt.DocoptExit as exc:
    print(exc, file=sys.stderr)
    return ExitCode.USAGE
except FileNotFoundError as exc:
    print(exc, file=sys.stderr)
    return ExitCode.NO_INPUT
except PermissionError as exc:
    print(exc, file=sys.stderr)
    return ExitCode.NO_PERM
except cosmic_ray.config.ConfigError as exc:
    print(repr(exc), file=sys.stderr)
    if exc.__cause__ is not None:
        print(exc.__cause__, file=sys.stderr)
    return ExitCode.CONFIG
except subprocess.CalledProcessError as exc:
    print('Error in subprocess', file=sys.stderr)
    print(exc, file=sys.stderr)
    return exc.returncode
def main(argv=None)
Invoke the cosmic ray evaluation. :param argv: the command line arguments
2.88658
2.942193
0.981098
def dec(cls):
    name = '{}{}'.format(cls.__name__, suffix)
    setattr(cls, '__name__', name)
    return cls

return dec
def extend_name(suffix)
A factory for class decorators that modify the class name by appending some text to it.

Example:

    @extend_name('_Foo')
    class Class:
        pass

    assert Class.__name__ == 'Class_Foo'
4.454808
3.959982
1.124957
assert index < len(OFFSETS), 'received count with no associated offset'
assert isinstance(node, parso.python.tree.Number)
val = eval(node.value) + OFFSETS[index]  # pylint: disable=W0123
return parso.python.tree.Number(' ' + str(val), node.start_pos)
def mutate(self, node, index)
Modify the numeric value on `node`.
7.121055
6.602042
1.078614
"Determines if from_op is allowed to be mutated to to_op." # 'not' can only be removed but not replaced with # '+', '-' or '~' b/c that may lead to strange results if from_op is UnaryOperators.Not: if to_op is not UnaryOperators.Nothing: return True # '+1' => '1' yields equivalent mutations if from_op is UnaryOperators.UAdd: if to_op is UnaryOperators.Nothing: return True return False
def _prohibited(from_op, to_op)
Determines if a mutation from `from_op` to `to_op` is prohibited.
9.120406
7.741211
1.178163
try:
    with _config_stream(filename) as handle:
        filename = handle.name
        return deserialize_config(handle.read())
except (OSError, toml.TomlDecodeError, UnicodeDecodeError) as exc:
    raise ConfigError(
        'Error loading configuration from {}'.format(filename)) from exc
def load_config(filename=None)
Load a configuration from a file or stdin. If `filename` is `None` or "-", then configuration gets read from stdin. Returns: A `ConfigDict`. Raises: ConfigError: If there is an error loading the config.
4.769496
4.749237
1.004266
if filename is None or filename == '-':
    log.info('Reading config from stdin')
    yield sys.stdin
else:
    with open(filename, mode='rt') as handle:
        log.info('Reading config from %r', filename)
        yield handle
def _config_stream(filename)
Given a configuration's filename, this returns a stream from which a configuration can be read. If `filename` is `None` or '-' then stream will be `sys.stdin`. Otherwise, it's the open file handle for the filename.
3.031498
3.151431
0.961944
"Get a sub-configuration." d = self for segment in segments: try: d = d[segment] except KeyError: return ConfigDict({}) return d
def sub(self, *segments)
Get a sub-configuration.
5.551816
4.264309
1.301926
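A minimal sketch of the traversal, using a hypothetical dict-backed stand-in for ConfigDict (the real class carries more behaviour):

class ConfigDict(dict):
    "Illustrative stand-in for the real ConfigDict."
    def sub(self, *segments):
        d = self
        for segment in segments:
            try:
                d = d[segment]
            except KeyError:
                return ConfigDict({})
        return d

cfg = ConfigDict({'execution-engine': {'name': 'local'}})
assert cfg.sub('execution-engine')['name'] == 'local'
assert cfg.sub('no', 'such', 'key') == {}  # missing keys yield an empty config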
v = self.get('python-version', '')
if v == '':
    v = "{}.{}".format(sys.version_info.major, sys.version_info.minor)
return v
def python_version(self)
Get the configured Python version. If this is not set in the config, then it defaults to the version of the current runtime. Returns: A string of the form "MAJOR.MINOR", e.g. "3.6".
3.314642
3.105686
1.067282
assert index == 0
assert isinstance(node, ForStmt)

empty_list = parso.parse(' []')
node.children[3] = empty_list
return node
def mutate(self, node, index)
Modify the For loop to evaluate to None
10.285126
8.551579
1.202716
sep = name.index('/')
provider_name = name[:sep]
operator_name = name[sep + 1:]

provider = OPERATOR_PROVIDERS[provider_name]
return provider[operator_name]
def get_operator(name)
Get an operator class from a provider plugin. Args: name: The name of the operator class. Returns: The operator *class object* (i.e. not an instance).
3.90586
4.559026
0.856731
return tuple('{}/{}'.format(provider_name, operator_name)
             for provider_name, provider in OPERATOR_PROVIDERS.items()
             for operator_name in provider)
def operator_names()
Get all operator names. Returns: A sequence of operator names.
5.272333
5.822773
0.905468
manager = driver.DriverManager(
    namespace='cosmic_ray.execution_engines',
    name=name,
    invoke_on_load=True,
    on_load_failure_callback=_log_extension_loading_failure,
)
return manager.driver
def get_execution_engine(name)
Get the execution engine by name.
4.852743
4.57106
1.061623
database = WorkDB(path, mode)
try:
    yield database
finally:
    database.close()
def use_db(path, mode=WorkDB.Mode.create)
Open a DB in file `path` in mode `mode` as a context manager. On exiting the context the DB will be automatically closed. Args: path: The path to the DB file. mode: The mode in which to open the DB. See the `Mode` enum for details. Raises: FileNotFoundError: If `mode` is `Mode.open` and `path` does not exist.
3.124723
7.220535
0.432755
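Typical usage, sketched under the module layout implied by the snippets here ('session.sqlite' is a hypothetical path):

from cosmic_ray.work_db import use_db, WorkDB

with use_db('session.sqlite', WorkDB.Mode.open) as db:
    for work_item in db.pending_work_items:
        print(work_item.job_id)
# The DB is closed automatically on exiting the with-block.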
with self._conn:
    self._conn.execute("DELETE FROM config")
    self._conn.execute('INSERT INTO config VALUES(?)',
                       (serialize_config(config),))
def set_config(self, config)
Set (replace) the configuration for the session. Args: config: Configuration object
4.476008
5.911553
0.757163
rows = list(self._conn.execute("SELECT * FROM config"))
if not rows:
    raise ValueError("work-db has no config")
(config_str,) = rows[0]
return deserialize_config(config_str)
def get_config(self)
Get the work parameters (if set) for the session. Returns: a Configuration object. Raises: ValueError: If there is no config set for the session.
7.349805
6.262355
1.173649
cur = self._conn.cursor()
rows = cur.execute("SELECT * FROM work_items")
for row in rows:
    yield _row_to_work_item(row)
def work_items(self)
An iterable of all of the WorkItems in the db. This includes both WorkItems with and without results.
3.787806
3.800637
0.996624
with self._conn:
    self._conn.execute(
        '''
        INSERT INTO work_items
        VALUES (?, ?, ?, ?, ?, ?, ?, ?)
        ''', _work_item_to_row(work_item))
def add_work_item(self, work_item)
Add a WorkItem. Args: work_item: A WorkItem.
3.44615
3.756081
0.917486
with self._conn:
    self._conn.execute('DELETE FROM results')
    self._conn.execute('DELETE FROM work_items')
def clear(self)
Clear all work items from the session. This removes any associated results as well.
6.087817
4.733842
1.28602
"An iterable of all `(job-id, WorkResult)`s." cur = self._conn.cursor() rows = cur.execute("SELECT * FROM results") for row in rows: yield (row['job_id'], _row_to_work_result(row))
def results(self)
An iterable of all `(job-id, WorkResult)`s.
7.254137
3.525917
2.057376
with self._conn:
    try:
        self._conn.execute(
            '''
            REPLACE INTO results
            VALUES (?, ?, ?, ?, ?)
            ''', _work_result_to_row(job_id, result))
    except sqlite3.IntegrityError as exc:
        raise KeyError('Can not add result with job-id {}'.format(
            job_id)) from exc
def set_result(self, job_id, result)
Set the result for a job. This will overwrite any existing results for the job. Args: job_id: The ID of the WorkItem to set the result for. result: A WorkResult indicating the result of the job. Raises: KeyError: If there is no work-item with a matching job-id.
4.329226
4.073356
1.062816
"Iterable of all pending work items." pending = self._conn.execute( "SELECT * FROM work_items WHERE job_id NOT IN (SELECT job_id FROM results)" ) return (_row_to_work_item(p) for p in pending)
def pending_work_items(self)
Iterable of all pending work items.
6.398195
6.120389
1.04539
"Iterable of `(work-item, result)`s for all completed items." completed = self._conn.execute( "SELECT * FROM work_items, results WHERE work_items.job_id == results.job_id" ) return ((_row_to_work_item(result), _row_to_work_result(result)) for result in completed)
def completed_work_items(self)
Iterable of `(work-item, result)`s for all completed items.
7.143277
4.71993
1.513429
arguments = docopt.docopt(report_xml.__doc__, version='cr-rate 1.0')
with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
    xml_elem = _create_xml_report(db)
    xml_elem.write(
        sys.stdout.buffer, encoding='utf-8', xml_declaration=True)
def report_xml()
cr-xml Usage: cr-xml <session-file> Print an XML formatted report of test results for continuous integration systems
5.876061
5.824319
1.008884
try:
    with use_db(db_name, mode=WorkDB.Mode.open) as work_db:
        _update_progress(work_db)

        config = work_db.get_config()
        engine = get_execution_engine(config.execution_engine_name)

        def on_task_complete(job_id, work_result):
            work_db.set_result(job_id, work_result)
            _update_progress(work_db)
            log.info("Job %s complete", job_id)

        log.info("Beginning execution")
        engine(
            work_db.pending_work_items,
            config,
            on_task_complete=on_task_complete)
        log.info("Execution finished")
except FileNotFoundError as exc:
    raise FileNotFoundError(
        str(exc).replace('Requested file', 'Corresponding database', 1)) from exc
def execute(db_name)
Execute any pending work in the database stored in `db_name`, recording the results. This looks for any work in `db_name` which has no results, schedules it to be executed, and records any results that arrive.
4.541462
4.352834
1.043335
with module_path.open(mode='rt', encoding='utf-8') as handle:
    source = handle.read()

return parso.parse(source, version=python_version)
def get_ast(module_path, python_version)
Get the AST for the code in a file. Args: module_path: pathlib.Path to the file containing the code. python_version: Python version as a "MAJ.MIN" string. Returns: The parso parse tree for the code in `module_path`.
3.693149
3.863398
0.955933
"Determine if a node is the `None` keyword." return isinstance(node, parso.python.tree.Keyword) and node.value == 'None'
def is_none(node)
Determine if a node is the `None` keyword.
6.631915
4.90735
1.351425
"Walk a parse tree, calling visit for each node." node = self.visit(node) if node is None: return None if isinstance(node, parso.tree.BaseNode): walked = map(self.walk, node.children) node.children = [child for child in walked if child is not None] return node
def walk(self, node)
Walk a parse tree, calling visit for each node.
3.471099
2.962515
1.171673
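A sketch of a concrete visitor built on this `walk`, assuming the surrounding class exposes a `visit` hook (the class name here is illustrative):

import parso

class NoneSpottingVisitor:
    "Illustrative visitor: collects positions of `None` keywords."
    def __init__(self):
        self.nones = []

    def visit(self, node):
        if isinstance(node, parso.python.tree.Keyword) and node.value == 'None':
            self.nones.append(node.start_pos)
        return node  # returning None here would prune the subtree

    def walk(self, node):
        node = self.visit(node)
        if node is None:
            return None
        if isinstance(node, parso.tree.BaseNode):
            walked = map(self.walk, node.children)
            node.children = [child for child in walked if child is not None]
        return node

visitor = NoneSpottingVisitor()
visitor.walk(parso.parse('x = None\n'))
assert visitor.nones == [(1, 4)]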
arguments = docopt.docopt(
    format_survival_rate.__doc__, version='cr-rate 1.0')
with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
    rate = survival_rate(db)

print('{:.2f}'.format(rate))
def format_survival_rate()
cr-rate Usage: cr-rate <session-file> Calculate the survival rate of a session.
6.607499
5.970176
1.106751
kills = sum(r.is_killed for _, r in work_db.results)
num_results = work_db.num_results

if not num_results:
    return 0

return (1 - kills / num_results) * 100
def survival_rate(work_db)
Calculate the survival rate for the results in a WorkDB.
4.213411
3.603599
1.169223
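The arithmetic, illustrated with a hypothetical stub in place of a real WorkDB (only the attributes survival_rate reads):

from collections import namedtuple

Result = namedtuple('Result', 'is_killed')

class StubWorkDB:
    "Hypothetical stand-in for a WorkDB."
    results = [('job-1', Result(True)), ('job-2', Result(True)),
               ('job-3', Result(False)), ('job-4', Result(False))]
    num_results = 4

# 2 of 4 mutants killed -> 50% survived
assert survival_rate(StubWorkDB()) == 50.0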
arguments = docopt.docopt(report_html.__doc__, version='cr-rate 1.0')
with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
    doc = _generate_html_report(db)
    print(doc.getvalue())
def report_html()
cr-html Usage: cr-html <session-file> Print an HTML formatted report of test results.
7.553302
7.154572
1.055731
arguments = docopt.docopt(report.__doc__, version='cr-format 0.1')
show_pending = arguments['--show-pending']
show_output = arguments['--show-output']
show_diff = arguments['--show-diff']

with use_db(arguments['<session-file>'], WorkDB.Mode.open) as db:
    for work_item, result in db.completed_work_items:
        print('{} {} {} {}'.format(work_item.job_id, work_item.module_path,
                                   work_item.operator_name,
                                   work_item.occurrence))
        print('worker outcome: {}, test outcome: {}'.format(
            result.worker_outcome, result.test_outcome))
        if show_output:
            print('=== OUTPUT ===')
            print(result.output)
            print('==============')
        if show_diff:
            print('=== DIFF ===')
            print(result.diff)
            print('============')

    if show_pending:
        for work_item in db.pending_work_items:
            print('{} {} {} {}'.format(
                work_item.job_id, work_item.module_path,
                work_item.operator_name, work_item.occurrence))

    num_items = db.num_work_items
    num_complete = db.num_results
    print('total jobs: {}'.format(num_items))

    if num_complete > 0:
        print('complete: {} ({:.2f}%)'.format(
            num_complete, num_complete / num_items * 100))
        print('survival rate: {:.2f}%'.format(survival_rate(db)))
    else:
        print('no jobs completed')
def report()
cr-report Usage: cr-report [--show-output] [--show-diff] [--show-pending] <session-file> Print a nicely formatted report of test results and some basic statistics. options: --show-output Display output of test executions --show-diff Display diff of mutants --show-pending Display results for incomplete tasks
2.958018
2.781037
1.063639
"Return True if a string is of the form <int>.<int>, False otherwise." if not s: return True toks = s.split('.') if len(toks) != 2: return False try: int(toks[0]) int(toks[1]) except ValueError: return False return True
def _validate_python_version(s)
Return True if a string is of the form <int>.<int>, False otherwise.
2.770365
2.027426
1.366445
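A few illustrative checks (the empty string passes because the setting is optional):

assert _validate_python_version('3.6')
assert _validate_python_version('')        # blank means auto-detect
assert not _validate_python_version('3')
assert not _validate_python_version('3.x')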
config = ConfigDict()

config["module-path"] = qprompt.ask_str(
    "Top-level module path",
    blk=False,
    vld=os.path.exists,
    hlp=MODULE_PATH_HELP)

python_version = qprompt.ask_str(
    'Python version (blank for auto detection)',
    vld=_validate_python_version,
    hlp=PYTHON_VERSION_HELP)
config['python-version'] = python_version

timeout = qprompt.ask_str(
    'Test execution timeout (seconds)',
    vld=float,
    blk=False,
    hlp="The number of seconds to let a test run before terminating it.")
config['timeout'] = float(timeout)

config['excluded-modules'] = []

config["test-command"] = qprompt.ask_str(
    "Test command",
    blk=False,
    hlp=TEST_COMMAND_HELP)

menu = qprompt.Menu()
for at_pos, engine_name in enumerate(execution_engine_names()):
    menu.add(str(at_pos), engine_name)
config["execution-engine"] = ConfigDict()
config['execution-engine']['name'] = menu.show(header="Execution engine",
                                               returns="desc")

config["cloning"] = ConfigDict()
config['cloning']['method'] = 'copy'
config['cloning']['commands'] = []

return config
def new_config()
Prompt user for config variables and generate new config. Returns: A new ConfigDict.
4.413575
4.380712
1.007502
if module_path.is_file():
    if module_path.suffix == '.py':
        yield module_path
elif module_path.is_dir():
    pyfiles = glob.glob('{}/**/*.py'.format(module_path), recursive=True)
    yield from (Path(pyfile) for pyfile in pyfiles)
def find_modules(module_path)
Find all modules in the module (possibly package) represented by `module_path`. Args: module_path: A pathlib.Path to a Python package or module. Returns: An iterable of paths to Python modules (i.e. *.py files).
2.379414
2.342759
1.015646
operator_names = cosmic_ray.plugins.operator_names()
work_db.set_config(config=config)

work_db.clear()

for module_path in module_paths:
    module_ast = get_ast(
        module_path, python_version=config.python_version)

    for op_name in operator_names:
        operator = get_operator(op_name)(config.python_version)
        visitor = WorkDBInitVisitor(module_path, op_name, work_db, operator)
        visitor.walk(module_ast)

apply_interceptors(work_db, config.sub('interceptors').get('enabled', ()))
def init(module_paths, work_db, config)
Clear and initialize a work-db with work items. Any existing data in the work-db will be cleared and replaced with entirely new work orders. In particular, this means that any results in the db are removed. Args: module_paths: iterable of pathlib.Paths of modules to mutate. work_db: A `WorkDB` instance into which the work orders will be saved. config: The configuration for the new session.
5.36662
5.71097
0.939704
names = (name for name in interceptor_names()
         if name in enabled_interceptors)
for name in names:
    interceptor = get_interceptor(name)
    interceptor(work_db)
def apply_interceptors(work_db, enabled_interceptors)
Apply each registered interceptor to the WorkDB.
3.865697
3.217517
1.201454
"Determine if a mutation from `from_op` to `to_op` is allowed given a particular `rhs` node." if is_none(rhs): return to_op in _RHS_IS_NONE_OPS.get(from_op, ()) if is_number(rhs): return to_op in _RHS_IS_INTEGER_OPS return True
def _allowed(to_op, from_op, rhs)
Determine if a mutation from `from_op` to `to_op` is allowed given a particular `rhs` node.
5.744795
3.933632
1.46043
try:
    operator_class = cosmic_ray.plugins.get_operator(operator_name)
    operator = operator_class(python_version)

    with cosmic_ray.mutating.use_mutation(
            module_path, operator, occurrence) as (original_code,
                                                   mutated_code):
        if mutated_code is None:
            return WorkResult(worker_outcome=WorkerOutcome.NO_TEST)

        test_outcome, output = run_tests(test_command, timeout)

        diff = _make_diff(original_code, mutated_code, module_path)

        return WorkResult(
            output=output,
            diff='\n'.join(diff),
            test_outcome=test_outcome,
            worker_outcome=WorkerOutcome.NORMAL)

except Exception:  # noqa # pylint: disable=broad-except
    return WorkResult(
        output=traceback.format_exc(),
        test_outcome=TestOutcome.INCOMPETENT,
        worker_outcome=WorkerOutcome.EXCEPTION)
def worker(module_path, python_version, operator_name, occurrence, test_command, timeout)
Mutate the OCCURRENCE-th site for OPERATOR_NAME in MODULE_PATH, run the tests, and report the results. This is fundamentally the single-mutation-and-test-run process implementation. There are three high-level ways that a worker can finish. First, it could fail exceptionally, meaning that some uncaught exception made its way from some part of the operation to terminate the function. This function will intercept all exceptions and return them in a non-exceptional structure. Second, the mutation testing machinery may determine that there is no OCCURRENCE-th instance for OPERATOR_NAME in the module under test. In this case there is no way to report a test result (i.e. killed, survived, or incompetent) so a special value is returned indicating that no mutation is possible. Finally, and hopefully normally, the worker will find that it can run a test. It will do so and report back the result - killed, survived, or incompetent - in a structured way. Args: module_path: The path to the module to mutate python_version: The version of Python to use when interpreting the code in `module_path`. A string of the form "MAJOR.MINOR", e.g. "3.6" for Python 3.6.x. operator_name: The name of the operator plugin to use occurrence: The occurrence of the operator to apply test_command: The command to execute to run the tests timeout: The maximum amount of time (seconds) to let the tests run Returns: A WorkResult Raises: This will generally not raise any exceptions. Rather, exceptions will be reported using the 'exception' result-type in the return value.
3.499926
3.015322
1.160714
if stream is None:
    stream = sys.stderr

for reporter in _reporters:
    reporter(stream)
def report_progress(stream=None)
Report progress from any currently installed reporters. Args: stream: The text stream (default: sys.stderr) to which progress will be reported.
3.930055
4.739596
0.829196
def decorator(func):  # pylint: disable=missing-docstring
    @wraps(func)
    def wrapper(*args, **kwargs):  # pylint: disable=missing-docstring
        with progress_reporter(reporter):
            return func(*args, **kwargs)
    return wrapper

return decorator
def reports_progress(reporter)
A decorator factory to mark functions which report progress. Args: reporter: A zero-argument callable to report progress. The callable provided should have the means to both retrieve and display current progress information.
2.302939
2.404146
0.957903
"Get a set of tags for the current git repo." result = [t.decode('ascii') for t in subprocess.check_output([ 'git', 'tag' ]).split(b"\n")] assert len(set(result)) == len(result) return set(result)
def tags()
Get a set of tags for the current git repo.
4.753138
3.551631
1.338297
"Create a git tag for `version` and push it to origin." assert version not in tags() git('config', 'user.name', 'Travis CI on behalf of Austin Bingham') git('config', 'user.email', '[email protected]') git('config', 'core.sshCommand', 'ssh -i deploy_key') git( 'remote', 'add', 'ssh-origin', '[email protected]:sixty-north/cosmic-ray.git' ) git('tag', version) subprocess.check_call([ 'ssh-agent', 'sh', '-c', 'chmod 0600 deploy_key && ' + 'ssh-add deploy_key && ' + # 'git push ssh-origin HEAD:master &&' 'git push ssh-origin --tags' ])
def create_tag_and_push(version)
Create a git tag for `version` and push it to origin.
4.644063
4.22332
1.099624
"Read the `(version-string, version-info)` from `version_file`." vars = {} with open(version_file) as f: exec(f.read(), {}, vars) return (vars['__version__'], vars['__version_info__'])
def read_version(version_file)
Read the `(version-string, version-info)` from `version_file`.
4.182333
2.566371
1.629668
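A sketch of the expected file layout, with a hypothetical version.py:

# Contents of a hypothetical version.py:
#     __version__ = '1.2.3'
#     __version_info__ = (1, 2, 3)
version_string, version_info = read_version('version.py')
assert version_string == '1.2.3'
assert version_info == (1, 2, 3)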
global _workspace

_ensure_workspace(config)

result = worker(
    work_item.module_path,
    config.python_version,
    work_item.operator_name,
    work_item.occurrence,
    config.test_command,
    config.timeout)

return work_item.job_id, result
def worker_task(work_item, config)
The celery task which performs a single mutation and runs a test suite. This runs `cosmic-ray worker` in a subprocess and returns the results, passing `config` to it via stdin. Args: work_item: A dict describing a WorkItem. config: The configuration to use for the test execution. Returns: A `(job_id, WorkResult)` tuple.
7.460526
8.764114
0.851258
return celery.group(
    worker_task.s(work_item, config)
    for work_item in work_items)
def execute_work_items(work_items, config)
Execute a suite of tests for a given set of work items. Args: work_items: An iterable of `work_db.WorkItem`s. config: The configuration to use for the test execution. Returns: A celery group of `worker_task` invocations, one per work item.
4.014021
7.352059
0.545972
workspace = ClonedWorkspace(clone_config)
original_dir = os.getcwd()
if chdir:
    os.chdir(workspace.clone_dir)

try:
    yield workspace
finally:
    os.chdir(original_dir)
    workspace.cleanup()
def cloned_workspace(clone_config, chdir=True)
Create a cloned workspace and yield it. This creates a workspace for a with-block and cleans it up on exit. By default, this will also change to the workspace's `clone_dir` for the duration of the with-block. Args: clone_config: The execution engine configuration to use for the workspace. chdir: Whether to change to the workspace's `clone_dir` before entering the with-block. Yields: The `CloneWorkspace` instance created for the context.
2.551385
3.278354
0.778252
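Typical usage, sketched with a hypothetical clone configuration:

# 'clone_config' would come from the cloning section of the session
# configuration; this is only a sketch.
with cloned_workspace(clone_config) as workspace:
    print(workspace.clone_dir)  # we are chdir'd into the clone here
# On exit the original working directory is restored and the clone removed.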
log.info('Cloning git repo %s to %s', repo_uri, dest_path)
git.Repo.clone_from(repo_uri, dest_path, depth=1)
def clone_with_git(repo_uri, dest_path)
Create a clone by cloning a git repository. Args: repo_uri: The URI of the git repository to clone. dest_path: The location to clone to.
2.361352
2.855914
0.826829
log.info('Cloning directory tree %s to %s', src_path, dest_path)
shutil.copytree(src_path, dest_path)
def clone_with_copy(src_path, dest_path)
Clone a directory tree by copying it. Args: src_path: The directory to be copied. dest_path: The location to copy the directory to.
3.176567
3.492516
0.909535
# NB: We had to create this because the venv module wasn't doing what we
# needed. In particular, if we used it to create a venv from an existing
# venv, it *always* created symlinks back to the original venv's python
# executables. Then, when you used those linked executables, you ended up
# interacting with the original venv. I could find no way around this,
# hence this function.
prefix = getattr(sys, 'real_prefix', sys.prefix)
python = Path(prefix) / 'bin' / 'python'
command = '{} -m venv {}'.format(python, venv_dir)
try:
    log.info('Creating virtual environment: %s', command)
    subprocess.run(command.split(),
                   stdout=subprocess.PIPE,
                   stderr=subprocess.STDOUT,
                   check=True)
except subprocess.CalledProcessError as exc:
    log.error("Error creating virtual environment: %s", exc.output)
    raise
def _build_env(venv_dir)
Create a new virtual environment in `venv_dir`. This uses the base prefix of any virtual environment that you may be using when you call this.
5.138393
4.99473
1.028763
variables = {
    'python-executable': str(self._venv_path / 'bin' / 'python')
}
return text.format(**variables)
def replace_variables(self, text)
Replace variable placeholders in `text` with values from the virtual env. The variables are: - {python-executable} Args: text: The text to do replacement in. Returns: The text after replacement.
6.645645
5.271367
1.260706
"Remove the directory containin the clone and virtual environment." log.info('Removing temp dir %s', self._tempdir.name) self._tempdir.cleanup()
def cleanup(self)
Remove the directory containing the clone and virtual environment.
14.8943
5.053526
2.947308
"Get the WorkResult as a dict." return { 'output': self.output, 'test_outcome': self.test_outcome, 'worker_outcome': self.worker_outcome, 'diff': self.diff, }
def as_dict(self)
Get the WorkResult as a dict.
5.871846
3.960953
1.482433
return {
    'module_path': str(self.module_path),
    'operator_name': self.operator_name,
    'occurrence': self.occurrence,
    'start_pos': self.start_pos,
    'end_pos': self.end_pos,
    'job_id': self.job_id,
}
def as_dict(self)
Get fields as a dict.
2.927164
2.710007
1.080131
@lru_cache()
def file_contents(file_path):
    "A simple cache of file contents."
    with file_path.open(mode="rt") as handle:
        return handle.readlines()

for item in work_db.work_items:
    try:
        repo = open_repository(item.module_path)
    except ValueError:
        log.info("No spor repository for %s", item.module_path)
        continue

    for _, anchor in repo.items():
        if anchor.file_path != item.module_path.absolute():
            continue

        metadata = anchor.metadata
        lines = file_contents(item.module_path)
        if _item_in_context(lines, item, anchor.context) \
                and not metadata.get("mutate", True):
            log.info(
                "spor skipping %s %s %s %s %s %s",
                item.job_id,
                item.operator_name,
                item.occurrence,
                item.module_path,
                item.start_pos,
                item.end_pos,
            )

            work_db.set_result(
                item.job_id,
                WorkResult(
                    output=None,
                    test_outcome=None,
                    diff=None,
                    worker_outcome=WorkerOutcome.SKIPPED,
                ),
            )
def intercept(work_db)
Look for WorkItems in `work_db` that should not be mutated due to spor metadata. For each WorkItem, find anchors for the item's file/line/columns. If an anchor exists with metadata containing `{mutate: False}` then the WorkItem is marked as SKIPPED.
4.843074
3.947579
1.226847
offset = 0
for index, contents in enumerate(lines, 1):
    if index == line:
        return offset + col

    offset += len(contents)

raise ValueError("Offset {}:{} not found".format(line, col))
def _line_and_col_to_offset(lines, line, col)
Figure out the offset into a file for a particular line and col. This can return offsets that don't actually exist in the file. If you specify a line that exists and a col that is past the end of that line, this will return a "fake" offset. This is to account for the fact that a WorkItem's end_pos is one-past the end of a mutation, and hence potentially one-past the end of a file. Args: lines: A sequence of the lines in a file. line: A one-based index indicating the line in the file. col: A zero-based index indicating the column on `line`. Raises: ValueError: If the specified line is not found in the file.
4.235189
5.17757
0.817988
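A worked example of the offset arithmetic (each line's length includes its newline):

lines = ["abc\n", "defg\n"]
# line 1 contributes 4 characters ("abc\n"), so (line 2, col 1) -> 4 + 1
assert _line_and_col_to_offset(lines, 2, 1) == 5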
start_offset = _line_and_col_to_offset(lines, item.start_pos[0],
                                       item.start_pos[1])
stop_offset = _line_and_col_to_offset(lines, item.end_pos[0],
                                      item.end_pos[1])
width = stop_offset - start_offset

return start_offset >= context.offset and width <= len(context.topic)
def _item_in_context(lines, item, context)
Determines if a WorkItem falls within an anchor. This only returns True if a WorkItem's start-/stop-pos range is *completely* within an anchor, not just if it overlaps.
3.050229
2.9497
1.034081
original_code, mutated_code = apply_mutation(module_path, operator,
                                             occurrence)
try:
    yield original_code, mutated_code
finally:
    with module_path.open(mode='wt', encoding='utf-8') as handle:
        handle.write(original_code)
        handle.flush()
def use_mutation(module_path, operator, occurrence)
A context manager that applies a mutation for the duration of a with-block. This applies a mutation to a file on disk, and after the with-block it puts the unmutated code back in place. Args: module_path: The path to the module to mutate. operator: The `Operator` instance to use. occurrence: The occurrence of the operator to apply. Yields: A `(unmutated-code, mutated-code)` tuple to the with-block. If there was no mutation performed, the `mutated-code` is `None`.
3.310359
3.360373
0.985116
module_ast = get_ast(module_path, python_version=operator.python_version)
original_code = module_ast.get_code()
visitor = MutationVisitor(occurrence, operator)
mutated_ast = visitor.walk(module_ast)

mutated_code = None
if visitor.mutation_applied:
    mutated_code = mutated_ast.get_code()
    with module_path.open(mode='wt', encoding='utf-8') as handle:
        handle.write(mutated_code)
        handle.flush()

return original_code, mutated_code
def apply_mutation(module_path, operator, occurrence)
Apply a specific mutation to a file on disk. Args: module_path: The path to the module to mutate. operator: The `operator` instance to use. occurrence: The occurrence of the operator to apply. Returns: A `(unmutated-code, mutated-code)` tuple. If there was no mutation performed, the `mutated-code` is `None`.
2.935247
3.080809
0.952752
requirements_file = os.path.join(os.getcwd(), 'requirements.txt')
requirements = []
links = []
try:
    with open(requirements_file) as reqfile:
        for line in reqfile.readlines():
            line = line.strip()
            if line.startswith('#'):
                continue
            elif line.startswith(('https://', 'git://', 'hg://', 'svn://')):
                links.append(line)
            else:
                requirements.append(line)
except (IOError, OSError) as error:
    print(error)

if python26():
    # Required to make `collections.OrderedDict` available on Python<=2.6
    requirements.append('ordereddict==1.1#a0ed854ee442051b249bfad0f638bbec')

# Don't try to install psutil on PyPy:
if _isPyPy:
    for line in requirements[:]:
        if line.startswith('psutil'):
            print("Not installing %s on PyPy..." % line)
            requirements.remove(line)

return requirements, links
def get_requirements()
Extract the list of requirements from our requirements.txt. :rtype: 2-tuple :returns: Two lists, the first is a list of requirements in the form of pkgname==version. The second is a list of URIs or VCS checkout strings which specify the dependency links for obtaining a copy of the requirement.
4.717178
4.723527
0.998656
useparams = {}
for key, value in keyparams.items():
    if value:
        useparams.update({key: value})

batchfile = gpg.gen_key_input(separate_keyring=True,
                              save_batchfile=True,
                              **useparams)
log.info("Generated GnuPG batch file:\n%s" % batchfile)
return batchfile
def createBatchfile(keyparams=allparams)
Create the batchfile for our new key. :params dict keyparams: A dictionary of arguments for creating the key. It should probably be ``allparams``. :rtype: str :returns: A string containing the entire GnuPG batchfile.
4.644077
5.028838
0.923489
key = gpg.gen_key(batchfile)
fingerprint = key.fingerprint

if not fingerprint:
    log.error("Key creation seems to have failed: %s" % key.status)
    return None, None

return key, fingerprint
def createKey(batchfile)
Create a new keypair from a **batchfile**. Writes the new keys into keyrings named after ``NAME_EMAIL`` inside the ``NEWKEY_DIR``. :params str batchfile: A GnuPG batchfile. See :func:`createBatchfile`.
4.850117
6.607916
0.733986
if key.keyring:
    gpg.keyring = key.keyring
if key.secring:
    gpg.secring = key.secring

# Using '--fingerprint' twice will display subkey fingerprints too:
gpg.options = ['--fingerprint', '--fingerprint']
keylist = gpg.list_keys(secret=True)

# `keylist` is a `gnupg._parsers.ListKeys`, which is list-like, so iterate
# over all the keys and display their info:
for gpgkey in keylist:
    for k, v in gpgkey.items():
        log.info("%s: %s" % (k.capitalize(), v))

return keylist
def displayNewKey(key)
Use ``gnupg.GPG.list_keys()`` to display details of the new key.
6.102716
5.16268
1.182083
log.info("Exporting key: %s" % fingerprint)

keyfn = os.path.join(gpg.homedir,
                     fingerprint + '-8192-bit-key') + os.path.extsep

pubkey = gpg.export_keys(fingerprint)
seckey = gpg.export_keys(fingerprint, secret=True)
subkey = gpg.export_keys(fingerprint, secret=True, subkeys=True)

with open(keyfn + 'pub' + os.path.extsep + 'asc', 'w') as fh:
    fh.write(pubkey)
with open(keyfn + 'sec' + os.path.extsep + 'asc', 'w') as fh:
    fh.write(seckey)
with open(keyfn + 'sub' + os.path.extsep + 'asc', 'w') as fh:
    fh.write(subkey)
def exportNewKey(fingerprint)
Export the new keys into .asc files. :param str fingerprint: A full key fingerprint.
2.242553
2.307565
0.971827
protocols = ['hkp://', 'hkps://', 'http://', 'https://', 'ldap://',
             'mailto:']  # xxx feels like I'm forgetting one...
for proto in protocols:
    if location.startswith(proto):
        url = location.replace(proto, str())
        host, slash, extra = url.partition('/')
        if extra:
            log.warn("URI text for %s: '%s'" % (host, extra))
        log.debug("Got host string for keyserver setting: '%s'" % host)

        host = _fix_unsafe(host)
        if host:
            log.debug("Cleaned host string: '%s'" % host)
            keyserver = proto + host
            return keyserver
return None
def _check_keyserver(location)
Check that a given keyserver is a known protocol and does not contain shell escape characters. :param str location: A string containing the default keyserver. This should contain the desired keyserver protocol which is supported by the keyserver, for example, the default is ``'hkp://wwwkeys.pgp.net'``. :rtype: :obj:`str` or :obj:`None` :returns: A string specifying the protocol and keyserver hostname, if the checks passed. If not, returns None.
7.333069
6.710532
1.09277
if prefs is None:
    return

cipher = frozenset(['AES256', 'AES192', 'AES128', 'CAMELLIA256',
                    'CAMELLIA192', 'TWOFISH', '3DES'])
digest = frozenset(['SHA512', 'SHA384', 'SHA256', 'SHA224', 'RMD160',
                    'SHA1'])
compress = frozenset(['BZIP2', 'ZLIB', 'ZIP', 'Uncompressed'])
trust = frozenset(['gpg', 'classic', 'direct', 'always', 'auto'])
pinentry = frozenset(['loopback'])
# A flat union (not a set of sets), so membership tests against the
# individual preference strings work:
all = cipher.union(digest, compress, trust, pinentry)

if isinstance(prefs, str):
    prefs = set(prefs.split())
elif isinstance(prefs, list):
    prefs = set(prefs)
else:
    msg = "prefs must be list of strings, or space-separated string"
    log.error("parsers._check_preferences(): %s" % msg)
    raise TypeError(msg)

if not pref_type:
    pref_type = 'all'

allowed = str()

if pref_type == 'cipher':
    allowed += ' '.join(prefs.intersection(cipher))
if pref_type == 'digest':
    allowed += ' '.join(prefs.intersection(digest))
if pref_type == 'compress':
    allowed += ' '.join(prefs.intersection(compress))
if pref_type == 'trust':
    allowed += ' '.join(prefs.intersection(trust))
if pref_type == 'pinentry':
    allowed += ' '.join(prefs.intersection(pinentry))
if pref_type == 'all':
    allowed += ' '.join(prefs.intersection(all))

return allowed
def _check_preferences(prefs, pref_type=None)
Check cipher, digest, and compression preference settings. MD5 is not allowed. This is `not 1994`__. SHA1 is allowed_ grudgingly_. __ http://www.cs.colorado.edu/~jrblack/papers/md5e-full.pdf .. _allowed: http://eprint.iacr.org/2008/469.pdf .. _grudgingly: https://www.schneier.com/blog/archives/2012/10/when_will_we_se.html
2.549465
2.47338
1.030761
_unsafe = re.compile(r'[^\w@%+=:,./-]', 256)
try:
    if len(_unsafe.findall(shell_input)) == 0:
        return shell_input.strip()
    else:
        clean = "'" + shell_input.replace("'", "'\"'\"'") + "'"
        return clean
except TypeError:
    return None
def _fix_unsafe(shell_input)
Find characters used to escape from a string into a shell, and wrap them in quotes if they exist. Regex pilfered from Python3 :mod:`shlex` module. :param str shell_input: The input intended for the GnuPG process.
3.581053
3.967129
0.902681
ret = '--' if add_prefix else ''
ret += input.replace('_', '-')
return ret
def _hyphenate(input, add_prefix=False)
Change underscores to hyphens so that object attributes can be easily translated to GPG option names. :param str input: The attribute to hyphenate. :param bool add_prefix: If True, add leading hyphens to the input. :rtype: str :return: The ``input`` with underscores changed to hyphens.
6.315956
8.834478
0.714921
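For example:

assert _hyphenate('list_keys', add_prefix=True) == '--list-keys'
assert _hyphenate('cert_digest_algo') == 'cert-digest-algo'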
gnupg_options = _get_all_gnupg_options()
allowed = _get_options_group("allowed")

## these are the allowed options we will handle so far, all others should
## be dropped. this dance is so that when new options are added later, we
## merely add them to the _allowed list, and the ``_allowed.issubset``
## assertion will check that GPG will recognise them
try:
    ## check that allowed is a subset of all gnupg_options
    assert allowed.issubset(gnupg_options)
except AssertionError:
    raise UsageError("'allowed' isn't a subset of known options, diff: %s"
                     % allowed.difference(gnupg_options))

## if we got a list of args, join them
##
## see TODO file, tag :cleanup:
if not isinstance(input, str):
    input = ' '.join([x for x in input])

if isinstance(input, str):
    if input.find('_') > 0:
        if not input.startswith('--'):
            hyphenated = _hyphenate(input, add_prefix=True)
        else:
            hyphenated = _hyphenate(input)
    else:
        hyphenated = input
    ## xxx we probably want to use itertools.dropwhile here
    try:
        assert hyphenated in allowed
    except AssertionError as ae:
        dropped = _fix_unsafe(hyphenated)
        log.warn("_is_allowed(): Dropping option '%s'..." % dropped)
        raise ProtectedOption("Option '%s' not supported." % dropped)
    else:
        return input

return None
def _is_allowed(input)
Check that an option or argument given to GPG is in the set of allowed options, the latter being a strict subset of the set of all options known to GPG. :param str input: An input meant to be parsed as an option or flag to the GnuPG process. Should be formatted the same as an option or flag to the commandline gpg, i.e. "--encrypt-files". :ivar frozenset gnupg_options: All known GPG options and flags. :ivar frozenset allowed: All allowed GPG options and flags, e.g. all GPG options and flags which we are willing to acknowledge and parse. If we want to support a new option, it will need to have its own parsing class and its name will need to be added to this set. :raises: :exc:`UsageError` if **input** is not a subset of the hard-coded set of all GnuPG options in :func:`_get_all_gnupg_options`. :exc:`ProtectedOption` if **input** is not in the set of allowed options. :rtype: str :return: The original **input** parameter, unmodified and unsanitized, if no errors occur.
7.906628
6.695044
1.180967
if _util._py3k:
    return isinstance(thing, str)
else:
    return isinstance(thing, basestring)
def _is_string(thing)
Python character arrays are a mess. If Python2, check if **thing** is an :obj:`unicode` or a :obj:`str`. If Python3, check if **thing** is a :obj:`str`. :param thing: The thing to check. :returns: ``True`` if **thing** is a string according to whichever version of Python we're running in.
5.050672
6.08389
0.830171
if isinstance(arg_list, list):
    for arg in arg_list:
        safe_arg = _sanitise(arg)
        if safe_arg != "":
            yield safe_arg
def _sanitise_list(arg_list)
A generator for iterating through a list of gpg options and sanitising them. :param list arg_list: A list of options and flags for GnuPG. :rtype: generator :returns: A generator whose next() method returns each of the items in ``arg_list`` after calling ``_sanitise()`` with that item as a parameter.
3.026778
3.324845
0.910352
#: These expect a hexadecimal keyid as their argument, and can be parsed
#: with :func:`_is_hex`.
hex_options = frozenset(['--check-sigs', '--default-key',
                         '--default-recipient', '--delete-keys',
                         '--delete-secret-keys',
                         '--delete-secret-and-public-keys',
                         '--desig-revoke', '--export',
                         '--export-secret-keys', '--export-secret-subkeys',
                         '--fingerprint', '--gen-revoke',
                         '--hidden-encrypt-to', '--hidden-recipient',
                         '--list-key', '--list-keys', '--list-public-keys',
                         '--list-secret-keys', '--list-sigs', '--recipient',
                         '--recv-keys', '--send-keys', '--edit-key',
                         '--sign-key'])

#: These options expect values which are left unchecked, though still run
#: through :func:`_fix_unsafe`.
unchecked_options = frozenset(['--list-options', '--passphrase-fd',
                               '--status-fd', '--verify-options',
                               '--command-fd'])

#: These have their own parsers and don't really fit into a group
other_options = frozenset(['--debug-level', '--keyserver'])

#: These should have a directory for an argument
dir_options = frozenset(['--homedir'])

#: These expect a keyring or keyfile as their argument
keyring_options = frozenset(['--keyring', '--primary-keyring',
                             '--secret-keyring', '--trustdb-name'])

#: These expect a filename (or the contents of a file as a string) or None
#: (meaning that they read from stdin)
file_or_none_options = frozenset(['--decrypt', '--decrypt-files',
                                  '--encrypt', '--encrypt-files',
                                  '--import', '--verify', '--verify-files',
                                  '--output'])

#: These options expect a string. see :func:`_check_preferences`.
pref_options = frozenset(['--digest-algo', '--cipher-algo',
                          '--compress-algo', '--compression-algo',
                          '--cert-digest-algo', '--personal-digest-prefs',
                          '--personal-digest-preferences',
                          '--personal-cipher-prefs',
                          '--personal-cipher-preferences',
                          '--personal-compress-prefs',
                          '--personal-compress-preferences',
                          '--pinentry-mode', '--print-md', '--trust-model'])

#: These options expect no arguments
none_options = frozenset(['--allow-loopback-pinentry', '--always-trust',
                          '--armor', '--armour', '--batch', '--check-sigs',
                          '--check-trustdb', '--clearsign', '--debug-all',
                          '--default-recipient-self', '--detach-sign',
                          '--export', '--export-ownertrust',
                          '--export-secret-keys', '--export-secret-subkeys',
                          '--fingerprint', '--fixed-list-mode', '--gen-key',
                          '--import-ownertrust', '--list-config',
                          '--list-key', '--list-keys', '--list-packets',
                          '--list-public-keys', '--list-secret-keys',
                          '--list-sigs', '--lock-multiple', '--lock-never',
                          '--lock-once', '--no-default-keyring',
                          '--no-default-recipient', '--no-emit-version',
                          '--no-options', '--no-tty', '--no-use-agent',
                          '--no-verbose', '--print-mds', '--quiet',
                          '--sign', '--symmetric', '--throw-keyids',
                          '--use-agent', '--verbose', '--version',
                          '--with-colons', '--yes'])

#: These options expect either None or a hex string
hex_or_none_options = hex_options.intersection(none_options)

allowed = hex_options.union(unchecked_options, other_options, dir_options,
                            keyring_options, file_or_none_options,
                            pref_options, none_options)

if group and group in locals().keys():
    return locals()[group]
def _get_options_group(group=None)
Get a specific group of options which are allowed.
4.244884
4.208883
1.008554
# NB: In the original source this is a single long string literal
# containing all 318 hardcoded GnuPG options, which is then split; the
# full list is elided in this snippet.
three_hundred_eighteen = ("").split()

# These are extra options which only exist for GnuPG>=2.0.0
three_hundred_eighteen.append('--export-ownertrust')
three_hundred_eighteen.append('--import-ownertrust')

# These are extra options which only exist for GnuPG>=2.1.0
three_hundred_eighteen.append('--pinentry-mode')
three_hundred_eighteen.append('--allow-loopback-pinentry')

gnupg_options = frozenset(three_hundred_eighteen)
return gnupg_options
def _get_all_gnupg_options()
Get all GnuPG options and flags. This is hardcoded within a local scope to reduce the chance of a tampered GnuPG binary reporting falsified option sets, i.e. because certain options (namely the ``--no-options`` option, which prevents the usage of gpg.conf files) are necessary and statically specified in :meth:`gnupg._meta.GPGBase._make_args`, if the inputs into Python are already controlled, and we were to summon the GnuPG binary to ask it for its options, it would be possible to receive a falsified options set missing the ``--no-options`` option in response. This seems unlikely, and the method is stupid and ugly, but at least we'll never have to debug whether or not an option *actually* disappeared in a different GnuPG version, or some funny business is happening. These are the options as of GnuPG 1.4.12; the current stable branch of the 2.1.x tree contains a few more -- if you need them you'll have to add them in here. :type gnupg_options: frozenset :ivar gnupg_options: All known GPG options and flags. :rtype: frozenset :returns: ``gnupg_options``
3.883426
4.051044
0.958623