code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
diff_violations = self._diff_violations().get(src_path)

if diff_violations is None:
    return []

return sorted(diff_violations.lines)
def violation_lines(self, src_path)
Return a list of lines in violation (integers) in `src_path` that were changed. If we have no coverage information for `src_path`, returns an empty list.
5.222091
4.331433
1.205626
return sum([len(summary.measured_lines) for summary in self._diff_violations().values()])
def total_num_lines(self)
Return the total number of lines in the diff for which we have coverage info.
27.103415
14.624793
1.853251
return sum( len(summary.lines) for summary in self._diff_violations().values() )
def total_num_violations(self)
Returns the total number of lines in the diff that are in violation.
14.623086
7.943929
1.840788
total_lines = self.total_num_lines()

if total_lines > 0:
    num_covered = total_lines - self.total_num_violations()
    return int(float(num_covered) / total_lines * 100)
else:
    return 100
def total_percent_covered(self)
Returns the percent (as an integer) of lines in the diff that are covered (only counting lines for which we have coverage info).
3.150877
2.810769
1.121002
if not self._diff_violations_dict:
    self._diff_violations_dict = {
        src_path: DiffViolations(
            self._violations.violations(src_path),
            self._violations.measured_lines(src_path),
            self._diff.lines_changed(src_path),
        )
        for src_path in self._diff.src_paths_changed()
    }
return self._diff_violations_dict
def _diff_violations(self)
Returns a dictionary of the form: { SRC_PATH: DiffViolations(SRC_PATH) } where `SRC_PATH` is the path to the source file. To make this efficient, we cache and reuse the result.
3.717401
3.256019
1.141701
if self.TEMPLATE_NAME is not None:
    template = TEMPLATE_ENV.get_template(self.TEMPLATE_NAME)
    report = template.render(self._context())

    if isinstance(report, six.string_types):
        report = report.encode('utf-8')

    output_file.write(report)
def generate_report(self, output_file)
See base class. output_file must be a file handler that takes in bytes!
3.072335
2.848299
1.078656
if self.CSS_TEMPLATE_NAME is not None:
    template = TEMPLATE_ENV.get_template(self.CSS_TEMPLATE_NAME)
    style = template.render(self._context())

    if isinstance(style, six.string_types):
        style = style.encode('utf-8')

    output_file.write(style)
def generate_css(self, output_file)
Generate an external style sheet file. output_file must be a file handler that takes in bytes!
3.461627
3.315305
1.044135
# Calculate the information to pass to the template
src_stats = {
    src: self._src_path_stats(src) for src in self.src_paths()
}

# Include snippet style info if we're displaying
# source code snippets
if self.INCLUDE_SNIPPETS:
    snippet_style = Snippet.style_defs()
else:
    snippet_style = None

return {
    'css_url': self.css_url,
    'report_name': self.coverage_report_name(),
    'diff_name': self.diff_report_name(),
    'src_stats': src_stats,
    'total_num_lines': self.total_num_lines(),
    'total_num_violations': self.total_num_violations(),
    'total_percent_covered': self.total_percent_covered(),
    'snippet_style': snippet_style
}
def _context(self)
Return the context to pass to the template. The context is a dict of the form: { 'css_url': CSS_URL, 'report_name': REPORT_NAME, 'diff_name': DIFF_NAME, 'src_stats': {SRC_PATH: { 'percent_covered': PERCENT_COVERED, 'violation_lines': [LINE_NUM, ...] }, ... } 'total_num_lines': TOTAL_NUM_LINES, 'total_num_violations': TOTAL_NUM_VIOLATIONS, 'total_percent_covered': TOTAL_PERCENT_COVERED }
4.144144
2.453046
1.689387
combine_template = "{0}-{1}"
combined_list = []

# Add a terminating value of `None` to list
line_numbers.append(None)

start = line_numbers[0]
end = None

for line_number in line_numbers[1:]:
    # If the current number is adjacent to the previous number
    if (end if end else start) + 1 == line_number:
        end = line_number
    else:
        if end:
            combined_list.append(combine_template.format(start, end))
        else:
            combined_list.append(str(start))
        start = line_number
        end = None

return combined_list
def combine_adjacent_lines(line_numbers)
Given a sorted collection of line numbers this will turn them to strings and combine adjacent values [1, 2, 5, 6, 100] -> ["1-2", "5-6", "100"]
3.170775
3.110315
1.019438
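As a quick illustration of the behaviour documented above, here is a hedged, standalone sketch: the helper below mirrors the merging logic of the method but is re-implemented for the example (the name `combine_adjacent_lines` and the sample input are assumptions, not the library's API).

def combine_adjacent_lines(line_numbers):
    """Turn sorted line numbers into strings, merging adjacent runs."""
    combined = []
    start = end = None
    for num in line_numbers:
        if start is None:
            start = end = num
        elif num == end + 1:
            end = num
        else:
            combined.append(str(start) if start == end else "{0}-{1}".format(start, end))
            start = end = num
    if start is not None:
        combined.append(str(start) if start == end else "{0}-{1}".format(start, end))
    return combined

print(combine_adjacent_lines([1, 2, 5, 6, 100]))  # ['1-2', '5-6', '100']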
# Find violation lines
violation_lines = self.violation_lines(src_path)
violations = sorted(self._diff_violations()[src_path].violations)

# Load source snippets (if the report will display them)
# If we cannot load the file, then fail gracefully
if self.INCLUDE_SNIPPETS:
    try:
        snippets = Snippet.load_snippets_html(src_path, violation_lines)
    except IOError:
        snippets = []
else:
    snippets = []

return {
    'percent_covered': self.percent_covered(src_path),
    'violation_lines': TemplateReportGenerator.combine_adjacent_lines(violation_lines),
    'violations': violations,
    'snippets_html': snippets
}
def _src_path_stats(self, src_path)
Return a dict of statistics for the source file at `src_path`.
6.085907
5.994686
1.015217
parser = argparse.ArgumentParser(description=DESCRIPTION)

parser.add_argument(
    'coverage_xml', type=str, help=COVERAGE_XML_HELP, nargs='+'
)
parser.add_argument(
    '--html-report', metavar='FILENAME',
    type=str, default=None, help=HTML_REPORT_HELP
)
parser.add_argument(
    '--external-css-file', metavar='FILENAME',
    type=str, default=None, help=CSS_FILE_HELP,
)
parser.add_argument(
    '--compare-branch', metavar='BRANCH',
    type=str, default='origin/master', help=COMPARE_BRANCH_HELP
)
parser.add_argument(
    '--fail-under', metavar='SCORE',
    type=float, default='0', help=FAIL_UNDER_HELP
)
parser.add_argument(
    '--ignore-staged', action='store_true',
    default=False, help=IGNORE_STAGED_HELP
)
parser.add_argument(
    '--ignore-unstaged', action='store_true',
    default=False, help=IGNORE_UNSTAGED_HELP
)
parser.add_argument(
    '--exclude', metavar='EXCLUDE',
    type=str, nargs='+', help=EXCLUDE_HELP
)
parser.add_argument(
    '--src-roots', metavar='DIRECTORY',
    type=str, nargs='+',
    default=['src/main/java', 'src/test/java'],
    help=SRC_ROOTS_HELP
)

return vars(parser.parse_args(argv))
def parse_coverage_args(argv)
Parse command line arguments, returning a dict of valid options: { 'coverage_xml': COVERAGE_XML, 'html_report': None | HTML_REPORT, 'external_css_file': None | CSS_FILE, } where `COVERAGE_XML`, `HTML_REPORT`, and `CSS_FILE` are paths. The path strings may or may not exist.
1.767292
1.650609
1.070691
diff = GitDiffReporter(
    compare_branch, git_diff=GitDiffTool(),
    ignore_staged=ignore_staged, ignore_unstaged=ignore_unstaged,
    exclude=exclude)

xml_roots = [cElementTree.parse(xml_root) for xml_root in coverage_xml]
coverage = XmlCoverageReporter(xml_roots, src_roots)

# Build a report generator
if html_report is not None:
    css_url = css_file
    if css_url is not None:
        css_url = os.path.relpath(css_file, os.path.dirname(html_report))
    reporter = HtmlReportGenerator(coverage, diff, css_url=css_url)

    with open(html_report, "wb") as output_file:
        reporter.generate_report(output_file)

    if css_file is not None:
        with open(css_file, "wb") as output_file:
            reporter.generate_css(output_file)

reporter = StringReportGenerator(coverage, diff)
output_file = sys.stdout if six.PY2 else sys.stdout.buffer

# Generate the report
reporter.generate_report(output_file)

return reporter.total_percent_covered()
def generate_coverage_report(coverage_xml, compare_branch, html_report=None, css_file=None, ignore_staged=False, ignore_unstaged=False, exclude=None, src_roots=None)
Generate the diff coverage report, using kwargs from `parse_args()`.
2.702107
2.743314
0.984979
logging.basicConfig(format='%(message)s')

argv = argv or sys.argv
arg_dict = parse_coverage_args(argv[1:])

GitPathTool.set_cwd(directory)
fail_under = arg_dict.get('fail_under')

percent_covered = generate_coverage_report(
    arg_dict['coverage_xml'],
    arg_dict['compare_branch'],
    html_report=arg_dict['html_report'],
    css_file=arg_dict['external_css_file'],
    ignore_staged=arg_dict['ignore_staged'],
    ignore_unstaged=arg_dict['ignore_unstaged'],
    exclude=arg_dict['exclude'],
    src_roots=arg_dict['src_roots'],
)

if percent_covered >= fail_under:
    return 0
else:
    LOGGER.error("Failure. Coverage is below {}%.".format(fail_under))
    return 1
def main(argv=None, directory=None)
Main entry point for the tool, used by setup.py. Returns a value that can be passed into exit(), specifying the exit code: 1 is an error, 0 is a successful run.
3.790645
3.788648
1.000527
stdout_pipe = subprocess.PIPE
process = subprocess.Popen(
    command, stdout=stdout_pipe,
    stderr=stdout_pipe
)

try:
    stdout, stderr = process.communicate()
except OSError:
    sys.stderr.write(" ".join(
        [cmd.decode(sys.getfilesystemencoding())
         if isinstance(cmd, bytes) else cmd
         for cmd in command]
    ))
    raise

stderr = _ensure_unicode(stderr)
if process.returncode not in exit_codes:
    raise CommandError(stderr)

return _ensure_unicode(stdout), stderr
def execute(command, exit_codes=[0])
Execute the provided command, returning its output. Args: command (list[str]): list of tokens to execute as your command. exit_codes (list[int]): exit codes which do not indicate an error. Returns: (str, str) - stdout and stderr of the command. This will be unicode for Python < 3, str for Python 3. Raises: CommandError if the command exits with a code not listed in `exit_codes`
3.048021
3.182713
0.95768
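A minimal usage sketch of the pattern above: run a command, capture output, and treat any exit code outside an allow-list as an error. The `CommandError` class and `run` helper are re-declared here for illustration; in the library these are assumed to live in its own modules.

import subprocess

class CommandError(Exception):
    pass

def run(command, exit_codes=(0,)):
    # Run the command, capturing stdout and stderr
    process = subprocess.Popen(command, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    if process.returncode not in exit_codes:
        raise CommandError(stderr.decode('utf-8', 'replace'))
    return stdout.decode('utf-8', 'replace'), stderr.decode('utf-8', 'replace')

stdout, _ = run(["echo", "hello"])
print(stdout.strip())  # hello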
process = subprocess.Popen(
    command,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE
)
process.communicate()
exit_code = process.returncode
return exit_code
def run_command_for_code(command)
Returns command's exit code.
2.286844
2.143485
1.066881
if isinstance(text, six.binary_type):
    return text.decode(sys.getfilesystemencoding(), 'replace')
else:
    return text
def _ensure_unicode(text)
Ensures the text passed in becomes unicode Args: text (str|unicode) Returns: unicode
2.530595
3.265939
0.774844
violations_dict = defaultdict(list)

for report in reports:
    xml_document = cElementTree.fromstring("".join(report))
    files = xml_document.findall(".//file")
    for file_tree in files:
        for error in file_tree.findall('error'):
            line_number = error.get('line')
            error_str = "{}: {}".format(error.get('severity'),
                                        error.get('message'))
            violation = Violation(int(line_number), error_str)
            filename = GitPathTool.relative_path(file_tree.get('name'))
            violations_dict[filename].append(violation)

return violations_dict
def parse_reports(self, reports)
Args: reports: list[str] - output from the report Return: A dict[Str:Violation] Violation is a simple named tuple Defined above
3.641391
3.534553
1.030227
violations_dict = defaultdict(list)

for report in reports:
    xml_document = cElementTree.fromstring("".join(report))
    bugs = xml_document.findall(".//BugInstance")
    for bug in bugs:
        category = bug.get('category')
        short_message = bug.find('ShortMessage').text
        line = bug.find('SourceLine')
        if line.get('start') is None or line.get('end') is None:
            continue
        start = int(line.get('start'))
        end = int(line.get('end'))
        for line_number in range(start, end + 1):
            error_str = "{}: {}".format(category, short_message)
            violation = Violation(line_number, error_str)
            filename = GitPathTool.relative_path(line.get('sourcepath'))
            violations_dict[filename].append(violation)

return violations_dict
def parse_reports(self, reports)
Args: reports: list[str] - output from the report Return: A dict[Str:Violation] Violation is a simple named tuple Defined above
3.053304
2.966969
1.029099
formatter = HtmlFormatter()
formatter.style.highlight_color = cls.VIOLATION_COLOR
return formatter.get_style_defs()
def style_defs(cls)
Return the CSS style definitions required by the formatted snippet.
8.907605
8.643761
1.030524
formatter = HtmlFormatter(
    cssclass=self.DIV_CSS_CLASS,
    linenos=True,
    linenostart=self._start_line,
    hl_lines=self._shift_lines(
        self._violation_lines,
        self._start_line
    ),
    lineanchors=self._src_filename
)

return pygments.format(self.src_tokens(), formatter)
def html(self)
Return an HTML representation of the snippet.
6.384935
5.990792
1.065791
num_lines = len(self.text().split('\n'))
end_line = self._start_line + num_lines - 1
return (self._start_line, end_line)
def line_range(self)
Return a tuple of the form `(start_line, end_line)` indicating the start and end line number of the snippet.
3.503406
2.920176
1.199724
snippet_list = cls.load_snippets(src_path, violation_lines)
return [snippet.html() for snippet in snippet_list]
def load_snippets_html(cls, src_path, violation_lines)
Load snippets from the file at `src_path` and format them as HTML. See `load_snippets()` for details.
3.663021
3.247129
1.12808
# Load the contents of the file
with openpy(GitPathTool.relative_path(src_path)) as src_file:
    contents = src_file.read()

# Convert the source file to unicode (Python < 3)
if isinstance(contents, six.binary_type):
    contents = contents.decode('utf-8', 'replace')

# Construct a list of snippet ranges
src_lines = contents.split('\n')
snippet_ranges = cls._snippet_ranges(len(src_lines), violation_lines)

# Parse the source into tokens
token_stream = cls._parse_src(contents, src_path)

# Group the tokens by snippet
token_groups = cls._group_tokens(token_stream, snippet_ranges)

return [
    Snippet(tokens, src_path, start, violation_lines)
    for (start, _), tokens in sorted(token_groups.items())
]
def load_snippets(cls, src_path, violation_lines)
Load snippets from the file at `src_path` to show violations on lines in the list `violation_lines` (list of line numbers, starting at index 0). The file at `src_path` should be a text file (not binary). Returns a list of `Snippet` instances. Raises an `IOError` if the file could not be loaded.
3.722293
3.886051
0.95786
# Parse the source into tokens
try:
    lexer = guess_lexer_for_filename(src_filename, src_contents)
except ClassNotFound:
    lexer = TextLexer()

# Ensure that we don't strip newlines from
# the source file when lexing.
lexer.stripnl = False

return pygments.lex(src_contents, lexer)
def _parse_src(cls, src_contents, src_filename)
Return a stream of `(token_type, value)` tuples parsed from `src_contents` (str) Uses `src_filename` to guess the type of file so it can highlight syntax correctly.
4.343834
3.95887
1.097241
# Create a map from ranges (start/end tuples) to tokens
token_map = {rng: [] for rng in range_list}

# Keep track of the current line number; we will
# increment this as we encounter newlines in token values
line_num = 1

for ttype, val in token_stream:

    # If there are newlines in this token,
    # we need to split it up and check whether
    # each line within the token is within one
    # of our ranges.
    if '\n' in val:
        val_lines = val.split('\n')

        # Check if the tokens match each range
        for (start, end), filtered_tokens in six.iteritems(token_map):

            # Filter out lines that are not in this range
            include_vals = [
                val_lines[i] for i in
                range(0, len(val_lines))
                if i + line_num in range(start, end + 1)
            ]

            # If we found any lines, store the tokens
            if len(include_vals) > 0:
                token = (ttype, '\n'.join(include_vals))
                filtered_tokens.append(token)

        # Increment the line number
        # by the number of lines we found
        line_num += len(val_lines) - 1

    # No newline in this token
    # If we're in the line range, add it
    else:

        # Check if the tokens match each range
        for (start, end), filtered_tokens in six.iteritems(token_map):

            # If we got a match, store the token
            if line_num in range(start, end + 1):
                filtered_tokens.append((ttype, val))

            # Otherwise, ignore the token

return token_map
def _group_tokens(cls, token_stream, range_list)
Group tokens into snippet ranges. `token_stream` is a generator that produces `(token_type, value)` tuples, `range_list` is a list of `(start, end)` tuples representing the (inclusive) range of line numbers for each snippet. Assumes that `range_list` is an ascending order by start value. Returns a dict mapping ranges to lists of tokens: { (4, 10): [(ttype_1, val_1), (ttype_2, val_2), ...], (29, 39): [(ttype_3, val_3), ...], ... } The algorithm is slightly complicated because a single token can contain multiple line breaks.
3.352681
3.122912
1.073575
current_range = (None, None)
lines_since_last_violation = 0
snippet_ranges = []

for line_num in range(1, num_src_lines + 1):

    # If we have not yet started a snippet,
    # check if we can (is this line a violation?)
    if current_range[0] is None:
        if line_num in violation_lines:

            # Expand to include extra context, but not before line 1
            snippet_start = max(1, line_num - cls.NUM_CONTEXT_LINES)
            current_range = (snippet_start, None)
            lines_since_last_violation = 0

    # If we are within a snippet, check if we
    # can end the snippet (have we gone enough
    # lines without hitting a violation?)
    elif current_range[1] is None:
        if line_num in violation_lines:
            lines_since_last_violation = 0

        elif lines_since_last_violation > cls.MAX_GAP_IN_SNIPPET:

            # Expand to include extra context, but not after last line
            snippet_end = line_num - lines_since_last_violation
            snippet_end = min(
                num_src_lines,
                snippet_end + cls.NUM_CONTEXT_LINES
            )
            current_range = (current_range[0], snippet_end)

            # Store the snippet and start looking for the next one
            snippet_ranges.append(current_range)
            current_range = (None, None)

    # Another line since the last violation
    lines_since_last_violation += 1

# If we started a snippet but didn't finish it, do so now
if current_range[0] is not None and current_range[1] is None:
    snippet_ranges.append((current_range[0], num_src_lines))

return snippet_ranges
def _snippet_ranges(cls, num_src_lines, violation_lines)
Given the number of source file lines and list of violation line numbers, return a list of snippet ranges of the form `(start_line, end_line)`. Each snippet contains a few extra lines of context before/after the first/last violation. Nearby violations are grouped within the same snippet.
2.604485
2.491807
1.045219
contents = []

for file_handle in report_files:
    # Convert to unicode, replacing unreadable chars
    contents.append(
        file_handle.read().decode('utf-8', 'replace')
    )

return contents
def _load_reports(self, report_files)
Args: report_files: list[file] reports to read in
5.56101
5.387481
1.03221
if not any(src_path.endswith(ext) for ext in self.driver.supported_extensions):
    return []

if src_path not in self.violations_dict:
    if self.reports:
        self.violations_dict = self.driver.parse_reports(self.reports)
    else:
        if self.driver_tool_installed is None:
            self.driver_tool_installed = self.driver.installed()
        if not self.driver_tool_installed:
            raise EnvironmentError("{} is not installed".format(self.driver.name))

        command = copy.deepcopy(self.driver.command)
        if self.options:
            command.append(self.options)
        if os.path.exists(src_path):
            command.append(src_path.encode(sys.getfilesystemencoding()))

        output, _ = execute(command, self.driver.exit_codes)
        self.violations_dict.update(self.driver.parse_reports([output]))

return self.violations_dict[src_path]
def violations(self, src_path)
Return a list of Violations recorded in `src_path`.
2.992333
2.901613
1.031265
violations_dict = defaultdict(list)

for report in reports:
    if self.expression.flags & re.MULTILINE:
        matches = (match for match in re.finditer(self.expression, report))
    else:
        matches = (self.expression.match(line) for line in report.split('\n'))

    for match in matches:
        if match is not None:
            src, line_number, message = match.groups()
            # Transform src to a relative path, if it isn't already
            src = os.path.relpath(src)
            violation = Violation(int(line_number), message)
            violations_dict[src].append(violation)

return violations_dict
def parse_reports(self, reports)
Args: reports: list[str] - output from the report Return: A dict[Str:Violation] Violation is a simple named tuple Defined above
3.147907
3.035537
1.037018
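To make the regex-driven parsing concrete, here is a hedged, self-contained sketch of the same idea: match "path:line: message" style report lines and group Violation tuples by file. The pattern and the sample line are illustrative assumptions, not the tool's actual defaults.

import re
from collections import defaultdict, namedtuple

Violation = namedtuple('Violation', ['line', 'message'])

# Illustrative pattern: "<src>:<line>: <message>"
expression = re.compile(r'^([^:\n]+):(\d+): (.*)$')

def parse_report(report):
    violations = defaultdict(list)
    for line in report.split('\n'):
        match = expression.match(line)
        if match is not None:
            src, line_number, message = match.groups()
            violations[src].append(Violation(int(line_number), message))
    return violations

sample = "pkg/module.py:12: unused import 'os'"
print(parse_report(sample))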
init_py = open('{0}.py'.format(module)).read()
return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
def get_version(module)
Return package version as listed in `__version__`.
2.673784
2.31198
1.156491
if dotenv is None:
    frame_filename = sys._getframe().f_back.f_code.co_filename
    dotenv = os.path.join(os.path.dirname(frame_filename), '.env')

if os.path.isdir(dotenv) and os.path.isfile(os.path.join(dotenv, '.env')):
    dotenv = os.path.join(dotenv, '.env')

if os.path.exists(dotenv):
    with open(dotenv) as f:
        for k, v in parse_dotenv(f.read()).items():
            if override:
                os.environ[k] = v
            else:
                os.environ.setdefault(k, v)
else:
    warnings.warn("Not reading {0} - it doesn't exist.".format(dotenv),
                  stacklevel=2)
def read_dotenv(dotenv=None, override=False)
Read a .env file into os.environ. If not given a path to a dotenv path, does filthy magic stack backtracking to find manage.py and then find the dotenv. If tests rely on .env files, setting the override flag to True is a safe way to ensure tests run consistently across all environments. :param override: True if values in .env should override system variables.
2.010646
2.187109
0.919317
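The override behaviour above boils down to os.environ[k] = v versus os.environ.setdefault(k, v). A small standalone sketch of that precedence rule (the variable name and values are made up for illustration):

import os

def apply_env(values, override=False):
    # Mirror the precedence rule from read_dotenv:
    # override=True clobbers existing variables, otherwise they win.
    for k, v in values.items():
        if override:
            os.environ[k] = v
        else:
            os.environ.setdefault(k, v)

os.environ['EXAMPLE_VAR'] = 'from-shell'
apply_env({'EXAMPLE_VAR': 'from-dotenv'})
print(os.environ['EXAMPLE_VAR'])   # from-shell
apply_env({'EXAMPLE_VAR': 'from-dotenv'}, override=True)
print(os.environ['EXAMPLE_VAR'])   # from-dotenv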
self.transport = transport
self.username = auth.username
self.address = address
self.port = port
def configure(self, transport, auth, address, port)
Connect paramiko transport :type auth: :py:class:`margaritashotgun.auth.AuthMethods` :param auth: authentication object :type address: str :param address: remote server ip or hostname :type port: int :param port: remote server port
2.959331
3.399721
0.870463
self.local_port = local_port
self.remote_address = remote_address
self.remote_port = remote_port

logger.debug(("Starting ssh tunnel {0}:{1}:{2} for "
              "{3}@{4}".format(local_port, remote_address, remote_port,
                               self.username, self.address)))

self.forward = Forward(local_port, remote_address, remote_port,
                       self.transport)
self.forward.start()
def start(self, local_port, remote_address, remote_port)
Start ssh tunnel :type local_port: int :param local_port: local tunnel endpoint ip binding :type remote_address: str :param remote_address: remote tunnel endpoint ip binding :type remote_port: int :param remote_port: remote tunnel endpoint port binding
2.729621
2.643497
1.032579
if self.local_port is not None:
    logger.debug(("Stopping ssh tunnel {0}:{1}:{2} for "
                  "{3}@{4}".format(self.local_port, self.remote_address,
                                   self.remote_port, self.username,
                                   self.address)))
if self.forward is not None:
    self.forward.stop()
    self.forward.join()
if self.transport is not None:
    self.transport.close()
def cleanup(self)
Cleanup resources used during execution
3.417866
3.425733
0.997704
if format_string is None:
    format_string = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
time_format = "%Y-%m-%dT%H:%M:%S"

logger = logging.getLogger(name)
logger.setLevel(level)
handler = logging.StreamHandler()
handler.setLevel(level)
formatter = logging.Formatter(format_string, time_format)
handler.setFormatter(formatter)
logger.addHandler(handler)

paramiko_log_level = logging.CRITICAL
paramiko_log = logging.getLogger('paramiko')
paramiko_log.setLevel(paramiko_log_level)
paramiko_handler = logging.StreamHandler()
paramiko_handler.setLevel(paramiko_log_level)
paramiko_handler.setFormatter(formatter)
paramiko_log.addHandler(paramiko_handler)
def set_stream_logger(name='margaritashotgun', level=logging.INFO, format_string=None)
Add a stream handler for the provided name and level to the logging module. >>> import margaritashotgun >>> margaritashotgun.set_stream_logger('marsho', logging.DEBUG) :type name: string :param name: Log name :type level: int :param level: Logging level :type format_string: str :param format_string: Log message format
1.515395
1.587839
0.954376
if port is None:
    self.remote_port = 22
else:
    self.remote_port = int(port)

auth = Auth(username=username, password=password, key=key)

if jump_host is not None:
    jump_auth = Auth(username=jump_host['username'],
                     password=jump_host['password'],
                     key=jump_host['key'])
    if jump_host['port'] is None:
        jump_host['port'] = 22
else:
    jump_auth = None

self.shell.connect(auth, address, self.remote_port, jump_host, jump_auth)
transport = self.shell.transport()
self.tunnel.configure(transport, auth, address, self.remote_port)
self.remote_addr = address
def connect(self, username, password, key, address, port, jump_host)
Connect ssh tunnel and shell executor to remote host :type username: str :param username: username for authentication :type password: str :param password: password for authentication, may be used to unlock rsa key :type key: str :param key: path to rsa key for authentication :type address: str :param address: address for remote host :type port: int :param port: ssh port for remote host
2.46589
2.707025
0.910922
self.tunnel.start(local_port, remote_address, remote_port)
self.tunnel_port = local_port
def start_tunnel(self, local_port, remote_address, remote_port)
Start ssh forward tunnel :type local_port: int :param local_port: local port binding for ssh tunnel :type remote_address: str :param remote_address: remote tunnel endpoint bind address :type remote_port: int :param remote_port: remote tunnel endpoint bind port
2.974425
3.461511
0.859285
result = self.shell.execute(self.commands.mem_size.value)
stdout = self.shell.decode(result['stdout'])
stderr = self.shell.decode(result['stderr'])
return int(stdout)
def mem_size(self)
Returns the memory size in bytes of the remote host
5.037837
4.417768
1.140358
result = self.shell.execute(self.commands.kernel_version.value)
stdout = self.shell.decode(result['stdout'])
stderr = self.shell.decode(result['stderr'])
return stdout
def kernel_version(self)
Returns the kernel version of the remote host
4.770094
4.149145
1.149657
tries = 0
pattern = self.commands.lime_pattern.value.format(listen_address,
                                                  listen_port)
lime_loaded = False

while tries < max_tries and lime_loaded is False:
    lime_loaded = self.check_for_lime(pattern)
    tries = tries + 1
    time.sleep(wait)

return lime_loaded
def wait_for_lime(self, listen_port, listen_address="0.0.0.0", max_tries=20, wait=1)
Wait for lime to load unless max_tries is exceeded :type listen_port: int :param listen_port: port LiME is listening for connections on :type listen_address: str :param listen_address: address LiME is listening for connections on :type max_tries: int :param max_tries: maximum number of checks that LiME has loaded :type wait: int :param wait: time to wait between checks
3.716432
3.973773
0.93524
check = self.commands.lime_check.value
lime_loaded = False

result = self.shell.execute(check)
stdout = self.shell.decode(result['stdout'])
connections = self.net_parser.parse(stdout)

for conn in connections:
    local_addr, remote_addr = conn
    if local_addr == pattern:
        lime_loaded = True
        break

return lime_loaded
def check_for_lime(self, pattern)
Check to see if LiME has loaded on the remote system :type pattern: str :param pattern: pattern to check output against
5.754609
6.118891
0.940466
if local_path is None:
    raise FileNotFoundError(local_path)
self.shell.upload_file(local_path, remote_path)
def upload_module(self, local_path=None, remote_path="/tmp/lime.ko")
Upload LiME kernel module to remote host :type local_path: str :param local_path: local path to lime kernel module :type remote_path: str :param remote_path: remote path to upload lime kernel module
5.257835
7.334074
0.716905
load_command = self.commands.load_lime.value.format(remote_path,
                                                    listen_port,
                                                    dump_format)
self.shell.execute_async(load_command)
def load_lime(self, remote_path, listen_port, dump_format='lime')
Load LiME kernel module from remote filesystem :type remote_path: str :param remote_path: path to LiME kernel module on remote host :type listen_port: int :param listen_port: port LiME uses to listen to remote connections :type dump_format: str :param dump_format: LiME memory dump file format
4.324305
5.800249
0.745538
try:
    self.unload_lime()
except AttributeError:
    pass
self.tunnel.cleanup()
self.shell.cleanup()
def cleanup(self)
Release resources used by supporting classes
11.937627
11.18446
1.067341
try:
    self.target_address = address
    sock = None
    if jump_host is not None:
        self.jump_host_ssh = paramiko.SSHClient()
        self.jump_host_ssh.set_missing_host_key_policy(
            paramiko.AutoAddPolicy())
        self.connect_with_auth(self.jump_host_ssh, jump_auth,
                               jump_host['addr'], jump_host['port'], sock)
        transport = self.jump_host_ssh.get_transport()
        dest_addr = (address, port)
        jump_addr = (jump_host['addr'], jump_host['port'])
        channel = transport.open_channel('direct-tcpip', dest_addr,
                                         jump_addr)
        self.connect_with_auth(self.ssh, auth, address, port, channel)
    else:
        self.connect_with_auth(self.ssh, auth, address, port, sock)
except (AuthenticationException, SSHException,
        ChannelException, SocketError) as ex:
    raise SSHConnectionError("{0}:{1}".format(address, port), ex)
def connect(self, auth, address, port, jump_host, jump_auth)
Creates an ssh session to a remote host :type auth: :py:class:`margaritashotgun.auth.AuthMethods` :param auth: Authentication object :type address: str :param address: remote server address :type port: int :param port: remote server port
2.369913
2.418875
0.979758
ssh.connect(username=username,
            password=password,
            hostname=address,
            port=port,
            sock=sock,
            timeout=timeout)
def connect_with_password(self, ssh, username, password, address, port, sock, timeout=20)
Create an ssh session to a remote host with a username and password :type username: str :param username: username used for ssh authentication :type password: str :param password: password used for ssh authentication :type address: str :param address: remote server address :type port: int :param port: remote server port
2.300349
2.94755
0.780427
ssh.connect(hostname=address,
            port=port,
            username=username,
            pkey=key,
            sock=sock,
            timeout=timeout)
def connect_with_key(self, ssh, username, key, address, port, sock, timeout=20)
Create an ssh session to a remote host with a username and rsa key :type username: str :param username: username used for ssh authentication :type key: :py:class:`paramiko.key.RSAKey` :param key: paramiko rsa key used for ssh authentication :type address: str :param address: remote server address :type port: int :param port: remote server port
2.204936
2.991531
0.737059
try:
    if self.ssh.get_transport() is not None:
        logger.debug('{0}: executing "{1}"'.format(self.target_address,
                                                   command))
        stdin, stdout, stderr = self.ssh.exec_command(command)
        return dict(zip(['stdin', 'stdout', 'stderr'],
                        [stdin, stdout, stderr]))
    else:
        raise SSHConnectionError(self.target_address,
                                 "ssh transport is closed")
except (AuthenticationException, SSHException,
        ChannelException, SocketError) as ex:
    logger.critical(("{0} execution failed on {1} with exception:"
                     "{2}".format(command, self.target_address, ex)))
    raise SSHCommandError(self.target_address, command, ex)
def execute(self, command)
Executes command on remote hosts :type command: str :param command: command to be run on remote host
3.218577
3.294606
0.976923
try:
    logger.debug(('{0}: execute async "{1}"'
                  'with callback {2}'.format(self.target_address,
                                             command, callback)))
    future = self.executor.submit(self.execute, command)
    if callback is not None:
        future.add_done_callback(callback)
    return future
except (AuthenticationException, SSHException,
        ChannelException, SocketError) as ex:
    logger.critical(("{0} execution failed on {1} with exception:"
                     "{2}".format(command, self.target_address, ex)))
    raise SSHCommandError(self.target_address, command, ex)
def execute_async(self, command, callback=None)
Executes command on remote hosts without blocking :type command: str :param command: command to be run on remote host :type callback: function :param callback: function to call when execution completes
3.688943
3.879975
0.950765
data = stream.read().decode(encoding).strip("\n")
if data != "":
    logger.debug(('{0}: decoded "{1}" with encoding '
                  '{2}'.format(self.target_address, data, encoding)))
return data
def decode(self, stream, encoding='utf-8')
Convert paramiko stream into a string :type stream: :param stream: stream to convert :type encoding: str :param encoding: stream encoding
6.872019
7.105217
0.967179
logger.debug("{0}: uploading {1} to {0}:{2}".format(self.target_address,
                                                    local_path,
                                                    remote_path))
try:
    sftp = paramiko.SFTPClient.from_transport(self.transport())
    sftp.put(local_path, remote_path)
    sftp.close()
except SSHException as ex:
    logger.warn(("{0}: LiME module upload failed with exception:"
                 "{1}".format(self.target_address, ex)))
def upload_file(self, local_path, remote_path)
Upload a file from the local filesystem to the remote host :type local_path: str :param local_path: path of local file to upload :type remote_path: str :param remote_path: destination path of upload on remote host
3.651904
3.940446
0.926774
for future in self.futures:
    future.cancel()
self.executor.shutdown(wait=10)
if self.ssh.get_transport() is not None:
    self.ssh.close()
def cleanup(self)
Release resources used during shell execution
4.721013
4.242183
1.112873
parser = argparse.ArgumentParser(
    description='Remote memory acquisition wrapper for LiME')

root = parser.add_mutually_exclusive_group(required=True)
root.add_argument('-c', '--config', help='path to config.yml')
root.add_argument('--server',
                  help='hostname or ip of target server')
root.add_argument('--version', action='version',
                  version="%(prog)s {ver}".format(ver=__version__))

opts = parser.add_argument_group()
opts.add_argument('--port', help='ssh port on remote server')
opts.add_argument('--username',
                  help='username for ssh connection to target server')
opts.add_argument('--module',
                  help='path to kernel lime kernel module')
opts.add_argument('--password',
                  help='password for user or encrypted keyfile')
opts.add_argument('--key',
                  help='path to rsa key for ssh connection')
opts.add_argument('--jump-server',
                  help='hostname or ip of jump server')
opts.add_argument('--jump-port',
                  help='ssh port on jump server')
opts.add_argument('--jump-username',
                  help='username for ssh connection to jump server')
opts.add_argument('--jump-password',
                  help='password for jump-user or encrypted keyfile')
opts.add_argument('--jump-key',
                  help='path to rsa key for ssh connection to jump server')
opts.add_argument('--filename', help='memory dump filename')
opts.add_argument('--repository', action='store_true',
                  help='enable automatic kernel module downloads')
opts.add_argument('--repository-url',
                  help='kernel module repository url')
opts.add_argument('--repository-manifest',
                  help='specify alternate repository manifest')
opts.add_argument('--gpg-no-verify', dest='gpg_verify',
                  action='store_false',
                  help='skip lime module gpg signature check')
opts.add_argument('--workers', default=1,
                  help=('number of workers to run in parallel,'
                        'default: auto acceptable values are'
                        '(INTEGER | "auto")'))
opts.add_argument('--verbose', action='store_true',
                  help='log debug messages')
opts.set_defaults(repository_manifest='primary')
opts.set_defaults(gpg_verify=True)

output = parser.add_mutually_exclusive_group(required=False)
output.add_argument('--bucket',
                    help='memory dump output bucket')
output.add_argument('--output-dir',
                    help='memory dump output directory')

log = parser.add_argument_group()
log.add_argument('--log-dir', help='log directory')
log.add_argument('--log-prefix', help='log file prefix')

return parser.parse_args(args)
def parse_args(self, args)
Parse arguments and return an arguments object >>> from margaritashotgun.cli import Cli >>> cli = Cli() >>> cli.parse_args(sys.argv[1:]) :type args: list :param args: list of arguments
2.853743
2.891803
0.986839
if arguments is not None:
    args_config = self.configure_args(arguments)
    base_config = copy.deepcopy(default_config)
    working_config = self.merge_config(base_config, args_config)

if config is not None:
    self.validate_config(config)
    base_config = copy.deepcopy(default_config)
    working_config = self.merge_config(base_config, config)

# override configuration with environment variables
repo = self.get_env_default('LIME_REPOSITORY', 'disabled')
repo_url = self.get_env_default('LIME_REPOSITORY_URL',
                                working_config['repository']['url'])

if repo.lower() == 'enabled':
    working_config['repository']['enabled'] = True

working_config['repository']['url'] = repo_url

return working_config
def configure(self, arguments=None, config=None)
Merge command line arguments, config files, and default configs :type arguments: argparse.Namespace :params arguments: Arguments produced by Cli.parse_args :type config: dict :params config: configuration dict to merge and validate
2.928768
2.872027
1.019757
if variable in os.environ:
    env_var = os.environ[variable]
else:
    env_var = default
return env_var
def get_env_default(self, variable, default)
Fetch environment variables, returning a default if not found
2.280808
2.172511
1.049849
module, key, config_path = self.check_file_paths(arguments.module,
                                                 arguments.key,
                                                 arguments.config)
log_dir = self.check_directory_paths(arguments.log_dir)

if arguments.repository_url is None:
    url = default_config['repository']['url']
else:
    url = arguments.repository_url

args_config = dict(aws=dict(bucket=arguments.bucket),
                   logging=dict(dir=arguments.log_dir,
                                prefix=arguments.log_prefix),
                   workers=arguments.workers,
                   repository=dict(enabled=arguments.repository,
                                   url=url,
                                   manifest=arguments.repository_manifest,
                                   gpg_verify=arguments.gpg_verify))

if arguments.server is not None:
    jump_host = None
    if arguments.jump_server is not None:
        if arguments.jump_port is not None:
            jump_port = int(arguments.jump_port)
        else:
            jump_port = None
        jump_host = dict(zip(jump_host_allowed_keys,
                             [arguments.jump_server, jump_port,
                              arguments.jump_username,
                              arguments.jump_password,
                              arguments.jump_key]))
    if arguments.port is not None:
        port = int(arguments.port)
    else:
        port = None
    host = dict(zip(host_allowed_keys,
                    [arguments.server, port, arguments.username,
                     arguments.password, module, key,
                     arguments.filename, jump_host]))
    args_config['hosts'] = []
    args_config['hosts'].append(host)

if config_path is not None:
    try:
        config = self.load_config(config_path)
        self.validate_config(config)
        args_config.update(config)
    except YAMLError as ex:
        logger.warn('Invalid yaml Format: {0}'.format(ex))
        raise
    except InvalidConfigurationError as ex:
        logger.warn(ex)
        raise

return args_config
def configure_args(self, arguments)
Create configuration hash from command line arguments :type arguments: :py:class:`argparse.Namespace` :param arguments: arguments produced by :py:meth:`Cli.parse_args()`
2.890359
2.895008
0.998394
for path in args:
    if path is not None:
        try:
            self.check_file_path(path)
        except OSError as ex:
            logger.warn(ex)
            raise
return args
def check_file_paths(self, *args)
Ensure all arguments provided correspond to a file
3.914475
3.986073
0.982038
if not os.path.exists(path):
    msg = "File Not Found {}".format(path)
    raise OSError(msg)
def check_file_path(self, path)
Ensure file exists at the provided path :type path: string :param path: path to file to check
4.246131
5.536074
0.766993
for path in args:
    if path is not None:
        try:
            self.check_directory_path(path)
        except OSError as ex:
            logger.warn(ex)
            raise
return args
def check_directory_paths(self, *args)
Ensure all arguments correspond to directories
3.888583
3.825269
1.016551
if not os.path.isdir(path):
    msg = "Directory Does Not Exist {}".format(path)
    raise OSError(msg)
def check_directory_path(self, path)
Ensure directory exists at the provided path :type path: string :param path: path to directory to check
4.135379
5.381303
0.768472
try:
    hosts = config['hosts']
except KeyError:
    raise InvalidConfigurationError('hosts', "",
                                    reason=('hosts configuration '
                                            'section is required'))

for key in config.keys():
    if key not in default_allowed_keys:
        raise InvalidConfigurationError(key, config[key])

bucket = False

# optional configuration
try:
    for key in config['aws'].keys():
        if key == 'bucket' and config['aws'][key] is not None:
            bucket = True
        if key not in aws_allowed_keys:
            raise InvalidConfigurationError(key, config['aws'][key])
except KeyError:
    pass

# optional configuration
try:
    for key in config['logging'].keys():
        if key not in logging_allowed_keys:
            raise InvalidConfigurationError(key, config['logging'][key])
except KeyError:
    pass

# optional configuration
try:
    for key in config['repository'].keys():
        if key not in repository_allowed_keys:
            raise InvalidConfigurationError(key, config['repository'][key])
except KeyError:
    pass

# required configuration
if type(config['hosts']) is not list:
    raise InvalidConfigurationError('hosts', config['hosts'],
                                    reason="hosts must be a list")

filename = False
for host in config['hosts']:
    for key in host.keys():
        if key == 'filename' and host['filename'] is not None:
            filename = True
        if key == 'jump_host' and host['jump_host'] is not None:
            for jump_key in host['jump_host'].keys():
                if jump_key not in jump_host_allowed_keys:
                    raise InvalidConfigurationError(key, host['jump_host'])
        if key not in host_allowed_keys:
            raise InvalidConfigurationError(key, host[key])

if bucket and filename:
    raise InvalidConfigurationError('bucket', config['aws']['bucket'],
                                    reason=('bucket configuration is'
                                            'incompatible with filename'
                                            'configuration in hosts'))
def validate_config(self, config)
Validate configuration dict keys are supported :type config: dict :param config: configuration dictionary
2.182309
2.187618
0.997573
try:
    return paramiko.RSAKey.from_private_key_file(key_path)
except PasswordRequiredException:
    return paramiko.RSAKey.from_private_key_file(key_path,
                                                 password=password)
def load_key(self, key_path, password)
Creates paramiko rsa key :type key_path: str :param key_path: path to rsa key :type password: str :param password: password to try if rsa key is encrypted
2.915753
2.711267
1.075421
logger = logging.getLogger(__name__)
try:
    # Check repository GPG settings before starting workers
    # Handling this here prevents subprocesses from needing stdin access
    repo_conf = self.config['repository']
    repo = None
    if repo_conf['enabled'] and repo_conf['gpg_verify']:
        try:
            repo = Repository(repo_conf['url'], repo_conf['gpg_verify'])
            repo.init_gpg()
        except Exception as ex:
            # Do not prompt to install gpg keys unless running interactively
            if repo is not None and self.library is False:
                if isinstance(ex, RepositoryUntrustedSigningKeyError):
                    installed = repo.prompt_for_install()
                    if installed is False:
                        logger.critical(("repository signature not "
                                         "installed, install the "
                                         "signature manually or use "
                                         "the --gpg-no-verify flag "
                                         "to bypass this check"))
                        quit(1)
            else:
                logger.critical(ex)
                quit(1)

    conf = self.map_config()
    workers = Workers(conf,
                      self.config['workers'],
                      name=self.name,
                      library=self.library)
    description = 'memory capture action'
    results = workers.spawn(description)
    self.statistics(results)

    if self.library is True:
        return dict([('total', self.total),
                     ('completed', self.completed_addresses),
                     ('failed', self.failed_addresses)])
    else:
        logger.info(("{0} hosts processed. completed: {1} "
                     "failed {2}".format(self.total, self.completed,
                                         self.failed)))
        logger.info("completed_hosts: {0}".format(self.completed_addresses))
        logger.info("failed_hosts: {0}".format(self.failed_addresses))
        quit()
except KeyboardInterrupt:
    workers.cleanup(terminate=True)
    if self.library:
        raise
    else:
        quit(1)
def run(self)
Captures remote hosts memory
5.160416
5.064718
1.018895
if filename is None:
    raise MemoryCaptureAttributeMissingError('filename')

if destination == OutputDestinations.local:
    logger.info("{0}: dumping to file://{1}".format(self.remote_addr,
                                                    filename))
    result = self.to_file(filename, tunnel_addr, tunnel_port)
elif destination == OutputDestinations.s3:
    if bucket is None:
        raise MemoryCaptureAttributeMissingError('bucket')
    logger.info(("{0}: dumping memory to s3://{1}/"
                 "{2}".format(self.remote_addr, bucket, filename)))
    result = self.to_s3(bucket, filename, tunnel_addr, tunnel_port)
else:
    raise MemoryCaptureOutputMissingError(self.remote_addr)

return result
def capture(self, tunnel_addr, tunnel_port, filename=None, bucket=None, destination=None)
Captures memory based on the provided OutputDestination :type tunnel_addr: str :param tunnel_addr: ssh tunnel hostname or ip :type tunnel_port: int :param tunnel_port: ssh tunnel port :type filename: str :param filename: memory dump output filename :type bucket: str :param bucket: output s3 bucket :type destination: :py:class:`margaritashotgun.memory.OutputDestinations` :param destination: OutputDestinations member
3.223049
3.036053
1.061592
if self.progressbar:
    self.bar = ProgressBar(widgets=self.widgets,
                           maxval=self.max_size).start()
    self.bar.start()

with open(filename, 'wb') as self.outfile:
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.sock.connect((tunnel_addr, tunnel_port))
    self.sock.settimeout(self.sock_timeout)

    bytes_since_update = 0
    while True:
        try:
            data = self.sock.recv(self.recv_size)
            data_length = len(data)
            if not data:
                break
            self.outfile.write(data)
            self.transfered = self.transfered + data_length
            bytes_since_update += data_length
            data = None
            data_length = 0
            if bytes_since_update > self.update_threshold:
                self.update_progress()
                bytes_since_update = 0
        except (socket.timeout, socket.error) as ex:
            if isinstance(ex, socket.timeout):
                break
            elif isinstance(ex, socket.error):
                if ex.errno == errno.EINTR:
                    pass
                else:
                    self.cleanup()
                    raise
            else:
                self.cleanup()
                raise

self.cleanup()
logger.info('{0}: capture complete: {1}'.format(self.remote_addr,
                                                filename))
return True
def to_file(self, filename, tunnel_addr, tunnel_port)
Writes memory dump to a local file :type filename: str :param filename: memory dump output filename :type tunnel_addr: str :param tunnel_addr: ssh tunnel hostname or ip :type tunnel_port: int :param tunnel_port: ssh tunnel port
2.458319
2.504362
0.981615
if self.progressbar:
    try:
        self.bar.update(self.transfered)
    except Exception as e:
        logger.debug("{0}: {1}, {2} exceeds memsize {3}".format(
            self.remote_addr, e, self.transfered, self.max_size))
    if complete:
        self.bar.update(self.max_size)
        self.bar.finish()
else:
    percent = int(100 * float(self.transfered) / float(self.max_size))
    # print a message at 10%, 20%, etc...
    if percent % 10 == 0:
        if self.progress != percent:
            logger.info("{0}: capture {1}% complete".format(
                self.remote_addr, percent))
            self.progress = percent
def update_progress(self, complete=False)
Logs capture progress :type complete: bool :params complete: toggle to finish ncurses progress bar
3.777926
3.864919
0.977492
if self.sock is not None:
    self.sock.close()
if self.outfile is not None:
    self.outfile.close()
if self.bar is not None:
    self.update_progress(complete=True)
def cleanup(self)
Release resources used during memory capture
4.219525
4.072774
1.036032
if self.gpg_verify:
    logger.debug("gpg verification enabled, initializing gpg")
    gpg_home = os.path.expanduser('~/.gnupg')
    self.gpg = gnupg.GPG(gnupghome=gpg_home)
    self.key_path, self.key_info = self.get_signing_key()
    logger.debug("{0} {1}".format(self.key_path, self.key_info))
    self.check_signing_key()
def init_gpg(self)
Initialize gpg object and check if repository signing key is trusted
3.077661
2.786882
1.104338
tmp_key_path = "/tmp/{0}".format(self.repo_signing_key)
tmp_metadata_path = "/tmp/{0}".format(self.key_metadata)
repo_key_path = "{0}/{1}".format(self.url, self.repo_signing_key)
repo_metadata_path = "{0}/{1}".format(self.url, self.key_metadata)

req_key = requests.get(repo_key_path)
req_metadata = requests.get(repo_metadata_path)

# Fetch the key to disk
if req_key.status_code == 200:
    logger.debug(("found repository signing key at "
                  "{0}".format(repo_key_path)))
    self.raw_key = req_key.content
    with open(tmp_key_path, 'wb') as f:
        f.write(self.raw_key)
else:
    raise RepositoryMissingSigningKeyError(repo_key_path)

# Fetch the fingerprint from the metadata
if req_metadata.status_code == 200:
    logger.debug(("found key metadata at "
                  "{0}".format(repo_metadata_path)))
    key_info = json.loads(req_metadata.content.decode('utf-8'))
else:
    raise RepositoryMissingKeyMetadataError(repo_metadata_path)

return (tmp_key_path, key_info)
def get_signing_key(self)
Download a local copy of repo signing key for installation
2.585096
2.489487
1.038405
user_keys = self.gpg.list_keys()

if len(user_keys) > 0:
    trusted = False
    for key in user_keys:
        if key['fingerprint'] == self.key_info['fingerprint']:
            trusted = True
            logger.debug(("repo signing key trusted in user keyring, "
                          "fingerprint {0}".format(key['fingerprint'])))
else:
    trusted = False

if trusted is False:
    repo_key_url = "{0}/{1}".format(self.url, self.repo_signing_key)
    raise RepositoryUntrustedSigningKeyError(repo_key_url,
                                             self.key_info['fingerprint'])
def check_signing_key(self)
Check that repo signing key is trusted by gpg keychain
3.782913
3.162547
1.19616
print(self.key_info)
repo_key_url = "{0}/{1}".format(self.url, self.repo_signing_key)
print(("warning: Repository key untrusted \n"
       "Importing GPG key 0x{0}:\n"
       " Userid: \"{1}\"\n"
       " From : {2}".format(self.key_info['fingerprint'],
                            self.key_info['uids'][0],
                            repo_key_url)))
response = prompt(u'Is this ok: [y/N] ')

if response == 'y':
    self.install_key(self.raw_key)
    return True
else:
    return False
def prompt_for_install(self)
Prompt user to install untrusted repo signing key
5.652788
4.972608
1.136785
logger.info(("importing repository signing key {0} "
             "{1}".format(self.key_info['fingerprint'],
                          self.key_info['uids'][0])))
import_result = self.gpg.import_keys(key_data)
logger.debug("import results: {0}".format(import_result.results))
def install_key(self, key_data)
Install untrusted repo signing key
5.328328
4.508336
1.181884
metadata = self.get_metadata()
logger.debug("parsed metadata: {0}".format(metadata))
manifest = self.get_manifest(metadata['manifests'][manifest_type])

try:
    module = manifest[kernel_version]
    logger.debug("found module {0}".format(module))
except KeyError:
    raise KernelModuleNotFoundError(kernel_version, self.url)

path = self.fetch_module(module)
return path
def fetch(self, kernel_version, manifest_type)
Search repository for kernel module matching kernel_version :type kernel_version: str :param kernel_version: kernel version to search repository on :type manifest_type: str :param manifest_type: kernel module manifest to search on
3.950704
3.895552
1.014158
metadata_path = "{}/{}/{}".format(self.url, self.metadata_dir,
                                  self.metadata_file)
metadata_sig_path = "{}/{}/{}.sig".format(self.url.rstrip('/'),
                                          self.metadata_dir,
                                          self.metadata_file)

# load metadata
req = requests.get(metadata_path)
if req.status_code == 200:
    raw_metadata = req.content
else:
    raise RepositoryError(metadata_path, ("status code not 200: "
                                          "{}".format(req.status_code)))

if self.gpg_verify:
    self.verify_data_signature(metadata_sig_path, metadata_path,
                               raw_metadata)

return self.parse_metadata(raw_metadata)
def get_metadata(self)
Fetch repository repomd.xml file
3.241595
3.038123
1.066973
try:
    metadata = dict()
    mdata = xmltodict.parse(metadata_xml)['metadata']
    metadata['revision'] = mdata['revision']
    metadata['manifests'] = dict()

    # check if multiple manifests are present
    if type(mdata['data']) is list:
        manifests = mdata['data']
    else:
        manifests = [mdata['data']]

    for manifest in manifests:
        manifest_dict = dict()
        manifest_dict['type'] = manifest['@type']
        manifest_dict['checksum'] = manifest['checksum']
        manifest_dict['open_checksum'] = manifest['open_checksum']
        manifest_dict['location'] = manifest['location']['@href']
        manifest_dict['timestamp'] = datetime.fromtimestamp(
            int(manifest['timestamp']))
        manifest_dict['size'] = int(manifest['size'])
        manifest_dict['open_size'] = int(manifest['open_size'])
        metadata['manifests'][manifest['@type']] = manifest_dict
except Exception as e:
    raise RepositoryError("{0}/{1}/{2}".format(self.url, self.metadata_dir,
                                               self.metadata_file), e)
return metadata
def parse_metadata(self, metadata_xml)
Parse repomd.xml file :type metadata_xml: str :param metadata_xml: raw xml representation of repomd.xml
2.615275
2.596328
1.007297
manifest_path = "{0}/{1}".format(self.url, metadata['location'])
req = requests.get(manifest_path, stream=True)
if req.status_code == 200:
    gz_manifest = req.raw.read()

self.verify_checksum(gz_manifest, metadata['checksum'],
                     metadata['location'])
manifest = self.unzip_manifest(gz_manifest)
self.verify_checksum(manifest, metadata['open_checksum'],
                     metadata['location'].rstrip('.gz'))
return self.parse_manifest(manifest)
def get_manifest(self, metadata)
Get latest manifest as specified in repomd.xml :type metadata: dict :param metadata: dictionary representation of repomd.xml
3.743744
3.906025
0.958454
buf = BytesIO(raw_manifest)
f = gzip.GzipFile(fileobj=buf)
manifest = f.read()
return manifest
def unzip_manifest(self, raw_manifest)
Decompress gzip encoded manifest :type raw_manifest: str :param raw_manifest: compressed gzip manifest file content
3.47102
3.682117
0.94267
manifest = dict()
try:
    mdata = xmltodict.parse(manifest_xml)['modules']['module']
    for module in mdata:
        mod = dict()
        mod['type'] = module['@type']
        mod['name'] = module['name']
        mod['arch'] = module['arch']
        mod['checksum'] = module['checksum']
        mod['version'] = module['version']
        mod['packager'] = module['packager']
        mod['location'] = module['location']['@href']
        mod['signature'] = module['signature']['@href']
        mod['platform'] = module['platform']
        manifest[mod['version']] = mod
except Exception:
    raise
return manifest
def parse_manifest(self, manifest_xml)
Parse manifest xml file :type manifest_xml: str :param manifest_xml: raw xml content of manifest file
2.529219
2.63149
0.961136
tm = int(time.time())
datestamp = datetime.utcfromtimestamp(tm).isoformat()
filename = "lime-{0}-{1}.ko".format(datestamp, module['version'])
url = "{0}/{1}".format(self.url, module['location'])
logger.info("downloading {0} as {1}".format(url, filename))

req = requests.get(url, stream=True)
with open(filename, 'wb') as f:
    f.write(req.raw.read())

self.verify_module(filename, module, self.gpg_verify)
return filename
def fetch_module(self, module)
Download and verify kernel module :type module: dict :param module: kernel module manifest entry (name, version, location, checksum, signature)
3.442024
3.379702
1.01844
with open(filename, 'rb') as f:
    module_data = f.read()

self.verify_checksum(module_data, module['checksum'],
                     module['location'])

if self.gpg_verify:
    signature_url = "{0}/{1}".format(self.url, module['signature'])
    file_url = "{0}/{1}".format(self.url, module['location'])
    self.verify_file_signature(signature_url, file_url, filename)
def verify_module(self, filename, module, verify_signature)
Verify kernel module checksum and signature :type filename: str :param filename: downloaded kernel module path :type module: dict :param module: kernel module metadata :type verify_signature: bool :param verify_signature: enable/disable signature verification
3.258243
3.143055
1.036648
calculated_checksum = hashlib.sha256(data).hexdigest()
logger.debug("calculated checksum {0} for {1}".format(calculated_checksum,
                                                      filename))
if calculated_checksum != checksum:
    raise RepositoryError("{0}/{1}".format(self.url, filename),
                          ("checksum verification failed, expected "
                           "{0} got {1}".format(checksum,
                                                calculated_checksum)))
def verify_checksum(self, data, checksum, filename)
Verify sha256 checksum vs calculated checksum :type data: str :param data: data used to calculate checksum :type checksum: str :param checksum: expected checksum of data :type filename: str :param filename: original filename
3.207325
3.518197
0.911639
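A standalone sketch of the same checksum check using only hashlib; the exception here is a plain ValueError rather than the library's RepositoryError, and the payload is an illustrative stand-in:

import hashlib

def verify_checksum(data, expected, filename):
    # Compare the sha256 of the payload against the published checksum
    calculated = hashlib.sha256(data).hexdigest()
    if calculated != expected:
        raise ValueError("checksum verification failed for {0}: expected {1} "
                         "got {2}".format(filename, expected, calculated))

payload = b"lime module bytes"
verify_checksum(payload, hashlib.sha256(payload).hexdigest(), "lime.ko")
print("checksum ok")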
req = requests.get(signature_url)
if req.status_code == 200:
    tm = int(time.time())
    datestamp = datetime.utcfromtimestamp(tm).isoformat()
    sigfile = "repo-{0}-tmp.sig".format(datestamp)
    logger.debug("writing {0} to {1}".format(signature_url, sigfile))
    with open(sigfile, 'wb') as f:
        f.write(req.content)
else:
    raise RepositoryMissingSignatureError(signature_url)

verified = self.gpg.verify_data(sigfile, data)

try:
    os.remove(sigfile)
except OSError:
    pass

if verified.valid is True:
    logger.debug("verified {0} against {1}".format(data_url,
                                                   signature_url))
else:
    raise RepositorySignatureError(data_url, signature_url)
def verify_data_signature(self, signature_url, data_url, data)
Verify data against its remote signature :type signature_url: str :param signature_url: remote path to signature for data_url :type data_url: str :param data_url: url from which data was fetched :type data: str :param data: content of remote file at data_url
2.785059
2.756694
1.010289
req = requests.get(signature_url, stream=True)
if req.status_code == 200:
    sigfile = req.raw
else:
    raise RepositoryMissingSignatureError(signature_url)

verified = self.gpg.verify_file(sigfile, filename)

if verified.valid is True:
    logger.debug("verified {0} against {1}".format(filename,
                                                   signature_url))
else:
    raise RepositorySignatureError(file_url, signature_url)
def verify_file_signature(self, signature_url, file_url, filename)
Verify a local file against its remote signature :type signature_url: str :param signature_url: remote path to signature for file_url :type file_url: str :param file_url: url from which file at filename was fetched :type filename: str :param filename: filename of local file downloaded from file_url
3.186564
3.262406
0.976753
if tick.time == '':
    return

event = Event(type_=EVENT_TINY_TICK)
event.dict_['data'] = tick
self._event_engine.put(event)
def _notify_new_tick_event(self, tick)
Push a new tick event
7.54716
5.989641
1.260035
if not self._market_opened:
    return
event = Event(type_=EVENT_QUOTE_CHANGE)
event.dict_['data'] = tiny_quote
self._rt_tiny_quote[tiny_quote.symbol] = tiny_quote
self._event_engine.put(event)
def _notify_quote_change_event(self, tiny_quote)
Push a quote change event
4.939542
4.223138
1.169638
for ix, row in data.iterrows():
    symbol = row['code']
    tick = self._tick_dict.get(symbol, None)
    if not tick:
        tick = TinyQuoteData()
        tick.symbol = symbol
        self._tick_dict[symbol] = tick
    tick.date = row['data_date'].replace('-', '')
    tick.time = row['data_time']
    # with GLOBAL.dt_lock:
    if tick.date and tick.time:
        tick.datetime = datetime.strptime(' '.join([tick.date, tick.time]), '%Y%m%d %H:%M:%S')
    else:
        return
    tick.openPrice = row['open_price']
    tick.highPrice = row['high_price']
    tick.lowPrice = row['low_price']
    tick.preClosePrice = row['prev_close_price']
    # price spread added in 1.25 to help compute the correct order submission price;
    # requires NiuNiu client v3.42.4961.125 or later
    if 'price_spread' in row:
        tick.priceSpread = row['price_spread']
    tick.lastPrice = row['last_price']
    tick.volume = row['volume']
    new_tick = copy(tick)
    self._notify_new_tick_event(new_tick)
def process_quote(self, data)
Quote push
4.030687
3.830492
1.052263
symbol = data['code']
tick = self._tick_dict.get(symbol, None)
if not tick:
    tick = TinyQuoteData()
    tick.symbol = symbol
    self._tick_dict[symbol] = tick
d = tick.__dict__
for i in range(5):
    bid_data = data['Bid'][i]
    ask_data = data['Ask'][i]
    n = i + 1
    d['bidPrice%s' % n] = bid_data[0]
    d['bidVolume%s' % n] = bid_data[1]
    d['askPrice%s' % n] = ask_data[0]
    d['askVolume%s' % n] = ask_data[1]
new_tick = copy(tick)
self._notify_new_tick_event(new_tick)
def process_orderbook(self, data)
Order book push
2.633316
2.578897
1.021102
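A standalone sketch of the same flattening idea — copying five levels of bid/ask data into numbered attributes through the instance __dict__ — with made-up price levels:

class Quote(object):
    pass

data = {
    'Bid': [(100.0, 500), (99.9, 300), (99.8, 200), (99.7, 100), (99.6, 50)],
    'Ask': [(100.1, 400), (100.2, 250), (100.3, 150), (100.4, 80), (100.5, 40)],
}

quote = Quote()
d = quote.__dict__
for i in range(5):
    n = i + 1
    d['bidPrice%s' % n], d['bidVolume%s' % n] = data['Bid'][i]
    d['askPrice%s' % n], d['askVolume%s' % n] = data['Ask'][i]

print(quote.bidPrice1, quote.askVolume5)  # 100.0 40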
# each push only contains a single symbol + ktype
bars_data = []
symbol = ""
ktype = ""
for ix, row in data.iterrows():
    symbol = row['code']
    ktype = row['k_type']
    bar = TinyBarData()
    bar.open = row['open']
    bar.close = row['close']
    bar.high = row['high']
    bar.low = row['low']
    bar.volume = row['volume']
    bar.symbol = symbol
    # with GLOBAL.dt_lock:
    bar.datetime = datetime.strptime(str(row['time_key']), "%Y-%m-%d %H:%M:%S")
    bars_data.append(bar)
# print('process_curkline - ktype=%s symbol=%s' % (ktype, symbol))
if not bars_data or not symbol or not ktype:
    return
event = Event(type_=EVENT_CUR_KLINE_PUSH)
event.dict_['bars_data'] = bars_data
event.dict_['symbol'] = symbol
event.dict_['ktype'] = ktype
self._event_engine.put(event)
def process_curkline(self, data)
Real-time K-line data push
3.555325
3.427727
1.037225
ret_type = rsp_pb.retType
ret_msg = rsp_pb.retMsg
if ret_type != RET_OK:
    return RET_ERROR, ret_msg, None
res = {}
if rsp_pb.HasField('s2c'):
    res['server_version'] = rsp_pb.s2c.serverVer
    res['login_user_id'] = rsp_pb.s2c.loginUserID
    res['conn_id'] = rsp_pb.s2c.connID
    res['conn_key'] = rsp_pb.s2c.connAESKey
    res['keep_alive_interval'] = rsp_pb.s2c.keepAliveInterval
else:
    return RET_ERROR, "rsp_pb error", None
return RET_OK, "", res
def unpack_rsp(cls, rsp_pb)
Unpack the init connect response
2.665846
2.482751
1.073747
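Outside of protobuf, the same (ret_code, msg, data) unpacking convention can be sketched with a plain dict standing in for the response message; the field names below mirror the ones above but the helper itself is illustrative only:

RET_OK, RET_ERROR = 0, -1

def unpack_connect_rsp(rsp):
    # rsp is a plain dict standing in for the protobuf response
    if rsp.get('retType') != RET_OK:
        return RET_ERROR, rsp.get('retMsg', ''), None
    s2c = rsp.get('s2c')
    if s2c is None:
        return RET_ERROR, "rsp error", None
    res = {
        'server_version': s2c['serverVer'],
        'conn_id': s2c['connID'],
        'keep_alive_interval': s2c['keepAliveInterval'],
    }
    return RET_OK, "", res

print(unpack_connect_rsp({'retType': 0, 'retMsg': '',
                          's2c': {'serverVer': 100, 'connID': 1, 'keepAliveInterval': 10}}))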
if rsp_pb.retType != RET_OK:
    return RET_ERROR, rsp_pb.retMsg, None
return RET_OK, "", None
def unpack_unsubscribe_rsp(cls, rsp_pb)
Unpack the unsubscribe response
4.657768
4.294046
1.084704
currentFolder = os.getcwd()
currentJsonPath = os.path.join(currentFolder, name)
if os.path.isfile(currentJsonPath):
    return currentJsonPath
else:
    moduleFolder = os.path.abspath(os.path.dirname(moduleFile))
    moduleJsonPath = os.path.join(moduleFolder, '.', name)
    return moduleJsonPath
def getJsonPath(name, moduleFile)
Get the path of the JSON configuration file:
1. First look for the JSON file in the current working directory
2. If it is not found there, look in the directory where the module is located
2.255021
2.101379
1.073115
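A self-contained version of that lookup order (current working directory first, then the module's own directory), using this script's __file__ and a hypothetical file name:

import os

def find_json(name, module_file):
    # prefer the current working directory
    cwd_path = os.path.join(os.getcwd(), name)
    if os.path.isfile(cwd_path):
        return cwd_path
    # fall back to the directory containing the module
    return os.path.join(os.path.abspath(os.path.dirname(module_file)), name)

print(find_json("setting.json", __file__))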
ret, data = self._trade_ctx.place_order(price=price, qty=volume, code=symbol,
                                        trd_side=ft.TrdSide.BUY, order_type=order_type,
                                        adjust_limit=adjust_limit, trd_env=self._env_type,
                                        acc_id=acc_id)
if ret != ft.RET_OK:
    return ret, data
order_id = 0
for ix, row in data.iterrows():
    order_id = str(row['order_id'])
return ret, order_id
def buy(self, price, volume, symbol, order_type=ft.OrderType.NORMAL, adjust_limit=0, acc_id=0)
Place a buy order
2.725494
2.796842
0.97449
ret, data = self._trade_ctx.modify_order(ft.ModifyOrderOp.CANCEL, order_id=order_id,
                                         qty=0, price=0, adjust_limit=0,
                                         trd_env=self._env_type, acc_id=acc_id)
# when ret is not RET_OK, data is an error string
if ret == ft.RET_OK:
    return ret, ''
else:
    return ret, data
def cancel_order(self, order_id, acc_id=0)
Cancel an order
6.491694
6.357588
1.021094
ret, data = self._trade_ctx.order_list_query(order_id=order_id, status_filter_list=[],
                                             code='', start='', end='',
                                             trd_env=self._env_type, acc_id=acc_id)
if ret != ft.RET_OK:
    return ret, data
order = TinyTradeOrder()
for ix, row in data.iterrows():
    if order_id != str(row['order_id']):
        continue
    order.symbol = row['code']
    order.order_id = order_id
    order.direction = row['trd_side']
    order.price = float(row['price'])
    order.total_volume = int(row['qty'])
    order.trade_volume = int(row['dealt_qty'])
    order.create_time = row['create_time']
    order.updated_time = row['updated_time']
    order.trade_avg_price = float(row['dealt_avg_price']) if row['dealt_avg_price'] else 0
    order.order_status = row['order_status']
    break
return ret, order
def get_tiny_trade_order(self, order_id, acc_id=0)
Get order information
2.857558
2.833004
1.008667
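The per-row copy into a plain order object can be sketched with pandas alone; the DataFrame below is fabricated to mimic the columns the trade context returns, and the Order class is a stand-in for TinyTradeOrder:

import pandas as pd

class Order(object):
    pass

data = pd.DataFrame([{
    'order_id': 1234, 'code': 'HK.00700', 'trd_side': 'BUY',
    'price': '350.0', 'qty': '100', 'dealt_qty': '100', 'dealt_avg_price': '349.8',
}])

order = Order()
for _, row in data.iterrows():
    order.symbol = row['code']
    order.order_id = str(row['order_id'])
    order.direction = row['trd_side']
    order.price = float(row['price'])
    order.total_volume = int(row['qty'])
    order.trade_volume = int(row['dealt_qty'])
    order.trade_avg_price = float(row['dealt_avg_price']) if row['dealt_avg_price'] else 0
    break

print(order.symbol, order.price, order.trade_volume)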
ret, data = self._trade_ctx.position_list_query(code=symbol, trd_env=self._env_type, acc_id=acc_id)
if ret != 0:
    return None
for _, row in data.iterrows():
    if row['code'] != symbol:
        continue
    pos = TinyPosition()
    pos.symbol = symbol
    pos.position = row['qty']
    pos.frozen = pos.position - row['can_sell_qty']
    pos.price = row['cost_price']
    pos.market_value = row['market_val']
    return pos
return None
def get_tiny_position(self, symbol, acc_id=0)
Get the stock position
3.474982
3.312977
1.0489
with open(self.settingfilePath, 'rb') as f:
    df = f.read()
if type(df) is not str:
    df = ft.str_utf8(df)
self._global_settings = json.loads(df)

if self._global_settings is None or 'frame' not in self._global_settings:
    raise Exception("setting.json - no frame config!")

# apply the frame settings
frame_setting = self._global_settings['frame']
d = self.__dict__
for key in d.keys():
    if key in frame_setting.keys():
        d[key] = frame_setting[key]

# check paramlist
# for key in d.keys():
#     if d[key] is None:
#         str_error = "setting.json - 'frame' config no key:'%s'" % key
#         raise Exception(str_error)

# check _env_type / market
env_list = [ft.TrdEnv.REAL, ft.TrdEnv.SIMULATE]
if self._env_type not in env_list:
    str_error = "setting.json - 'frame' config '_env_type' can only be '{}'".format(','.join(env_list))
    raise Exception(str_error)

market_list = [ft.Market.HK, ft.Market.US]
if self._market not in market_list:
    str_error = "setting.json - 'frame' config '_market' can only be '{}'".format(','.join(market_list))
    raise Exception(str_error)

return True
def __loadSetting(self)
Load the strategy configuration
3.205628
3.131042
1.023821
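The "apply the frame section onto the instance __dict__" step in isolation, using an inline JSON string instead of setting.json; the Frame class and its fields are placeholders:

import json

class Frame(object):
    def __init__(self):
        self._env_type = None
        self._market = None

settings = json.loads('{"frame": {"_env_type": "SIMULATE", "_market": "HK"}}')
frame_setting = settings['frame']

frame = Frame()
for key in list(frame.__dict__.keys()):
    if key in frame_setting:
        frame.__dict__[key] = frame_setting[key]

print(frame._env_type, frame._market)  # SIMULATE HK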
SysConfig.CLINET_ID = client_id
SysConfig.CLIENT_VER = client_ver
def set_client_info(cls, client_id, client_ver)
.. py:function:: set_client_info(cls, client_id, client_ver)

Set the client info used when calling the api; calling this interface is optional.

:param client_id: str, client identifier
:param client_ver: int, client version number
:return: None

:example:

.. code:: python

    from futuquant import *
    SysConfig.set_client_info("MyFutuQuant", 0)
    quote_ctx = OpenQuoteContext(host='127.0.0.1', port=11111)
    quote_ctx.close()
7.537393
6.70531
1.124093
# auto subscriber
resub_count = 0
subtype_list = []
code_list = []

resub_dict = copy(self._ctx_subscribe)
subtype_all_cnt = len(resub_dict.keys())
subtype_cur_cnt = 0

ret_code = RET_OK
ret_msg = ''

for subtype in resub_dict.keys():
    subtype_cur_cnt += 1
    code_set = resub_dict[subtype]
    code_list_new = [code for code in code_set]
    if len(code_list_new) == 0:
        continue

    if len(code_list) == 0:
        code_list = code_list_new
        subtype_list = [subtype]

    is_need_sub = False
    if code_list == code_list_new:
        if subtype not in subtype_list:
            subtype_list.append(subtype)   # merge subtype requests
    else:
        ret_code, ret_msg = self._reconnect_subscribe(code_list, subtype_list)
        logger.debug("reconnect subscribe code_count={} ret_code={} ret_msg={} subtype_list={} code_list={}".format(
            len(code_list), ret_code, ret_msg, subtype_list, code_list))
        if ret_code != RET_OK:
            break
        resub_count += len(code_list)
        code_list = code_list_new
        subtype_list = [subtype]

    # the loop is about to finish
    if subtype_cur_cnt == subtype_all_cnt and len(code_list):
        ret_code, ret_msg = self._reconnect_subscribe(code_list, subtype_list)
        logger.debug("reconnect subscribe code_count={} ret_code={} ret_msg={} subtype_list={} code_list={}".format(
            len(code_list), ret_code, ret_msg, subtype_list, code_list))
        if ret_code != RET_OK:
            break
        resub_count += len(code_list)
        code_list = []
        subtype_list = []

logger.debug("reconnect subscribe all code_count={} ret_code={} ret_msg={}".format(resub_count, ret_code, ret_msg))

# resubscribe failed, reconnect
if ret_code != RET_OK:
    logger.error("reconnect subscribe error, close connect and retry!!")
    self._status = ContextStatus.Start
    self._wait_reconnect()
return ret_code, ret_msg
def on_api_socket_reconnected(self)
Callback for when the API socket has been reconnected
2.668232
2.655956
1.004622
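The batching idea — merging subscription types that share exactly the same code list so a single request covers several types — can be sketched independently of the quote context; the function and sample data below are illustrative only:

def batch_resubscribe(subscriptions):
    # subscriptions: dict of subtype -> set of codes
    batches = []  # list of (code_list, subtype_list) pairs
    for subtype, codes in subscriptions.items():
        code_list = sorted(codes)
        if not code_list:
            continue
        for existing_codes, subtypes in batches:
            if existing_codes == code_list:
                subtypes.append(subtype)   # merge subtypes sharing the same codes
                break
        else:
            batches.append((code_list, [subtype]))
    return batches

print(batch_resubscribe({'TICKER': {'HK.00700'}, 'QUOTE': {'HK.00700'}, 'K_DAY': {'HK.00001'}}))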
if market is None or is_str(market) is False:
    error_str = ERROR_STR_PREFIX + "the type of market param is wrong"
    return RET_ERROR, error_str

ret, msg, start, end = normalize_start_end_date(start, end, 365)
if ret != RET_OK:
    return ret, msg

query_processor = self._get_sync_query_processor(
    TradeDayQuery.pack_req, TradeDayQuery.unpack_rsp)

# the keys of kargs should correspond to the actual function arguments
kargs = {
    'market': market,
    'start_date': start,
    'end_date': end,
    'conn_id': self.get_sync_conn_id()
}

ret_code, msg, trade_day_list = query_processor(**kargs)
if ret_code != RET_OK:
    return RET_ERROR, msg

return RET_OK, trade_day_list
def get_trading_days(self, market, start=None, end=None)
Get trading days

:param market: market type, Market_
:param start: start date, e.g. '2018-01-01'
:param end: end date, e.g. '2018-01-01'

    start and end can be combined as follows:

    ========== ========== ========================================
    start type end type   description
    ========== ========== ========================================
    str        str        start and end are the specified dates
    None       str        start is 365 days before end
    str        None       end is 365 days after start
    None       None       end is the current date, start is 365 days before end
    ========== ========== ========================================

:return: (RET_OK, data) on success, where data is a list of date strings; (RET_ERROR, data) on failure, where data is an error message string
3.457942
3.313507
1.04359
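A usage sketch in the same style as the library's other examples; the host/port and date range are placeholders and assume a running FutuOpenD gateway:

from futuquant import *

quote_ctx = OpenQuoteContext(host='127.0.0.1', port=11111)
ret, data = quote_ctx.get_trading_days(Market.HK, start='2018-01-01', end='2018-06-30')
if ret == RET_OK:
    print(data)        # list of trading-day strings
else:
    print('error:', data)
quote_ctx.close()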
param_table = {'market': market, 'stock_type': stock_type}
for x in param_table:
    param = param_table[x]
    if param is None or is_str(param) is False:
        error_str = ERROR_STR_PREFIX + "the type of %s param is wrong" % x
        return RET_ERROR, error_str

if code_list is not None:
    if is_str(code_list):
        code_list = code_list.split(',')
    elif isinstance(code_list, list):
        pass
    else:
        return RET_ERROR, "code list must be like ['HK.00001', 'HK.00700'] or 'HK.00001,HK.00700'"

query_processor = self._get_sync_query_processor(
    StockBasicInfoQuery.pack_req, StockBasicInfoQuery.unpack_rsp)
kargs = {
    "market": market,
    'stock_type': stock_type,
    'code_list': code_list,
    'conn_id': self.get_sync_conn_id()
}

ret_code, msg, basic_info_list = query_processor(**kargs)
if ret_code != RET_OK:
    return ret_code, msg

col_list = [
    'code', 'name', 'lot_size', 'stock_type', 'stock_child_type', 'stock_owner',
    'option_type', 'strike_time', 'strike_price', 'suspension',
    'listing_date', 'stock_id', 'delisting'
]

basic_info_table = pd.DataFrame(basic_info_list, columns=col_list)

return RET_OK, basic_info_table
def get_stock_basicinfo(self, market, stock_type=SecurityType.STOCK, code_list=None)
Get basic information of securities of a specific type in the specified market

:param market: market type, futuquant.common.constant.Market
:param stock_type: security type, futuquant.common.constant.SecurityType
:param code_list: if not None, an iterable of stock codes; only information for the specified stocks is returned
:return: (ret_code, content); when ret_code equals RET_OK, content is a Pandas.DataFrame, otherwise it is an error message string. The columns are:

    ================= =========== ==============================================================================
    field             type        description
    ================= =========== ==============================================================================
    code              str         stock code
    name              str         name
    lot_size          int         shares per lot
    stock_type        str         security type, see SecurityType
    stock_child_type  str         warrant subtype, see WrtType
    stock_owner       str         code of the underlying stock
    option_type       str         option type, Qot_Common.OptionType
    strike_time       str         exercise date
    strike_price      float       strike price
    suspension        bool        whether trading is suspended (True means suspended)
    listing_date      str         listing date
    stock_id          int         stock id
    delisting         bool        whether delisted
    ================= =========== ==============================================================================

:example:

.. code-block:: python

    from futuquant import *
    quote_ctx = OpenQuoteContext(host='127.0.0.1', port=11111)
    print(quote_ctx.get_stock_basicinfo(Market.HK, SecurityType.WARRANT))
    print(quote_ctx.get_stock_basicinfo(Market.US, SecurityType.DRVT, 'US.AAPL190621C140000'))
    quote_ctx.close()
2.822995
2.280389
1.237945