code (string) | signature (string) | docstring (string) | loss_without_docstring (float64) | loss_with_docstring (float64) | factor (float64)
---|---|---|---|---|---
client = self.grr_api.Client(client_id)
print('Checking for client approval')
self._check_approval_wrapper(client, client.ListFlows)
print('{0:s}: Client approval is valid'.format(client_id))
return client.Get() | def _get_client_by_id(self, client_id) | Get a GRR client and make sure valid approvals exist.
Args:
client_id: GRR client ID.
Returns:
GRR API Client object | 9.836886 | 7.859332 | 1.251619 |
# Start the flow and get the flow ID
flow = self._check_approval_wrapper(
client, client.CreateFlow, name=name, args=args)
flow_id = flow.flow_id
print('{0:s}: Scheduled'.format(flow_id))
if self.keepalive:
keepalive_flow = client.CreateFlow(
name='KeepAlive', args=flows_pb2.KeepAliveArgs())
print('KeepAlive flow {0:s} scheduled'.format(keepalive_flow.flow_id))
return flow_id | def _launch_flow(self, client, name, args) | Create specified flow, setting KeepAlive if requested.
Args:
client: GRR Client object on which to launch the flow.
name: string containing flow name.
args: proto (*FlowArgs) for type of flow, as defined in GRR flow proto.
Returns:
string containing ID of launched flow | 5.05423 | 4.754291 | 1.063088 |
# Wait for the flow to finish
print('{0:s}: Waiting to finish'.format(flow_id))
while True:
try:
status = client.Flow(flow_id).Get().data
except grr_errors.UnknownError:
msg = 'Unable to stat flow {0:s} for host {1:s}'.format(
flow_id, client.data.os_info.fqdn.lower())
self.state.add_error(msg)
raise DFTimewolfError(msg)
if status.state == flows_pb2.FlowContext.ERROR:
# TODO(jbn): If one artifact fails, what happens? Test.
message = status.context.backtrace
if 'ArtifactNotRegisteredError' in status.context.backtrace:
message = status.context.backtrace.split('\n')[-2]
raise DFTimewolfError(
'{0:s}: FAILED! Message from GRR:\n{1:s}'.format(
flow_id, message))
if status.state == flows_pb2.FlowContext.TERMINATED:
print('{0:s}: Complete'.format(flow_id))
break
time.sleep(self._CHECK_FLOW_INTERVAL_SEC) | def _await_flow(self, client, flow_id) | Awaits flow completion.
Args:
client: GRR Client object on which to await the flow.
flow_id: string containing ID of flow to await.
Raises:
DFTimewolfError: if flow error encountered. | 3.749147 | 3.549272 | 1.056314 |
output_file_path = os.path.join(
self.output_path, '.'.join((flow_id, 'zip')))
if os.path.exists(output_file_path):
print('{0:s} already exists: Skipping'.format(output_file_path))
return None
flow = client.Flow(flow_id)
file_archive = flow.GetFilesArchive()
file_archive.WriteToFile(output_file_path)
# Unzip archive for processing and remove redundant zip
fqdn = client.data.os_info.fqdn.lower()
client_output_file = os.path.join(self.output_path, fqdn)
if not os.path.isdir(client_output_file):
os.makedirs(client_output_file)
with zipfile.ZipFile(output_file_path) as archive:
archive.extractall(path=client_output_file)
os.remove(output_file_path)
return client_output_file | def _download_files(self, client, flow_id) | Download files from the specified flow.
Args:
client: GRR Client object from which to download flow data.
flow_id: GRR flow ID.
Returns:
str: path of downloaded files. | 2.97468 | 2.999208 | 0.991822 |
super(GRRArtifactCollector, self).setup(
reason, grr_server_url, grr_username, grr_password, approvers=approvers,
verify=verify)
if artifacts is not None:
self.artifacts = [item.strip() for item in artifacts.strip().split(',')]
if extra_artifacts is not None:
self.extra_artifacts = [item.strip() for item
in extra_artifacts.strip().split(',')]
self.hostnames = [item.strip() for item in hosts.strip().split(',')]
self.use_tsk = use_tsk | def setup(self,
hosts, artifacts, extra_artifacts, use_tsk,
reason, grr_server_url, grr_username, grr_password, approvers=None,
verify=True) | Initializes a GRR artifact collector.
Args:
hosts: Comma-separated list of hostnames to launch the flow on.
artifacts: Comma-separated list of GRR-defined artifacts.
extra_artifacts: Comma-separated list of GRR-defined artifacts to append.
use_tsk: toggle for use_tsk flag on GRR flow.
reason: justification for GRR access.
grr_server_url: GRR server URL.
grr_username: GRR username.
grr_password: GRR password.
approvers: list of GRR approval recipients.
verify: boolean, whether to verify the GRR server's x509 certificate. | 1.975714 | 2.105472 | 0.938371 |
system_type = client.data.os_info.system
print('System type: {0:s}'.format(system_type))
# If the list is supplied by the user via a flag, honor that.
artifact_list = []
if self.artifacts:
print('Artifacts to be collected: {0!s}'.format(self.artifacts))
artifact_list = list(self.artifacts)  # Copy so extend() below cannot mutate self.artifacts
else:
default_artifacts = self.artifact_registry.get(system_type, None)
if default_artifacts:
print('Collecting default artifacts for {0:s}: {1:s}'.format(
system_type, ', '.join(default_artifacts)))
artifact_list.extend(default_artifacts)
if self.extra_artifacts:
print('Throwing in an extra {0!s}'.format(self.extra_artifacts))
artifact_list.extend(self.extra_artifacts)
artifact_list = list(set(artifact_list))
if not artifact_list:
return
flow_args = flows_pb2.ArtifactCollectorFlowArgs(
artifact_list=artifact_list,
use_tsk=self.use_tsk,
ignore_interpolation_errors=True,
apply_parsers=False)
flow_id = self._launch_flow(client, 'ArtifactCollectorFlow', flow_args)
self._await_flow(client, flow_id)
collected_flow_data = self._download_files(client, flow_id)
if collected_flow_data:
print('{0!s}: Downloaded: {1:s}'.format(flow_id, collected_flow_data))
fqdn = client.data.os_info.fqdn.lower()
self.state.output.append((fqdn, collected_flow_data)) | def _process_thread(self, client) | Process a single GRR client.
Args:
client: a GRR client object. | 3.31213 | 3.226986 | 1.026385 |
threads = []
for client in self.find_clients(self.hostnames):
print(client)
thread = threading.Thread(target=self._process_thread, args=(client, ))
threads.append(thread)
thread.start()
for thread in threads:
thread.join() | def process(self) | Collect the artifacts.
Raises:
DFTimewolfError: if no artifacts are specified or resolved for the platform. | 3.298447 | 3.788714 | 0.870598 |
super(GRRFileCollector, self).setup(
reason, grr_server_url, grr_username, grr_password,
approvers=approvers, verify=verify)
if files is not None:
self.files = [item.strip() for item in files.strip().split(',')]
self.hostnames = [item.strip() for item in hosts.strip().split(',')]
self.use_tsk = use_tsk | def setup(self,
hosts, files, use_tsk,
reason, grr_server_url, grr_username, grr_password, approvers=None,
verify=True) | Initializes a GRR file collector.
Args:
hosts: Comma-separated list of hostnames to launch the flow on.
files: Comma-separated list of file paths.
use_tsk: toggle for use_tsk flag on GRR flow.
reason: justification for GRR access.
grr_server_url: GRR server URL.
grr_username: GRR username.
grr_password: GRR password.
approvers: list of GRR approval recipients.
verify: boolean, whether to verify the GRR server's x509 certificate. | 2.245237 | 2.371803 | 0.946637 |
file_list = self.files
if not file_list:
return
print('Filefinder to collect {0:d} items'.format(len(file_list)))
flow_action = flows_pb2.FileFinderAction(
action_type=flows_pb2.FileFinderAction.DOWNLOAD)
flow_args = flows_pb2.FileFinderArgs(
paths=file_list,
action=flow_action,)
flow_id = self._launch_flow(client, 'FileFinder', flow_args)
self._await_flow(client, flow_id)
collected_flow_data = self._download_files(client, flow_id)
if collected_flow_data:
print('{0!s}: Downloaded: {1:s}'.format(flow_id, collected_flow_data))
fqdn = client.data.os_info.fqdn.lower()
self.state.output.append((fqdn, collected_flow_data)) | def _process_thread(self, client) | Process a single client.
Args:
client: GRR client object to act on. | 4.047043 | 3.997283 | 1.012449 |
super(GRRFlowCollector, self).setup(
reason, grr_server_url, grr_username, grr_password,
approvers=approvers, verify=verify)
self.flow_id = flow_id
self.host = host | def setup(self,
host, flow_id,
reason, grr_server_url, grr_username, grr_password, approvers=None,
verify=True) | Initializes a GRR flow collector.
Args:
host: hostname of machine.
flow_id: ID of GRR flow to retrieve.
reason: justification for GRR access.
grr_server_url: GRR server URL.
grr_username: GRR username.
grr_password: GRR password.
approvers: list of GRR approval recipients.
verify: boolean, whether to verify the GRR server's x509 certificate. | 2.235923 | 2.529938 | 0.883786 |
client = self._get_client_by_hostname(self.host)
self._await_flow(client, self.flow_id)
collected_flow_data = self._download_files(client, self.flow_id)
if collected_flow_data:
print('{0:s}: Downloaded: {1:s}'.format(
self.flow_id, collected_flow_data))
fqdn = client.data.os_info.fqdn.lower()
self.state.output.append((fqdn, collected_flow_data)) | def process(self) | Collect the results.
Raises:
DFTimewolfError: if a flow error is encountered. | 5.254604 | 5.221612 | 1.006318 |
for disk in self.disks_to_copy:
print("Disk copy of {0:s} started...".format(disk.name))
snapshot = disk.snapshot()
new_disk = self.analysis_project.create_disk_from_snapshot(
snapshot, disk_name_prefix="incident" + self.incident_id)
self.analysis_vm.attach_disk(new_disk)
snapshot.delete()
print("Disk {0:s} successfully copied to {1:s}".format(
disk.name, new_disk.name))
self.state.output.append((self.analysis_vm.name, new_disk)) | def process(self) | Copy a disk to the analysis project. | 4.31725 | 3.59875 | 1.199653 |
def setup(self, timezone=None): # pylint: disable=arguments-differ
self._timezone = timezone
self._output_path = tempfile.mkdtemp() | Sets up the _timezone attribute.
Args:
timezone: Timezone name (optional) | null | null | null |
for description, path in self.state.input:
log_file_path = os.path.join(self._output_path, 'plaso.log')
print('Log file: {0:s}'.format(log_file_path))
# Build the plaso command line.
cmd = ['log2timeline.py']
# Since we might be running alongside another Module, always disable
# the status view.
cmd.extend(['-q', '--status_view', 'none'])
if self._timezone:
cmd.extend(['-z', self._timezone])
# Analyze all available partitions.
cmd.extend(['--partition', 'all'])
# Set up logging.
cmd.extend(['--logfile', log_file_path])
# And now, the crux of the command.
# Generate a new storage file for each plaso run
plaso_storage_file_path = os.path.join(
self._output_path, '{0:s}.plaso'.format(uuid.uuid4().hex))
cmd.extend([plaso_storage_file_path, path])
# Run the l2t command
full_cmd = ' '.join(cmd)
print('Running external command: "{0:s}"'.format(full_cmd))
try:
l2t_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, error = l2t_proc.communicate()
l2t_status = l2t_proc.wait()
if l2t_status:
message = ('The log2timeline command {0:s} failed: {1:s}.'
' Check log file for details.').format(full_cmd, error)
self.state.add_error(message, critical=True)
self.state.output.append((description, plaso_storage_file_path))
except OSError as exception:
self.state.add_error(exception, critical=True)
# Catch all remaining errors since we want to gracefully report them
except Exception as exception: # pylint: disable=broad-except
self.state.add_error(exception, critical=True) | def process(self) | Execute the Plaso process. | 3.539784 | 3.400314 | 1.041017 |
grr_auth = (grr_username, grr_password)
self.approvers = []
if approvers:
self.approvers = [item.strip() for item in approvers.strip().split(',')]
self.grr_api = grr_api.InitHttp(api_endpoint=grr_server_url,
auth=grr_auth,
verify=verify)
self.output_path = tempfile.mkdtemp()
self.reason = reason | def setup(self, reason, grr_server_url, grr_username, grr_password,
approvers=None, verify=True) | Initializes a GRR hunt result collector.
Args:
reason: justification for GRR access.
grr_server_url: GRR server URL.
grr_username: GRR username.
grr_password: GRR password.
approvers: list of GRR approval recipients.
verify: boolean, whether to verify the GRR server's x509 certificate. | 2.765352 | 3.21432 | 0.860323 |
approval_sent = False
while True:
try:
return grr_function(*args, **kwargs)
except grr_errors.AccessForbiddenError as exception:
print('No valid approval found: {0!s}'.format(exception))
# If approval was already sent, just wait a bit more.
if approval_sent:
print('Approval not yet granted, waiting {0:d}s'.format(
self._CHECK_APPROVAL_INTERVAL_SEC))
time.sleep(self._CHECK_APPROVAL_INTERVAL_SEC)
continue
# If no approvers were specified, abort.
if not self.approvers:
message = ('GRR needs approval but no approvers specified '
'(hint: use --approvers)')
self.state.add_error(message, critical=True)
return None
# Otherwise, send a request for approval
grr_object.CreateApproval(
reason=self.reason, notified_users=self.approvers)
approval_sent = True
print('{0!s}: approval request sent to: {1!s} (reason: {2:s})'.format(
grr_object, self.approvers, self.reason)) | def _check_approval_wrapper(self, grr_object, grr_function, *args, **kwargs) | Wraps a call to GRR functions checking for approval.
Args:
grr_object: the GRR object to create the eventual approval on.
grr_function: The GRR function requiring approval.
*args: Positional arguments that are to be passed to `grr_function`.
**kwargs: Keyword arguments that are to be passed to `grr_function`.
Returns:
The return value of the execution of grr_function(*args, **kwargs). | 3.382266 | 3.575059 | 0.946073 |
if isinstance(node, nodes.reference) and 'refuri' in node:
r = node['refuri']
if r.endswith('.md'):
r = r[:-3] + '.html'
node['refuri'] = r
else:
match = anchor_regex.match(r)
if match:
node['refuri'] = '{0:s}.html#{1:s}'.format(
match.group('uri'), match.group('anchor'))
return node | def find_replace(self, node) | Parses URIs containing .md and replaces them with their HTML page. | 2.983513 | 2.733358 | 1.091519 |
self.find_replace(node)
for c in node.children:
self.traverse(c) | def traverse(self, node) | Traverse the document tree rooted at node.
node : docutils node
current root node to traverse | 6.9217 | 10.174533 | 0.680297 |
session = requests.Session()
session.verify = False  # Depends on whether the SSL cert is verifiable
try:
response = session.get(self.host_url)
except requests.exceptions.ConnectionError:
return False
# Get the CSRF token from the response
soup = BeautifulSoup(response.text, 'html.parser')
csrf_token = soup.find('input', dict(name='csrf_token'))['value']
login_data = dict(username=username, password=password)
session.headers.update({
'x-csrftoken': csrf_token,
'referer': self.host_url
})
_ = session.post('{0:s}/login/'.format(self.host_url), data=login_data)
return session | def _create_session(self, username, password) | Create HTTP session.
Args:
username (str): Timesketch username
password (str): Timesketch password
Returns:
requests.Session: Session object. | 2.829787 | 2.933803 | 0.964546 |
resource_url = '{0:s}/sketches/'.format(self.api_base_url)
form_data = {'name': name, 'description': description}
response = self.session.post(resource_url, json=form_data)
response_dict = response.json()
sketch_id = response_dict['objects'][0]['id']
return sketch_id | def create_sketch(self, name, description) | Create a new sketch with the specified name and description.
Args:
name (str): Title of sketch
description (str): Description of sketch
Returns:
int: ID of created sketch | 2.48702 | 2.644213 | 0.940552 |
resource_url = '{0:s}/upload/'.format(self.api_base_url)
files = {'file': open(plaso_storage_path, 'rb')}
data = {'name': timeline_name}
response = self.session.post(resource_url, files=files, data=data)
try:
response_dict = response.json()
except ValueError:
raise RuntimeError(
'Could not decode JSON response from Timesketch'
' (Status {0:d}):\n{1:s}'.format(
response.status_code, response.content))
index_id = response_dict['objects'][0]['id']
return index_id | def upload_timeline(self, timeline_name, plaso_storage_path) | Create a timeline with the specified name from the given plaso file.
Args:
timeline_name (str): Name of timeline
plaso_storage_path (str): Local path of plaso file to be uploaded
Returns:
int: ID of uploaded timeline
Raises:
RuntimeError: When the JSON response from Timesketch cannot be decoded. | 2.551878 | 2.533663 | 1.007189 |
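A minimal usage sketch tying together the Timesketch client methods above, assuming the `TimesketchApiClient(endpoint, username, password)` constructor shown later in this table; the endpoint, credentials, and plaso path are placeholders.

```python
# Hypothetical usage; endpoint, credentials, and file path are placeholders.
client = TimesketchApiClient(
    'https://timesketch.example.com', 'analyst', 'secret')
sketch_id = client.create_sketch(
    'Incident 1234', 'Sketch generated by dfTimewolf')
index_id = client.upload_timeline('workstation-01', '/tmp/case.plaso')
client.add_timeline_to_sketch(sketch_id, index_id)
```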
# Export processed timeline(s)
for timeline_name, artifact_path in processed_artifacts:
print('Uploading {0:s} to timeline {1:s}'.format(
artifact_path, timeline_name))
new_timeline_id = self.upload_timeline(timeline_name, artifact_path)
self.add_timeline_to_sketch(sketch_id, new_timeline_id)
return sketch_id | def export_artifacts(self, processed_artifacts, sketch_id) | Upload provided artifacts to the specified sketch.
Args:
processed_artifacts: List of (timeline_name, artifact_path) tuples
sketch_id: ID of sketch to append the timeline to
Returns:
int: ID of sketch. | 4.404483 | 2.690451 | 1.63708 |
resource_url = '{0:s}/sketches/{1:d}/timelines/'.format(
self.api_base_url, sketch_id)
form_data = {'timeline': [index_id]}
self.session.post(resource_url, json=form_data) | def add_timeline_to_sketch(self, sketch_id, index_id) | Associate the specified timeline and sketch.
Args:
sketch_id (int): ID of sketch
index_id (int): ID of timeline to add to sketch | 3.389595 | 3.704189 | 0.915071 |
resource_url = '{0:s}/sketches/{1:d}/'.format(self.api_base_url, sketch_id)
response = self.session.get(resource_url)
response_dict = response.json()
try:
response_dict['objects']
except KeyError:
raise ValueError('Sketch does not exist or you have no access')
return response_dict | def get_sketch(self, sketch_id) | Get information on the specified sketch.
Args:
sketch_id (int): ID of sketch
Returns:
dict: Dictionary of sketch information
Raises:
ValueError: Sketch is inaccessible | 3.200015 | 3.212234 | 0.996196 |
for attribute_name, attribute_value in attributes.items():
# Not using startswith to improve performance.
if attribute_name[0] == '_':
continue
setattr(self, attribute_name, attribute_value) | def copy_from_dict(self, attributes) | Copies the attribute container from a dictionary.
Args:
attributes (dict[str, object]): attribute values per name. | 4.125113 | 4.394165 | 0.938771 |
attribute_names = []
for attribute_name in iter(self.__dict__.keys()):
# Not using startswith to improve performance.
if attribute_name[0] == '_':
continue
attribute_names.append(attribute_name)
return attribute_names | def get_attribute_names(self) | Retrieves the names of all attributes.
Returns:
list[str]: attribute names. | 4.342344 | 4.278827 | 1.014845 |
for attribute_name, attribute_value in iter(self.__dict__.items()):
# Not using startswith to improve performance.
if attribute_name[0] == '_' or attribute_value is None:
continue
yield attribute_name, attribute_value | def get_attributes(self) | Retrieves the attribute names and values.
Attributes that are set to None are ignored.
Yields:
tuple[str, object]: attribute name and value. | 4.977164 | 4.256918 | 1.169194 |
attributes = []
for attribute_name, attribute_value in sorted(self.__dict__.items()):
# Not using startswith to improve performance.
if attribute_name[0] == '_' or attribute_value is None:
continue
if isinstance(attribute_value, dict):
attribute_value = sorted(attribute_value.items())
elif isinstance(attribute_value, six.binary_type):
attribute_value = repr(attribute_value)
attribute_string = '{0:s}: {1!s}'.format(attribute_name, attribute_value)
attributes.append(attribute_string)
return ', '.join(attributes) | def get_attribute_values_string(self) | Retrieves a comparable string of the attribute values.
Returns:
str: comparable string of the attribute values. | 2.665066 | 2.61463 | 1.01929 |
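A short sketch of the attribute-container API above, assuming the methods belong to an `AttributeContainer` base class as the docstrings suggest; `EventContainer` is a hypothetical subclass used only for illustration.

```python
# EventContainer is a hypothetical subclass for illustration only.
class EventContainer(AttributeContainer):
  CONTAINER_TYPE = 'event'

container = EventContainer()
# Underscore-prefixed keys are skipped by copy_from_dict().
container.copy_from_dict({'hostname': 'ws-01', 'pid': 1234, '_secret': 'x'})
print(container.get_attribute_names())          # ['hostname', 'pid']
print(container.get_attribute_values_string())  # hostname: ws-01, pid: 1234
```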
def setup(self, keywords=None): # pylint: disable=arguments-differ
self._keywords = keywords
self._output_path = tempfile.mkdtemp() | Sets up the _keywords attribute.
Args:
keywords: pipe-separated list of keywords to search for | null | null | null |
for _, path in self.state.input:
log_file_path = os.path.join(self._output_path, 'grepper.log')
print('Log file: {0:s}'.format(log_file_path))
print('Walking through dir (absolute) = ' + os.path.abspath(path))
try:
for root, _, files in os.walk(path):
for filename in files:
found = set()
fullpath = '{0:s}/{1:s}'.format(os.path.abspath(root), filename)
if mimetypes.guess_type(filename)[0] == 'application/pdf':
found = self.grepPDF(fullpath)
else:
with open(fullpath, 'r') as fp:
for line in fp:
found.update(set(x.lower() for x in re.findall(
self._keywords, line, re.IGNORECASE)))
if any(found):
output = '{0:s}/{1:s}:{2:s}'.format(path, filename, ','.join(
filter(None, found)))
if self._final_output:
self._final_output += '\n' + output
else:
self._final_output = output
print(output)
except OSError as exception:
self.state.add_error(exception, critical=True)
return
# Catch all remaining errors since we want to gracefully report them
except Exception as exception: # pylint: disable=broad-except
self.state.add_error(exception, critical=True)
return | def process(self) | Execute the grep command | 3.505843 | 3.416236 | 1.02623 |
with open(path, 'rb') as pdf_file_obj:
match = set()
text = ''
pdf_reader = PyPDF2.PdfFileReader(pdf_file_obj)
pages = pdf_reader.numPages
for page in range(pages):
page_obj = pdf_reader.getPage(page)
text += '\n' + page_obj.extractText()
match.update(set(x.lower() for x in re.findall(
self._keywords, text, re.IGNORECASE)))
return match | def grepPDF(self, path) | Parse PDF files text content for keywords.
Args:
path: PDF file path.
Returns:
match: set of unique occurrences of every match. | 2.54913 | 2.189927 | 1.164025 |
self.recipe = recipe
for module_description in recipe['modules']:
# Combine CLI args with args from the recipe description
module_name = module_description['name']
module = self.config.get_module(module_name)(self)
self._module_pool[module_name] = module | def load_recipe(self, recipe) | Populates the internal module pool with modules declared in a recipe.
Args:
recipe: Dict, recipe declaring modules to load. | 6.59587 | 5.648457 | 1.167729 |
with self._store_lock:
self.store.setdefault(container.CONTAINER_TYPE, []).append(container) | def store_container(self, container) | Thread-safe method to store data in the state's store.
Args:
container (containers.interface.AttributeContainer): The data to store. | 6.575713 | 6.017489 | 1.092767 |
with self._store_lock:
return self.store.get(container_class.CONTAINER_TYPE, []) | def get_containers(self, container_class) | Thread-safe method to retrieve data from the state's store.
Args:
container_class: AttributeContainer class used to filter data.
Returns:
A list of AttributeContainer objects of matching CONTAINER_TYPE. | 8.145687 | 7.672685 | 1.061648 |
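The two methods above implement a lock-guarded dict of lists keyed by `CONTAINER_TYPE`; a self-contained restatement of that pattern:

```python
import threading

class Store(object):
  """Minimal restatement of the thread-safe container store above."""

  def __init__(self):
    self._store_lock = threading.Lock()
    self.store = {}

  def store_container(self, container):
    with self._store_lock:
      self.store.setdefault(container.CONTAINER_TYPE, []).append(container)

  def get_containers(self, container_class):
    with self._store_lock:
      return self.store.get(container_class.CONTAINER_TYPE, [])
```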
def _setup_module_thread(module_description):
new_args = utils.import_args_from_dict(
module_description['args'], vars(args), self.config)
module = self._module_pool[module_description['name']]
try:
module.setup(**new_args)
except Exception as error: # pylint: disable=broad-except
self.add_error(
'An unknown error occurred: {0!s}\nFull traceback:\n{1:s}'.format(
error, traceback.format_exc()),
critical=True)
self.events[module_description['name']] = threading.Event()
self.cleanup()
threads = []
for module_description in self.recipe['modules']:
t = threading.Thread(
target=_setup_module_thread,
args=(module_description, )
)
threads.append(t)
t.start()
for t in threads:
t.join()
self.check_errors(is_global=True) | def setup_modules(self, args) | Performs setup tasks for each module in the module pool.
Runs each declared module's setup() function in a separate thread, taking
CLI arguments into account when replacing recipe parameters for each module.
Args:
args: Command line arguments that will be used to replace the parameters
declared in the recipe. | 3.686259 | 3.340845 | 1.103391 |
def _run_module_thread(module_description):
for blocker in module_description['wants']:
self.events[blocker].wait()
module = self._module_pool[module_description['name']]
try:
module.process()
except DFTimewolfError as error:
self.add_error(error.message, critical=True)
except Exception as error: # pylint: disable=broad-except
self.add_error(
'An unknown error occurred: {0!s}\nFull traceback:\n{1:s}'.format(
error, traceback.format_exc()),
critical=True)
print('Module {0:s} completed'.format(module_description['name']))
self.events[module_description['name']].set()
self.cleanup()
threads = []
for module_description in self.recipe['modules']:
t = threading.Thread(
target=_run_module_thread,
args=(module_description, )
)
threads.append(t)
t.start()
for t in threads:
t.join()
self.check_errors(is_global=True) | def run_modules(self) | Performs the actual processing for each module in the module pool. | 3.182076 | 3.048031 | 1.043978 |
self.errors.append((error, critical)) | def add_error(self, error, critical=False) | Adds an error to the state.
Args:
error: The text that will be added to the error list.
critical: If set to True, dfTimewolf will abort when the error is checked
with check_errors.
# Move any existing errors to global errors
self.global_errors.extend(self.errors)
self.errors = []
# Make the previous module's output available to the next module
self.input = self.output
self.output = [] | def cleanup(self) | Basic cleanup after modules.
The state's output becomes the input for the next stage. Any errors are
moved to the global_errors attribute so that they can be reported at a
later stage. | 7.555292 | 4.362848 | 1.731734 |
errors = self.global_errors if is_global else self.errors
if errors:
print('dfTimewolf encountered one or more errors:')
for error, critical in errors:
print('{0:s} {1:s}'.format('CRITICAL: ' if critical else '', error))
if critical:
print('Critical error found. Aborting.')
sys.exit(-1) | def check_errors(self, is_global=False) | Checks for errors and exits if any of them are critical.
Args:
is_global: If True, check the global_errors attribute. If False, check the
errors attribute. | 4.729502 | 4.437778 | 1.065737 |
# TODO: Consider the case when multiple disks are provided by the previous
# module or by the CLI.
if project is None or turbinia_zone is None:
self.state.add_error(
'project or turbinia_zone is not specified, bailing out',
critical=True)
return
self.disk_name = disk_name
self.project = project
self.turbinia_zone = turbinia_zone
try:
turbinia_config.LoadConfig()
self.turbinia_region = turbinia_config.TURBINIA_REGION
self.instance = turbinia_config.PUBSUB_TOPIC
if turbinia_config.PROJECT != self.project:
self.state.add_error(
'Specified project {0:s} does not match Turbinia configured '
'project {1:s}. Use gcp_turbinia_import recipe to copy the disk '
'into the same project.'.format(
self.project, turbinia_config.PROJECT), critical=True)
return
self._output_path = tempfile.mkdtemp()
self.client = turbinia_client.TurbiniaClient()
except TurbiniaException as e:
self.state.add_error(e, critical=True)
return | def setup(self, disk_name, project, turbinia_zone) | Sets up the object attributes.
Args:
disk_name (string): Name of the disk to process
project (string): The project containing the disk to process
turbinia_zone (string): The zone containing the disk to process | 3.275855 | 3.360213 | 0.974895 |
print(' {0:s} ({1:s})'.format(task['name'], task['id']))
paths = task.get('saved_paths', [])
if not paths:
return
for path in paths:
if path.endswith('worker-log.txt'):
continue
if path.endswith('{0:s}.log'.format(task.get('id'))):
continue
if path.startswith('/'):
continue
print(' ' + path) | def _print_task_data(self, task) | Pretty-prints task data.
Args:
task: Task dict generated by Turbinia. | 4.048756 | 3.903944 | 1.037094 |
total_completed = 0
while True:
task_results = self.client.get_task_data(
instance, project, region, request_id=request_id, user=user)
tasks = {task['id']: task for task in task_results}
completed_tasks = set()
pending_tasks = set()
for task in tasks.values():
if task.get('successful') is not None:
completed_tasks.add(task['id'])
else:
pending_tasks.add(task['id'])
if len(completed_tasks) > total_completed or not completed_tasks:
total_completed = len(completed_tasks)
print('Task status update (completed: {0:d} | pending: {1:d})'.format(
len(completed_tasks), len(pending_tasks)))
print('Completed tasks:')
for task_id in completed_tasks:
self._print_task_data(tasks[task_id])
print('Pending tasks:')
for task_id in pending_tasks:
self._print_task_data(tasks[task_id])
if len(completed_tasks) == len(task_results) and completed_tasks:
print('All {0:d} Tasks completed'.format(len(task_results)))
return
time.sleep(poll_interval) | def display_task_progress(
self, instance, project, region, request_id=None, user=None,
poll_interval=60) | Displays the overall progress of tasks in a Turbinia job.
Args:
instance (string): The name of the Turbinia instance
project (string): The project containing the disk to process
region (string): Region where Turbinia is configured.
request_id (string): The request ID provided by Turbinia.
user (string): The username to filter tasks by.
poll_interval (int): The interval at which to poll for new results. | 2.135838 | 2.244981 | 0.951384 |
help_text = '\nAvailable recipes:\n\n'
recipes = config.Config.get_registered_recipes()
for contents, _, _ in sorted(recipes, key=lambda k: k[0]['name']):
help_text += ' {0:<35s}{1:s}\n'.format(
contents['name'], contents.get('short_description', 'No description'))
return help_text | def generate_help() | Generates help text with alphabetically sorted recipes. | 5.109621 | 4.444374 | 1.149683 |
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=generate_help())
subparsers = parser.add_subparsers()
for registered_recipe in config.Config.get_registered_recipes():
recipe, recipe_args, documentation = registered_recipe
subparser = subparsers.add_parser(
recipe['name'],
formatter_class=utils.DFTimewolfFormatterClass,
description='{0:s}'.format(documentation))
subparser.set_defaults(recipe=recipe)
for switch, help_text, default in recipe_args:
subparser.add_argument(switch, help=help_text, default=default)
# Override recipe defaults with those specified in Config
# so that they can in turn be overridden in the commandline
subparser.set_defaults(**config.Config.get_extra())
args = parser.parse_args()
recipe = args.recipe
state = DFTimewolfState(config.Config)
print('Loading recipes...')
state.load_recipe(recipe)
print('Loaded recipe {0:s} with {1:d} modules'.format(
recipe['name'], len(recipe['modules'])))
print('Setting up modules...')
state.setup_modules(args)
print('Modules successfully set up!')
print('Running modules...')
state.run_modules()
print('Recipe {0:s} executed successfully.'.format(recipe['name'])) | def main() | Main function for DFTimewolf. | 3.400961 | 3.284943 | 1.035318 |
self.timesketch_api = timesketch_utils.TimesketchApiClient(
endpoint, username, password)
self.incident_id = None
self.sketch_id = int(sketch_id) if sketch_id else None
# Check that we have a timesketch session
if not self.timesketch_api.session:
message = 'Could not connect to Timesketch server at ' + endpoint
self.state.add_error(message, critical=True)
return
if not self.sketch_id: # No sketch id is provided, create it
if incident_id:
sketch_name = 'Sketch for incident ID: ' + incident_id
else:
sketch_name = 'Untitled sketch'
sketch_description = 'Sketch generated by dfTimewolf'
self.sketch_id = self.timesketch_api.create_sketch(
sketch_name, sketch_description)
print('Sketch {0:d} created'.format(self.sketch_id)) | def setup(self, # pylint: disable=arguments-differ
endpoint=None,
username=None,
password=None,
incident_id=None,
sketch_id=None) | Setup a connection to a Timesketch server and create a sketch if needed.
Args:
endpoint: str, Timesketch endpoint (e.g. http://timesketch.com/)
username: str, Username to authenticate against the Timesketch endpoint.
password: str, Password to authenticate against the Timesketch endpoint.
incident_id: str, Incident ID or reference. Used in sketch description.
sketch_id: int, Sketch ID to add the resulting timeline to. If not
provided, a new sketch is created. | 3.175184 | 2.983594 | 1.064215 |
# This is not the best way of catching errors, but timesketch_utils will be
# deprecated soon.
# TODO(tomchop): Consider using the official Timesketch python API.
if not self.timesketch_api.session:
message = 'Could not connect to Timesketch server'
self.state.add_error(message, critical=True)
named_timelines = []
for description, path in self.state.input:
if not description:
description = 'untitled timeline for '+path
named_timelines.append((description, path))
try:
self.timesketch_api.export_artifacts(named_timelines, self.sketch_id)
except RuntimeError as e:
self.state.add_error(
'Error occurred while working with Timesketch: {0:s}'.format(str(e)),
critical=True)
return
sketch_url = self.timesketch_api.get_sketch_url(self.sketch_id)
print('Your Timesketch URL is: {0:s}'.format(sketch_url))
self.state.output = sketch_url | def process(self) | Executes a Timesketch export. | 4.706162 | 4.19876 | 1.120846 |
self._target_directory = target_directory
if not target_directory:
self._target_directory = tempfile.mkdtemp()
elif not os.path.exists(target_directory):
try:
os.makedirs(target_directory)
except OSError as exception:
message = 'An unknown error occurred: {0!s}'.format(exception)
self.state.add_error(message, critical=True) | def setup(self, target_directory=None) | Sets up the _target_directory attribute.
Args:
target_directory: Directory in which collected files will be dumped. | 3.321427 | 3.409326 | 0.974218 |
for _, path in self.state.input:
self._copy_file_or_directory(path, self._target_directory)
print('{0:s} -> {1:s}'.format(path, self._target_directory)) | def process(self) | Copies each input path into the target directory.
if os.path.isdir(source):
for item in os.listdir(source):
full_source = os.path.join(source, item)
full_destination = os.path.join(destination_directory, item)
shutil.copytree(full_source, full_destination)
else:
shutil.copy2(source, destination_directory) | def _copy_file_or_directory(self, source, destination_directory) | Recursively copies files from source to destination_directory.
Args:
source: source file or directory to copy into destination_directory
destination_directory: destination directory in which to copy source | 1.706202 | 1.824245 | 0.935292 |
if isinstance(value, six.string_types):
for match in TOKEN_REGEX.finditer(str(value)):
token = match.group(1)
if token in args:
actual_param = args[token]
if isinstance(actual_param, six.string_types):
value = value.replace("@"+token, args[token])
else:
value = actual_param
elif isinstance(value, list):
return [import_args_from_dict(item, args, config) for item in value]
elif isinstance(value, dict):
return {
key: import_args_from_dict(val, args, config)
for key, val in value.items()
}
elif isinstance(value, tuple):
return tuple(import_args_from_dict(val, args, config) for val in value)
return value | def import_args_from_dict(value, args, config) | Replaces some arguments by those specified by a key-value dictionary.
This function will be recursively called on a dictionary looking for any
value containing a "$" variable. If found, the value will be replaced
by the attribute in "args" of the same name.
It is used to load arguments from the CLI and any extra configuration
parameters passed in recipes.
Args:
value: The value of a {key: value} dictionary. This is passed recursively
and may change in nature: string, list, or dict. The top-level variable
should be the dictionary that is supposed to be recursively traversed.
args: A {key: value} dictionary used to do replacements.
config: A dftimewolf.Config class containing configuration information
Returns:
The first caller of the function will receive a dictionary in which strings
starting with "@" are replaced by the parameters in args. | 1.997655 | 1.9913 | 1.003192 |
if isinstance(value, six.string_types):
if TOKEN_REGEX.search(value):
raise ValueError('{0:s} must be replaced in dictionary'.format(value))
elif isinstance(value, list):
return [check_placeholders(item) for item in value]
elif isinstance(value, dict):
return {key: check_placeholders(val) for key, val in value.items()}
elif isinstance(value, tuple):
return tuple(check_placeholders(val) for val in value)
return value | def check_placeholders(value) | Checks if any values in a given dictionary still contain @ parameters.
Args:
value: Dictionary, list, or string that will be recursively checked for
placeholders
Raises:
ValueError: There still exists a value with an @ parameter.
Returns:
Top-level caller: a modified dict with replaced tokens.
Recursive caller: a modified object with replaced tokens. | 2.394731 | 2.438771 | 0.981942 |
reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
with open('requests_kerberos/__init__.py') as fd:
matches = list(filter(lambda x: x, map(reg.match, fd)))
if not matches:
raise RuntimeError(
'Could not find the version information for requests_kerberos'
)
return matches[0].group(1) | def get_version() | Simple function to extract the current version using regular expressions. | 2.985341 | 2.896962 | 1.030508 |
if hasattr(_negotiate_value, 'regex'):
regex = _negotiate_value.regex
else:
# There's no need to re-compile this EVERY time it is called. Compile
# it once and you won't have the performance hit of the compilation.
regex = re.compile(r'(?:.*,)*\s*Negotiate\s*([^,]*),?', re.I)
_negotiate_value.regex = regex
authreq = response.headers.get('www-authenticate', None)
if authreq:
match_obj = regex.search(authreq)
if match_obj:
return match_obj.group(1)
return None | def _negotiate_value(response) | Extracts the gssapi authentication token from the appropriate header | 4.714813 | 4.118469 | 1.144797 |
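An illustration of the header parsing above, using a stub object that provides only the attribute `_negotiate_value()` reads; the token value is a placeholder.

```python
class FakeResponse(object):
  headers = {'www-authenticate': 'Negotiate dG9rZW4=, Basic realm="x"'}

print(_negotiate_value(FakeResponse()))  # dG9rZW4=
```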
application_data = None
raw_response = response.raw
if isinstance(raw_response, HTTPResponse):
try:
if sys.version_info > (3, 0):
socket = raw_response._fp.fp.raw._sock
else:
socket = raw_response._fp.fp._sock
except AttributeError:
warnings.warn("Failed to get raw socket for CBT; has urllib3 impl changed",
NoCertificateRetrievedWarning)
else:
try:
server_certificate = socket.getpeercert(True)
except AttributeError:
pass
else:
certificate_hash = _get_certificate_hash(server_certificate)
application_data = b'tls-server-end-point:' + certificate_hash
else:
warnings.warn(
"Requests is running with a non urllib3 backend, cannot retrieve server certificate for CBT",
NoCertificateRetrievedWarning)
return application_data | def _get_channel_bindings_application_data(response) | https://tools.ietf.org/html/rfc5929 4. The 'tls-server-end-point' Channel Binding Type
Gets the application_data value for the 'tls-server-end-point' CBT Type.
This is ultimately the SHA256 hash of the certificate of the HTTPS endpoint
appended onto tls-server-end-point. This value is then passed along to the
kerberos library to bind to the auth response. If the socket is not an SSL
socket or the raw HTTP object is not a urllib3 HTTPResponse then None will
be returned and the Kerberos auth will use GSS_C_NO_CHANNEL_BINDINGS
:param response: The original 401 response from the server
:return: byte string used on the application_data.value field on the CBT struct | 4.429668 | 3.667056 | 1.207963 |
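The `_get_certificate_hash()` helper referenced above is not shown in this table; here is a hedged sketch of what it does per RFC 5929, always using SHA-256 (the real implementation selects the hash from the certificate's signature algorithm, upgrading MD5 and SHA-1 to SHA-256).

```python
import hashlib

def _get_certificate_hash(certificate_der):
  # Sketch only: hash the DER-encoded certificate with SHA-256, the
  # common case for the tls-server-end-point channel binding type.
  return hashlib.sha256(certificate_der).digest()

# application_data = b'tls-server-end-point:' + _get_certificate_hash(cert)
```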
# Flags used by kerberos module.
gssflags = kerberos.GSS_C_MUTUAL_FLAG | kerberos.GSS_C_SEQUENCE_FLAG
if self.delegate:
gssflags |= kerberos.GSS_C_DELEG_FLAG
try:
kerb_stage = "authGSSClientInit()"
# contexts still need to be stored by host, but hostname_override
# allows use of an arbitrary hostname for the kerberos exchange
# (eg, in cases of aliased hosts, internal vs external, CNAMEs
# w/ name-based HTTP hosting)
kerb_host = self.hostname_override if self.hostname_override is not None else host
kerb_spn = "{0}@{1}".format(self.service, kerb_host)
result, self.context[host] = kerberos.authGSSClientInit(kerb_spn,
gssflags=gssflags, principal=self.principal)
if result < 1:
raise EnvironmentError(result, kerb_stage)
# if we have a previous response from the server, use it to continue
# the auth process, otherwise use an empty value
negotiate_resp_value = '' if is_preemptive else _negotiate_value(response)
kerb_stage = "authGSSClientStep()"
# If this is set pass along the struct to Kerberos
if self.cbt_struct:
result = kerberos.authGSSClientStep(self.context[host],
negotiate_resp_value,
channel_bindings=self.cbt_struct)
else:
result = kerberos.authGSSClientStep(self.context[host],
negotiate_resp_value)
if result < 0:
raise EnvironmentError(result, kerb_stage)
kerb_stage = "authGSSClientResponse()"
gss_response = kerberos.authGSSClientResponse(self.context[host])
return "Negotiate {0}".format(gss_response)
except kerberos.GSSError as error:
log.exception(
"generate_request_header(): {0} failed:".format(kerb_stage))
log.exception(error)
raise KerberosExchangeError("%s failed: %s" % (kerb_stage, str(error.args)))
except EnvironmentError as error:
# ensure we raised this for translation to KerberosExchangeError
# by comparing errno to result, re-raise if not
if error.errno != result:
raise
message = "{0} failed, result: {1}".format(kerb_stage, result)
log.error("generate_request_header(): {0}".format(message))
raise KerberosExchangeError(message) | def generate_request_header(self, response, host, is_preemptive=False) | Generates the GSSAPI authentication token with kerberos.
If any GSSAPI step fails, raise KerberosExchangeError
with failure detail. | 4.134843 | 3.991853 | 1.035821 |
host = urlparse(response.url).hostname
try:
auth_header = self.generate_request_header(response, host)
except KerberosExchangeError:
# GSS Failure, return existing response
return response
log.debug("authenticate_user(): Authorization header: {0}".format(
auth_header))
response.request.headers['Authorization'] = auth_header
# Consume the content so we can reuse the connection for the next
# request.
response.content
response.raw.release_conn()
_r = response.connection.send(response.request, **kwargs)
_r.history.append(response)
log.debug("authenticate_user(): returning {0}".format(_r))
return _r | def authenticate_user(self, response, **kwargs) | Handles user authentication with gssapi/kerberos | 4.507835 | 4.152589 | 1.085548 |
log.debug("handle_401(): Handling: 401")
if _negotiate_value(response) is not None:
_r = self.authenticate_user(response, **kwargs)
log.debug("handle_401(): returning {0}".format(_r))
return _r
else:
log.debug("handle_401(): Kerberos is not supported")
log.debug("handle_401(): returning {0}".format(response))
return response | def handle_401(self, response, **kwargs) | Handles 401's, attempts to use gssapi/kerberos authentication | 3.689163 | 3.47603 | 1.061315 |
log.debug("handle_other(): Handling: %d" % response.status_code)
if self.mutual_authentication in (REQUIRED, OPTIONAL) and not self.auth_done:
is_http_error = response.status_code >= 400
if _negotiate_value(response) is not None:
log.debug("handle_other(): Authenticating the server")
if not self.authenticate_server(response):
# Mutual authentication failure when mutual auth is wanted,
# raise an exception so the user doesn't use an untrusted
# response.
log.error("handle_other(): Mutual authentication failed")
raise MutualAuthenticationError("Unable to authenticate "
"{0}".format(response))
# Authentication successful
log.debug("handle_other(): returning {0}".format(response))
self.auth_done = True
return response
elif is_http_error or self.mutual_authentication == OPTIONAL:
if not response.ok:
log.error("handle_other(): Mutual authentication unavailable "
"on {0} response".format(response.status_code))
if(self.mutual_authentication == REQUIRED and
self.sanitize_mutual_error_response):
return SanitizedResponse(response)
else:
return response
else:
# Unable to attempt mutual authentication when mutual auth is
# required, raise an exception so the user doesn't use an
# untrusted response.
log.error("handle_other(): Mutual authentication failed")
raise MutualAuthenticationError("Unable to authenticate "
"{0}".format(response))
else:
log.debug("handle_other(): returning {0}".format(response))
return response | def handle_other(self, response) | Handles all responses with the exception of 401s.
This is necessary so that we can authenticate responses if requested | 3.396447 | 3.316303 | 1.024166 |
log.debug("authenticate_server(): Authenticate header: {0}".format(
_negotiate_value(response)))
host = urlparse(response.url).hostname
try:
# If this is set pass along the struct to Kerberos
if self.cbt_struct:
result = kerberos.authGSSClientStep(self.context[host],
_negotiate_value(response),
channel_bindings=self.cbt_struct)
else:
result = kerberos.authGSSClientStep(self.context[host],
_negotiate_value(response))
except kerberos.GSSError:
log.exception("authenticate_server(): authGSSClientStep() failed:")
return False
if result < 1:
log.error("authenticate_server(): authGSSClientStep() failed: "
"{0}".format(result))
return False
log.debug("authenticate_server(): returning {0}".format(response))
return True | def authenticate_server(self, response) | Uses GSSAPI to authenticate the server.
Returns True on success, False on failure. | 3.695607 | 3.536307 | 1.045047 |
num_401s = kwargs.pop('num_401s', 0)
# Check if we have already tried to get the CBT data value
if not self.cbt_binding_tried and self.send_cbt:
# If we haven't tried, try getting it now
cbt_application_data = _get_channel_bindings_application_data(response)
if cbt_application_data:
# Only the latest version of pykerberos has this method available
try:
self.cbt_struct = kerberos.channelBindings(application_data=cbt_application_data)
except AttributeError:
# Using older version set to None
self.cbt_struct = None
# Regardless of the result, set tried to True so we don't waste time next time
self.cbt_binding_tried = True
if self.pos is not None:
# Rewind the file position indicator of the body to where
# it was to resend the request.
response.request.body.seek(self.pos)
if response.status_code == 401 and num_401s < 2:
# 401 Unauthorized. Handle it, and if it still comes back as 401,
# that means authentication failed.
_r = self.handle_401(response, **kwargs)
log.debug("handle_response(): returning %s", _r)
log.debug("handle_response() has seen %d 401 responses", num_401s)
num_401s += 1
return self.handle_response(_r, num_401s=num_401s, **kwargs)
elif response.status_code == 401 and num_401s >= 2:
# Still receiving 401 responses after attempting to handle them.
# Authentication has failed. Return the 401 response.
log.debug("handle_response(): returning 401 %s", response)
return response
else:
_r = self.handle_other(response)
log.debug("handle_response(): returning %s", _r)
return _r | def handle_response(self, response, **kwargs) | Takes the given response and tries kerberos-auth, as needed. | 4.009507 | 3.900953 | 1.027827 |
return "&".join("%s=%s" % (
k, urllib_quote(
unicode(query[k]).encode('utf-8'), safe='~'))
for k in sorted(query)) | def _quote_query(query) | Turn a dictionary into a query string in a URL, with keys
in alphabetical order. | 4.13801 | 3.928494 | 1.053332 |
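Example output, noting that `urllib_quote`/`unicode` are the Python 2 names used by this codebase (on Python 3 they correspond to `urllib.parse.quote` and `str`):

```python
print(_quote_query({'b': 'two words', 'a': 1}))
# a=1&b=two%20words
```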
query = {
'Operation': self.Operation,
'Service': "AWSECommerceService",
'Timestamp': time.strftime(
"%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
'Version': self.Version,
}
query.update(kwargs)
query['AWSAccessKeyId'] = self.AWSAccessKeyId
query['Timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%SZ",
time.gmtime())
if self.AssociateTag:
query['AssociateTag'] = self.AssociateTag
service_domain = SERVICE_DOMAINS[self.Region][0]
quoted_strings = _quote_query(query)
data = "GET\n" + service_domain + "\n/onca/xml\n" + quoted_strings
# convert unicode to UTF8 bytes for hmac library
if type(self.AWSSecretAccessKey) is unicode:
self.AWSSecretAccessKey = self.AWSSecretAccessKey.encode('utf-8')
if type(data) is unicode:
data = data.encode('utf-8')
# calculate sha256 signature
digest = hmac.new(self.AWSSecretAccessKey, data, sha256).digest()
# base64 encode and urlencode
if sys.version_info[0] == 3:
signature = urllib.parse.quote(b64encode(digest))
else:
signature = urllib.quote(b64encode(digest))
return ("https://" + service_domain + "/onca/xml?" +
quoted_strings + "&Signature=%s" % signature) | def api_url(self, **kwargs) | The URL for making the given query against the API. | 2.44539 | 2.399665 | 1.019055 |
while True: # may retry on error
api_request = urllib2.Request(
api_url, headers={"Accept-Encoding": "gzip"})
log.debug("Amazon URL: %s" % api_url)
try:
if self.Timeout and sys.version[:3] in ["2.4", "2.5"]:
# urllib2.urlopen() doesn't accept timeout until 2.6
old_timeout = socket.getdefaulttimeout()
try:
socket.setdefaulttimeout(self.Timeout)
return urllib2.urlopen(api_request)
finally:
socket.setdefaulttimeout(old_timeout)
else:
# the simple way
return urllib2.urlopen(api_request, timeout=self.Timeout)
except:
if not self.ErrorHandler:
raise
exception = sys.exc_info()[1] # works in Python 2 and 3
err = {'exception': exception}
err.update(err_env)
if not self.ErrorHandler(err):
raise | def _call_api(self, api_url, err_env) | urlopen(), plus error handling and possible retries.
err_env is a dict of additional info passed to the error handler | 3.392009 | 3.332696 | 1.017797 |
return (self is other or
(not matchTags or self.tagSet == other.tagSet) and
(not matchConstraints or self.subtypeSpec == other.subtypeSpec)) | def isSameTypeWith(self, other, matchTags=True, matchConstraints=True) | Examine |ASN.1| type for equality with other ASN.1 type.
ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
(:py:mod:`~pyasn1.type.constraint`) are examined when carrying
out ASN.1 types comparison.
Python class inheritance relationship is NOT considered.
Parameters
----------
other: a pyasn1 type object
Class instance representing ASN.1 type.
Returns
-------
: :class:`bool`
:class:`True` if *other* is |ASN.1| type,
:class:`False` otherwise. | 3.970457 | 3.957192 | 1.003352 |
return (not matchTags or
(self.tagSet.isSuperTagSetOf(other.tagSet)) and
(not matchConstraints or self.subtypeSpec.isSuperTypeOf(other.subtypeSpec))) | def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True) | Examine |ASN.1| type for subtype relationship with other ASN.1 type.
ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
(:py:mod:`~pyasn1.type.constraint`) are examined when carrying
out ASN.1 types comparison.
Python class inheritance relationship is NOT considered.
Parameters
----------
other: a pyasn1 type object
Class instance representing ASN.1 type.
Returns
-------
: :class:`bool`
:class:`True` if *other* is a subtype of |ASN.1| type,
:class:`False` otherwise. | 5.195986 | 5.469256 | 0.950035 |
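For example, a value-range-constrained Integer is a subtype of the unconstrained Integer schema, but not vice versa:

```python
from pyasn1.type import constraint, univ

bounded = univ.Integer(subtypeSpec=constraint.ValueRangeConstraint(0, 10))
print(univ.Integer().isSuperTypeOf(bounded))  # True
print(bounded.isSuperTypeOf(univ.Integer()))  # False
```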
if value is noValue:
if not kwargs:
return self
value = self._value
initializers = self.readOnly.copy()
initializers.update(kwargs)
return self.__class__(value, **initializers) | def clone(self, value=noValue, **kwargs) | Create a modified version of |ASN.1| schema or value object.
The `clone()` method accepts the same set arguments as |ASN.1|
class takes on instantiation except that all arguments
of the `clone()` method are optional.
Whatever arguments are supplied, they are used to create a copy
of `self` taking precedence over the ones used to instantiate `self`.
Note
----
Due to the immutable nature of the |ASN.1| object, if no arguments
are supplied, no new |ASN.1| object will be created and `self` will
be returned instead. | 5.332161 | 6.94839 | 0.767395 |
if value is noValue:
if not kwargs:
return self
value = self._value
initializers = self.readOnly.copy()
implicitTag = kwargs.pop('implicitTag', None)
if implicitTag is not None:
initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)
explicitTag = kwargs.pop('explicitTag', None)
if explicitTag is not None:
initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)
for arg, option in kwargs.items():
initializers[arg] += option
return self.__class__(value, **initializers) | def subtype(self, value=noValue, **kwargs) | Create a specialization of |ASN.1| schema or value object.
The subtype relationship between ASN.1 types has no correlation with
subtype relationship between Python types. ASN.1 type is mainly identified
by its tag(s) (:py:class:`~pyasn1.type.tag.TagSet`) and value range
constraints (:py:class:`~pyasn1.type.constraint.ConstraintsIntersection`).
These ASN.1 type properties are implemented as |ASN.1| attributes.
The `subtype()` method accepts the same set arguments as |ASN.1|
class takes on instantiation except that all parameters
of the `subtype()` method are optional.
With the exception of the arguments described below, the rest of
supplied arguments they are used to create a copy of `self` taking
precedence over the ones used to instantiate `self`.
The following arguments to `subtype()` create a ASN.1 subtype out of
|ASN.1| type:
Other Parameters
----------------
implicitTag: :py:class:`~pyasn1.type.tag.Tag`
Implicitly apply given ASN.1 tag object to `self`'s
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
explicitTag: :py:class:`~pyasn1.type.tag.Tag`
Explicitly apply given ASN.1 tag object to `self`'s
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Add ASN.1 constraints object to one of the `self`'s, then
use the result as new object's ASN.1 constraints.
Returns
-------
:
new instance of |ASN.1| schema or value object
Note
----
Due to the immutable nature of the |ASN.1| object, if no arguments
are supplied, no new |ASN.1| object will be created and `self` will
be returned instead. | 3.528437 | 3.063816 | 1.151648 |
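The canonical use of `subtype()` is tagging; for example, deriving `[0] IMPLICIT OCTET STRING`:

```python
from pyasn1.type import tag, univ

TaggedOctetString = univ.OctetString().subtype(
    implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
print(TaggedOctetString.tagSet)
```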
cloneValueFlag = kwargs.pop('cloneValueFlag', False)
initializers = self.readOnly.copy()
initializers.update(kwargs)
clone = self.__class__(**initializers)
if cloneValueFlag:
self._cloneComponentValues(clone, cloneValueFlag)
return clone | def clone(self, **kwargs) | Create a modified version of |ASN.1| schema object.
The `clone()` method accepts the same set arguments as |ASN.1|
class takes on instantiation except that all arguments
of the `clone()` method are optional.
Whatever arguments are supplied, they are used to create a copy
of `self` taking precedence over the ones used to instantiate `self`.
Possible values of `self` are never copied over thus `clone()` can
only create a new schema object.
Returns
-------
:
new instance of |ASN.1| type/value
Note
----
Due to the mutable nature of the |ASN.1| object, even if no arguments
are supplied, new |ASN.1| object will always be created as a shallow
copy of `self`. | 5.747307 | 7.08846 | 0.810798 |
initializers = self.readOnly.copy()
cloneValueFlag = kwargs.pop('cloneValueFlag', False)
implicitTag = kwargs.pop('implicitTag', None)
if implicitTag is not None:
initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)
explicitTag = kwargs.pop('explicitTag', None)
if explicitTag is not None:
initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)
for arg, option in kwargs.items():
initializers[arg] += option
clone = self.__class__(**initializers)
if cloneValueFlag:
self._cloneComponentValues(clone, cloneValueFlag)
return clone | def subtype(self, **kwargs) | Create a specialization of |ASN.1| schema object.
The `subtype()` method accepts the same set arguments as |ASN.1|
class takes on instantiation except that all parameters
of the `subtype()` method are optional.
With the exception of the arguments described below, the rest of
supplied arguments they are used to create a copy of `self` taking
precedence over the ones used to instantiate `self`.
The following arguments to `subtype()` create a ASN.1 subtype out of
|ASN.1| type.
Other Parameters
----------------
implicitTag: :py:class:`~pyasn1.type.tag.Tag`
Implicitly apply given ASN.1 tag object to `self`'s
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
explicitTag: :py:class:`~pyasn1.type.tag.Tag`
Explicitly apply given ASN.1 tag object to `self`'s
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Add ASN.1 constraints object to one of the `self`'s, then
use the result as new object's ASN.1 constraints.
Returns
-------
:
new instance of |ASN.1| type/value
Note
----
Due to the immutable nature of the |ASN.1| object, if no arguments
are supplied, no new |ASN.1| object will be created and `self` will
be returned instead. | 3.841818 | 2.988149 | 1.285685 |
component, asn1Spec = componentAndType
if asn1Spec is None:
asn1Spec = component
if asn1Spec.typeId == univ.Choice.typeId and not asn1Spec.tagSet:
# An untagged Choice has no tag of its own; sort by the minimal
# tag among its alternatives.
return asn1Spec.componentType.minTagSet
else:
return asn1Spec.tagSet | def _componentSortKey(componentAndType) | Sort SET components by tag
Sort regardless of the Choice value (static sort) | 3.947192 | 3.911468 | 1.009133 |
component, asn1Spec = componentAndType
if asn1Spec is None:
compType = component
else:
compType = asn1Spec
if compType.typeId == univ.Choice.typeId and not compType.tagSet:
if asn1Spec is None:
return component.getComponent().tagSet
else:
# TODO: move out of sorting key function
names = [namedType.name for namedType in asn1Spec.componentType.namedTypes
if namedType.name in component]
if len(names) != 1:
raise error.PyAsn1Error(
'%s components for Choice at %r' % (len(names) and 'Multiple ' or 'None ', component))
# TODO: support nested CHOICE ordering
return asn1Spec[names[0]].tagSet
else:
return compType.tagSet | def _componentSortKey(componentAndType) | Sort SET components by tag
Sort depending on the actual Choice value (dynamic sort) | 5.236422 | 5.092148 | 1.028333 |
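These sort keys drive SET component ordering in the BER/DER encoders. A small demonstration of the effect, using an illustrative `Pair` type and the standard pyasn1 DER encoder:
.. code-block:: python

   from pyasn1.codec.der import encoder
   from pyasn1.type import namedtype, univ

   class Pair(univ.Set):
       componentType = namedtype.NamedTypes(
           namedtype.NamedType('text', univ.OctetString()),
           namedtype.NamedType('number', univ.Integer()))

   pair = Pair()
   pair['text'] = 'x'
   pair['number'] = 1

   # DER emits SET components in tag order: INTEGER (0x02) sorts
   # before OCTET STRING (0x04) despite the declaration order
   print(encoder.encode(pair).hex())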
text = str(self)
if text.endswith('Z'):
tzinfo = TimeMixIn.UTC
text = text[:-1]
elif '-' in text or '+' in text:
if '+' in text:
text, plusminus, tz = string.partition(text, '+')
else:
text, plusminus, tz = string.partition(text, '-')
if self._shortTZ and len(tz) == 2:
tz += '00'
if len(tz) != 4:
raise error.PyAsn1Error('malformed time zone offset %s' % tz)
try:
minutes = int(tz[:2]) * 60 + int(tz[2:])
if plusminus == '-':
minutes *= -1
except ValueError:
raise error.PyAsn1Error('unknown time specification %s' % self)
tzinfo = TimeMixIn.FixedOffset(minutes, '?')
else:
tzinfo = None
if '.' in text or ',' in text:
if '.' in text:
text, _, ms = string.partition(text, '.')
else:
text, _, ms = string.partition(text, ',')
try:
ms = int(ms) * 1000
except ValueError:
raise error.PyAsn1Error('bad sub-second time specification %s' % self)
else:
ms = 0
if self._optionalMinutes and len(text) - self._yearsDigits == 6:
text += '0000'
elif len(text) - self._yearsDigits == 8:
text += '00'
try:
dt = dateandtime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
except ValueError:
raise error.PyAsn1Error('malformed datetime format %s' % self)
return dt.replace(microsecond=ms, tzinfo=tzinfo) | def asDateTime(self) | Create :py:class:`datetime.datetime` object from a |ASN.1| object.
Returns
-------
:
new instance of :py:class:`datetime.datetime` object | 2.801586 | 2.780909 | 1.007435 |
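A sketch of converting a GeneralizedTime value via `asDateTime` (exposed as a property in pyasn1):
.. code-block:: python

   from pyasn1.type import useful

   # sub-second precision plus the 'Z' UTC designator
   gt = useful.GeneralizedTime('20240131120000.5Z')
   dt = gt.asDateTime
   print(dt.isoformat())  # 2024-01-31T12:00:00.005000+00:00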
text = dt.strftime(cls._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
if cls._hasSubsecond:
text += '.%d' % (dt.microsecond // 1000)
if dt.utcoffset():
    # use total_seconds() so negative offsets work; timedelta.seconds
    # alone is always non-negative
    seconds = int(dt.utcoffset().total_seconds())
    if seconds < 0:
        text += '-'
        seconds = -seconds
    else:
        text += '+'
    # render the offset as HHMM -- hours, then minutes
    text += '%.2d%.2d' % (seconds // 3600, seconds % 3600 // 60)
else:
text += 'Z'
return cls(text) | def fromDateTime(cls, dt) | Create |ASN.1| object from a :py:class:`datetime.datetime` object.
Parameters
----------
dt: :py:class:`datetime.datetime` object
The `datetime.datetime` object to initialize the |ASN.1| object
from
Returns
-------
:
new instance of |ASN.1| value | 2.880143 | 3.029519 | 0.950693 |
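And the reverse direction, assuming a timezone-aware `datetime`; a zero UTC offset is rendered as the 'Z' designator:
.. code-block:: python

   import datetime

   from pyasn1.type import useful

   dt = datetime.datetime(2024, 1, 31, 12, 0, tzinfo=datetime.timezone.utc)
   print(useful.GeneralizedTime.fromDateTime(dt))  # 20240131120000.0Z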
try:
return self.__namedTypes[idx].asn1Object
except IndexError:
raise error.PyAsn1Error('Type position out of range') | def getTypeByPosition(self, idx) | Return ASN.1 type object by its position in fields set.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
:
ASN.1 type
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range | 7.586769 | 5.586055 | 1.358162 |
try:
return self.__tagToPosMap[tagSet]
except KeyError:
raise error.PyAsn1Error('Type %s not found' % (tagSet,)) | def getPositionByType(self, tagSet) | Return field position by its ASN.1 type.
Parameters
----------
tagSet: :class:`~pyasn1.type.tag.TagSet`
ASN.1 tag set distinguishing one ASN.1 type from others.
Returns
-------
: :py:class:`int`
ASN.1 type position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes* | 5.692911 | 4.461561 | 1.275991 |
try:
return self.__namedTypes[idx].name
except IndexError:
raise error.PyAsn1Error('Type position out of range') | def getNameByPosition(self, idx) | Return field name by its position in fields set.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
: :py:class:`str`
Field name
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range | 8.372937 | 5.974625 | 1.401416 |
try:
return self.__nameToPosMap[name]
except KeyError:
raise error.PyAsn1Error('Name %s not found' % (name,)) | def getPositionByName(self, name) | Return field position by field name.
Parameters
----------
name: :py:class:`str`
Field name
Returns
-------
: :py:class:`int`
Field position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *name* is not present or not unique within callee *NamedTypes* | 5.405634 | 4.311096 | 1.253888 |
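The lookup methods above map between names, positions, and tags of a *NamedTypes* object; a quick sketch with illustrative field names:
.. code-block:: python

   from pyasn1.type import namedtype, univ

   nts = namedtype.NamedTypes(
       namedtype.NamedType('id', univ.Integer()),
       namedtype.NamedType('label', univ.OctetString()))

   print(nts.getNameByPosition(1))                    # label
   print(nts.getPositionByName('id'))                 # 0
   print(nts.getPositionByType(univ.Integer.tagSet))  # 0
   print(repr(nts.getTypeByPosition(0)))              # Integer schema object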
try:
return self.__ambiguousTypes[idx].tagMap
except KeyError:
raise error.PyAsn1Error('Type position out of range') | def getTagMapNearPosition(self, idx) | Return ASN.1 types that are allowed at or past given field position.
Some ASN.1 serialisations allow skipping optional and defaulted fields.
Some constructed ASN.1 types allow reordering of the fields. When recovering
such objects, it may be important to know which types can possibly be
present at any given position in the field set.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
: :class:`~pyasn1.type.tagmap.TagMap`
Map of ASN.1 types allowed at given field position
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range | 12.36161 | 8.534519 | 1.448425 |
try:
return idx + self.__ambiguousTypes[idx].getPositionByType(tagSet)
except KeyError:
raise error.PyAsn1Error('Type position out of range') | def getPositionNearType(self, tagSet, idx) | Return the closest field position where given ASN.1 type is allowed.
Some ASN.1 serialisations allow skipping optional and defaulted fields.
Some constructed ASN.1 types allow reordering of the fields. When recovering
such objects, it may be important to know the field position, within the
field set, at or past *idx* where the given *tagSet* is allowed.
Parameters
----------
tagSet: :class:`~pyasn1.type.tag.TagSet`
ASN.1 type which field position to look up
idx: :py:class:`int`
Field position at or past which to perform ASN.1 type look up
Returns
-------
: :py:class:`int`
Field position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *tagSet* is not present or not unique within callee *NamedTypes*
or *idx* is out of fields range | 11.958713 | 9.866949 | 1.211997 |
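These two helpers answer "what may appear here next" when optional or defaulted components can be skipped. A sketch of the decoder-side usage, with illustrative field names:
.. code-block:: python

   from pyasn1.type import namedtype, univ

   nts = namedtype.NamedTypes(
       namedtype.OptionalNamedType('maybe', univ.Integer()),
       namedtype.NamedType('always', univ.OctetString()))

   # at position 0 either the optional INTEGER or the OCTET STRING may occur
   tagmap = nts.getTagMapNearPosition(0)
   print(univ.Integer.tagSet in tagmap)      # True
   print(univ.OctetString.tagSet in tagmap)  # True

   # the OCTET STRING's nearest allowed position at or past 0 is 1
   print(nts.getPositionNearType(univ.OctetString.tagSet, 0))  # 1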
if superTag.tagClass == tagClassUniversal:
raise error.PyAsn1Error("Can't tag with UNIVERSAL class tag")
if superTag.tagFormat != tagFormatConstructed:
superTag = Tag(superTag.tagClass, tagFormatConstructed, superTag.tagId)
return self + superTag | def tagExplicitly(self, superTag) | Return explicitly tagged *TagSet*
Create a new *TagSet* representing callee *TagSet* explicitly tagged
with passed tag(s). With explicit tagging mode, new tags are appended
to existing tag(s).
Parameters
----------
superTag: :class:`~pyasn1.type.tag.Tag`
*Tag* object to tag this *TagSet*
Returns
-------
: :class:`~pyasn1.type.tag.TagSet`
New *TagSet* object | 4.429374 | 4.849549 | 0.913358 |
if self.__superTags:
superTag = Tag(superTag.tagClass, self.__superTags[-1].tagFormat, superTag.tagId)
return self[:-1] + superTag | def tagImplicitly(self, superTag) | Return implicitly tagged *TagSet*
Create a new *TagSet* representing callee *TagSet* implicitly tagged
with passed tag(s). With implicit tagging mode, new tag(s) replace the
last existing tag.
Parameters
----------
superTag: :class:`~pyasn1.type.tag.Tag`
*Tag* object to tag this *TagSet*
Returns
-------
: :class:`~pyasn1.type.tag.TagSet`
New *TagSet* object | 7.37945 | 8.523342 | 0.865793 |
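The difference between the two tagging modes, sketched on INTEGER's tag set:
.. code-block:: python

   from pyasn1.type import tag, univ

   ctx0 = tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)

   explicit = univ.Integer.tagSet.tagExplicitly(ctx0)
   implicit = univ.Integer.tagSet.tagImplicitly(ctx0)

   # explicit tagging appends a tag, implicit tagging replaces the last one
   print(len(explicit), len(implicit))  # 2 1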
if len(tagSet) < self.__lenOfSuperTags:
return False
return self.__superTags == tagSet[:self.__lenOfSuperTags] | def isSuperTagSetOf(self, tagSet) | Test type relationship against given *TagSet*
The callee is considered to be a supertype of given *TagSet*
tag-wise if all tags in *TagSet* are present in the callee and
they are in the same order.
Parameters
----------
tagSet: :class:`~pyasn1.type.tag.TagSet`
*TagSet* object to evaluate against the callee
Returns
-------
: :py:class:`bool`
`True` if callee is a supertype of *tagSet* | 4.842012 | 8.323553 | 0.581724 |
binString = binary.bin(self._value)[2:]
return '0' * (len(self._value) - len(binString)) + binString | def asBinary(self) | Get |ASN.1| value as a text string of bits. | 5.721168 | 5.540071 | 1.032688 |
try:
value = SizedInteger(value, 16).setBitLength(len(value) * 4)
except ValueError:
raise error.PyAsn1Error('%s.fromHexString() error: %s' % (cls.__name__, sys.exc_info()[1]))
if prepend is not None:
value = SizedInteger(
(SizedInteger(prepend) << len(value)) | value
).setBitLength(len(prepend) + len(value))
if not internalFormat:
value = cls(value)
return value | def fromHexString(cls, value, internalFormat=False, prepend=None) | Create a |ASN.1| object initialized from the hex string.
Parameters
----------
value: :class:`str`
Text string like 'DEADBEEF' | 3.673495 | 3.841283 | 0.95632 |
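In practice this constructor is usually reached through BitString's `hexValue` keyword argument; a small sketch:
.. code-block:: python

   from pyasn1.type import univ

   bits = univ.BitString(hexValue='A98A')
   print(bits.asBinary())  # 1010100110001010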
value = SizedInteger(integer.from_bytes(value) >> padding).setBitLength(len(value) * 8 - padding)
if prepend is not None:
value = SizedInteger(
(SizedInteger(prepend) << len(value)) | value
).setBitLength(len(prepend) + len(value))
if not internalFormat:
value = cls(value)
return value | def fromOctetString(cls, value, internalFormat=False, prepend=None, padding=0) | Create a |ASN.1| object initialized from a string.
Parameters
----------
value: :class:`str` (Py2) or :class:`bytes` (Py3)
Text string like '\\\\x01\\\\xff' (Py2) or b'\\\\x01\\\\xff' (Py3) | 4.65842 | 6.168128 | 0.75524 |
bitNo = 8
byte = 0
r = []
for v in value:
if bitNo:
bitNo -= 1
else:
bitNo = 7
r.append(byte)
byte = 0
if v in ('0', '1'):
v = int(v)
else:
raise error.PyAsn1Error(
'Non-binary OCTET STRING initializer %s' % (v,)
)
byte |= v << bitNo
r.append(byte)
return octets.ints2octs(r) | def fromBinaryString(value) | Create a |ASN.1| object initialized from a string of '0' and '1'.
Parameters
----------
value: :class:`str`
Text string like '1010111' | 4.048182 | 4.148395 | 0.975843 |
r = []
p = []
for v in value:
if p:
r.append(int(p + v, 16))
p = None
else:
p = v
if p:
r.append(int(p + '0', 16))
return octets.ints2octs(r) | def fromHexString(value) | Create a |ASN.1| object initialized from the hex string.
Parameters
----------
value: :class:`str`
Text string like 'DEADBEEF' | 3.335186 | 3.666709 | 0.909586 |
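Similarly, OctetString reaches these helpers via its `binValue` and `hexValue` keyword arguments:
.. code-block:: python

   from pyasn1.type import univ

   s1 = univ.OctetString(hexValue='DEADBEEF')
   s2 = univ.OctetString(binValue='01000001')
   print(s1.prettyPrint())  # 0xdeadbeef
   print(s2)                # A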
length = len(self)
if length <= len(other):
    if self._value[:length] == other[:length]:
        return True
return False | def isPrefixOf(self, other) | Indicate if this |ASN.1| object is a prefix of other |ASN.1| object.
Parameters
----------
other: |ASN.1| object
|ASN.1| object
Returns
-------
: :class:`bool`
:class:`True` if this |ASN.1| object is a parent (i.e. a prefix) of the other |ASN.1| object
or :class:`False` otherwise. | 4.069942 | 5.958737 | 0.683021 |
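`isPrefixOf()` is the OBJECT IDENTIFIER parent test; for instance:
.. code-block:: python

   from pyasn1.type import univ

   internet = univ.ObjectIdentifier('1.3.6.1')
   mib2 = univ.ObjectIdentifier('1.3.6.1.2.1')

   print(internet.isPrefixOf(mib2))  # True
   print(mib2.isPrefixOf(internet)) # False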
try:
componentValue = self._componentValues[idx]
except IndexError:
if not instantiate:
return default
self.setComponentByPosition(idx)
componentValue = self._componentValues[idx]
if default is noValue or componentValue.isValue:
return componentValue
else:
return default | def getComponentByPosition(self, idx, default=noValue, instantiate=True) | Return |ASN.1| type component value by position.
Equivalent to Python sequence subscription operation (e.g. `[]`).
Parameters
----------
idx : :class:`int`
Component index (zero-based). Must either refer to an existing
component or to N+1 component (if *componentType* is set). In the latter
case a new component type gets instantiated and appended to the |ASN.1|
sequence.
Keyword Args
------------
default: :class:`object`
If set and requested component is a schema object, return the `default`
object instead of the requested component.
instantiate: :class:`bool`
If `True` (default), inner component will be automatically instantiated.
If 'False' either existing component or the `noValue` object will be
returned.
Returns
-------
: :py:class:`~pyasn1.type.base.PyAsn1Item`
Instantiate |ASN.1| component type or return existing component value
Examples
--------
.. code-block:: python
# can also be SetOf
class MySequenceOf(SequenceOf):
componentType = OctetString()
s = MySequenceOf()
# returns component #0 with `.isValue` property False
s.getComponentByPosition(0)
# returns None
s.getComponentByPosition(0, default=None)
s.clear()
# returns noValue
s.getComponentByPosition(0, instantiate=False)
# sets component #0 to OctetString() ASN.1 schema
# object and returns it
s.getComponentByPosition(0, instantiate=True)
# sets component #0 to ASN.1 value object
s.setComponentByPosition(0, 'ABCD')
# returns OctetString('ABCD') value object
s.getComponentByPosition(0, instantiate=False)
s.clear()
# returns noValue
s.getComponentByPosition(0, instantiate=False) | 3.81496 | 4.063567 | 0.938821 |
componentType = self.componentType
try:
currentValue = self._componentValues[idx]
except IndexError:
currentValue = noValue
if len(self._componentValues) < idx:
raise error.PyAsn1Error('Component index out of range')
if value is noValue:
if componentType is not None:
value = componentType.clone()
elif currentValue is noValue:
raise error.PyAsn1Error('Component type not defined')
elif not isinstance(value, base.Asn1Item):
if componentType is not None and isinstance(componentType, base.AbstractSimpleAsn1Item):
value = componentType.clone(value=value)
elif currentValue is not noValue and isinstance(currentValue, base.AbstractSimpleAsn1Item):
value = currentValue.clone(value=value)
else:
raise error.PyAsn1Error('Non-ASN.1 value %r and undefined component type at %r' % (value, self))
elif componentType is not None:
if self.strictConstraints:
if not componentType.isSameTypeWith(value, matchTags, matchConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
else:
if not componentType.isSuperTypeOf(value, matchTags, matchConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
if verifyConstraints and value.isValue:
try:
self.subtypeSpec(value, idx)
except error.PyAsn1Error:
exType, exValue, exTb = sys.exc_info()
raise exType('%s at %s' % (exValue, self.__class__.__name__))
if currentValue is noValue:
self._componentValues.append(value)
else:
self._componentValues[idx] = value
return self | def setComponentByPosition(self, idx, value=noValue,
verifyConstraints=True,
matchTags=True,
matchConstraints=True) | Assign |ASN.1| type component by position.
Equivalent to Python sequence item assignment operation (e.g. `[]`)
or list.append() (when idx == len(self)).
Parameters
----------
idx: :class:`int`
Component index (zero-based). Must either refer to existing
component or to N+1 component. In the latter case a new component
type gets instantiated (if *componentType* is set, or given ASN.1
object is taken otherwise) and appended to the |ASN.1| sequence.
Keyword Args
------------
value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
A Python value to initialize |ASN.1| component with (if *componentType* is set)
or ASN.1 value object to assign to |ASN.1| component.
verifyConstraints: :class:`bool`
If `False`, skip constraints validation
matchTags: :class:`bool`
If `False`, skip component tags matching
matchConstraints: :class:`bool`
If `False`, skip component constraints matching
Returns
-------
self
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
When idx > len(self) | 2.306982 | 2.334834 | 0.988071 |
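Assigning to `idx == len(self)` appends, as the docstring above notes; a sketch:
.. code-block:: python

   from pyasn1.type import univ

   seq = univ.SequenceOf(componentType=univ.Integer())
   seq.setComponentByPosition(0, 41)   # assigns slot 0
   seq.setComponentByPosition(1, 42)   # idx == len(seq): appends
   print([int(x) for x in seq])        # [41, 42]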
for componentValue in self._componentValues:
if componentValue is noValue or not componentValue.isValue:
return False
return True | def isValue(self) | Indicate that |ASN.1| object represents ASN.1 value.
If *isValue* is `False` then this object represents just ASN.1 schema.
If *isValue* is `True` then, in addition to its ASN.1 schema features,
this object can also be used like a Python built-in object (e.g. `int`,
`str`, `dict` etc.).
Returns
-------
: :class:`bool`
:class:`False` if object represents just ASN.1 schema.
:class:`True` if object represents ASN.1 schema and can be used as a normal value.
Note
----
There is an important distinction between PyASN1 schema and value objects.
The PyASN1 schema objects can only participate in ASN.1 schema-related
operations (e.g. defining or testing the structure of the data). The most
obvious use of an ASN.1 schema is to guide serialisation codecs whilst
encoding/decoding serialised ASN.1 content.
The PyASN1 value objects can **additionally** participate in many operations
involving regular Python objects (e.g. arithmetic, comprehension etc). | 7.780866 | 15.115597 | 0.514757 |
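The schema/value distinction in action, using the accessors shown earlier in this section:
.. code-block:: python

   from pyasn1.type import univ

   seq = univ.SequenceOf(componentType=univ.Integer())
   seq.setComponentByPosition(0)       # instantiates an Integer schema slot
   print(seq.isValue)                  # False -- component carries no value
   seq.setComponentByPosition(0, 42)
   print(seq.isValue)                  # True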
if self._componentTypeLen:
idx = self.componentType.getPositionByName(name)
else:
try:
idx = self._dynamicNames.getPositionByName(name)
except KeyError:
raise error.PyAsn1Error('Name %s not found' % (name,))
return self.getComponentByPosition(idx, default=default, instantiate=instantiate) | def getComponentByName(self, name, default=noValue, instantiate=True) | Returns |ASN.1| type component by name.
Equivalent to Python :class:`dict` subscription operation (e.g. `[]`).
Parameters
----------
name: :class:`str`
|ASN.1| type component name
Keyword Args
------------
default: :class:`object`
If set and requested component is a schema object, return the `default`
object instead of the requested component.
instantiate: :class:`bool`
If `True` (default), inner component will be automatically instantiated.
If 'False' either existing component or the `noValue` object will be
returned.
Returns
-------
: :py:class:`~pyasn1.type.base.PyAsn1Item`
Instantiate |ASN.1| component type or return existing component value | 5.046819 | 5.407474 | 0.933304 |
if self._componentTypeLen:
idx = self.componentType.getPositionByName(name)
else:
try:
idx = self._dynamicNames.getPositionByName(name)
except KeyError:
raise error.PyAsn1Error('Name %s not found' % (name,))
return self.setComponentByPosition(
idx, value, verifyConstraints, matchTags, matchConstraints
) | def setComponentByName(self, name, value=noValue,
verifyConstraints=True,
matchTags=True,
matchConstraints=True) | Assign |ASN.1| type component by name.
Equivalent to Python :class:`dict` item assignment operation (e.g. `[]`).
Parameters
----------
name: :class:`str`
|ASN.1| type component name
Keyword Args
------------
value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
A Python value to initialize |ASN.1| component with (if *componentType* is set)
or ASN.1 value object to assign to |ASN.1| component.
verifyConstraints: :class:`bool`
If `False`, skip constraints validation
matchTags: :class:`bool`
If `False`, skip component tags matching
matchConstraints: :class:`bool`
If `False`, skip component constraints matching
Returns
-------
self | 4.594246 | 5.287721 | 0.868852 |
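By-name access mirrors Python dict semantics; a sketch using an illustrative two-field SEQUENCE:
.. code-block:: python

   from pyasn1.type import namedtype, univ

   class Point(univ.Sequence):
       componentType = namedtype.NamedTypes(
           namedtype.NamedType('x', univ.Integer()),
           namedtype.NamedType('y', univ.Integer()))

   p = Point()
   p.setComponentByName('x', 1)
   p['y'] = 2  # subscription is equivalent
   print(int(p.getComponentByName('x')), int(p['y']))  # 1 2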
componentType = self.componentType
componentTypeLen = self._componentTypeLen
try:
currentValue = self._componentValues[idx]
except IndexError:
currentValue = noValue
if componentTypeLen:
    if componentTypeLen < idx:
        raise error.PyAsn1Error('component index out of range')
    if not self._componentValues:
        # allocate the fixed-size slot list on first assignment only,
        # so previously set components are not wiped out
        self._componentValues = [noValue] * componentTypeLen
if value is noValue:
if componentTypeLen:
value = componentType.getTypeByPosition(idx)
if isinstance(value, base.AbstractConstructedAsn1Item):
value = value.clone(cloneValueFlag=componentType[idx].isDefaulted)
elif currentValue is noValue:
raise error.PyAsn1Error('Component type not defined')
elif not isinstance(value, base.Asn1Item):
if componentTypeLen:
subComponentType = componentType.getTypeByPosition(idx)
if isinstance(subComponentType, base.AbstractSimpleAsn1Item):
value = subComponentType.clone(value=value)
else:
raise error.PyAsn1Error('%s can cast only scalar values' % componentType.__class__.__name__)
elif currentValue is not noValue and isinstance(currentValue, base.AbstractSimpleAsn1Item):
value = currentValue.clone(value=value)
else:
raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__)
elif (matchTags or matchConstraints) and componentTypeLen:
subComponentType = componentType.getTypeByPosition(idx)
if subComponentType is not noValue:
subtypeChecker = (self.strictConstraints and
subComponentType.isSameTypeWith or
subComponentType.isSuperTypeOf)
if not subtypeChecker(value, matchTags, matchConstraints):
if not componentType[idx].openType:
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
if verifyConstraints and value.isValue:
try:
self.subtypeSpec(value, idx)
except error.PyAsn1Error:
exType, exValue, exTb = sys.exc_info()
raise exType('%s at %s' % (exValue, self.__class__.__name__))
if componentTypeLen or idx in self._dynamicNames:
self._componentValues[idx] = value
elif len(self._componentValues) == idx:
self._componentValues.append(value)
self._dynamicNames.addField(idx)
else:
raise error.PyAsn1Error('Component index out of range')
return self | def setComponentByPosition(self, idx, value=noValue,
verifyConstraints=True,
matchTags=True,
matchConstraints=True) | Assign |ASN.1| type component by position.
Equivalent to Python sequence item assignment operation (e.g. `[]`).
Parameters
----------
idx : :class:`int`
Component index (zero-based). Must either refer to existing
component (if *componentType* is set) or to N+1 component
otherwise. In the latter case a new component of given ASN.1
type gets instantiated and appended to |ASN.1| sequence.
Keyword Args
------------
value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
A Python value to initialize |ASN.1| component with (if *componentType* is set)
or ASN.1 value object to assign to |ASN.1| component.
verifyConstraints : :class:`bool`
If `False`, skip constraints validation
matchTags: :class:`bool`
If `False`, skip component tags matching
matchConstraints: :class:`bool`
If `False`, skip component constraints matching
Returns
-------
self | 3.002614 | 3.022652 | 0.993371 |
componentType = self.componentType
if componentType:
for idx, subComponentType in enumerate(componentType.namedTypes):
if subComponentType.isDefaulted or subComponentType.isOptional:
continue
if not self._componentValues:
return False
componentValue = self._componentValues[idx]
if componentValue is noValue or not componentValue.isValue:
return False
else:
for componentValue in self._componentValues:
if componentValue is noValue or not componentValue.isValue:
return False
return True | def isValue(self) | Indicate that |ASN.1| object represents ASN.1 value.
If *isValue* is `False` then this object represents just ASN.1 schema.
If *isValue* is `True` then, in addition to its ASN.1 schema features,
this object can also be used like a Python built-in object (e.g. `int`,
`str`, `dict` etc.).
Returns
-------
: :class:`bool`
:class:`False` if object represents just ASN.1 schema.
:class:`True` if object represents ASN.1 schema and can be used as a normal value.
Note
----
There is an important distinction between PyASN1 schema and value objects.
The PyASN1 schema objects can only participate in ASN.1 schema-related
operations (e.g. defining or testing the structure of the data). The most
obvious use of an ASN.1 schema is to guide serialisation codecs whilst
encoding/decoding serialised ASN.1 content.
The PyASN1 value objects can **additionally** participate in many operations
involving regular Python objects (e.g. arithmetic, comprehension etc). | 3.546605 | 4.084386 | 0.868333 |
scope += 1
representation = self.__class__.__name__ + ':\n'
for idx, componentValue in enumerate(self._componentValues):
if componentValue is not noValue and componentValue.isValue:
representation += ' ' * scope
if self.componentType:
representation += self.componentType.getNameByPosition(idx)
else:
representation += self._dynamicNames.getNameByPosition(idx)
representation = '%s=%s\n' % (
representation, componentValue.prettyPrint(scope)
)
return representation | def prettyPrint(self, scope=0) | Return an object representation string.
Returns
-------
: :class:`str`
Human-friendly object representation. | 4.700258 | 5.190154 | 0.90561 |
componentValue = self.getComponentByPosition(
self.componentType.getPositionByType(tagSet),
default=default, instantiate=instantiate
)
if innerFlag and isinstance(componentValue, Set):
# get inner component by inner tagSet
return componentValue.getComponent(innerFlag=True)
else:
# get outer component by inner tagSet
return componentValue | def getComponentByType(self, tagSet, default=noValue,
instantiate=True, innerFlag=False) | Returns |ASN.1| type component by ASN.1 tag.
Parameters
----------
tagSet : :py:class:`~pyasn1.type.tag.TagSet`
Object representing ASN.1 tags to identify one of
|ASN.1| object component
Keyword Args
------------
default: :class:`object`
If set and requested component is a schema object, return the `default`
object instead of the requested component.
instantiate: :class:`bool`
If `True` (default), inner component will be automatically instantiated.
If 'False' either existing component or the `noValue` object will be
returned.
Returns
-------
: :py:class:`~pyasn1.type.base.PyAsn1Item`
a pyasn1 object | 4.754134 | 5.549137 | 0.856734 |
idx = self.componentType.getPositionByType(tagSet)
if innerFlag: # set inner component by inner tagSet
componentType = self.componentType.getTypeByPosition(idx)
if componentType.tagSet:
return self.setComponentByPosition(
idx, value, verifyConstraints, matchTags, matchConstraints
)
else:
componentType = self.getComponentByPosition(idx)
return componentType.setComponentByType(
tagSet, value, verifyConstraints, matchTags, matchConstraints, innerFlag=innerFlag
)
else: # set outer component by inner tagSet
return self.setComponentByPosition(
idx, value, verifyConstraints, matchTags, matchConstraints
) | def setComponentByType(self, tagSet, value=noValue,
verifyConstraints=True,
matchTags=True,
matchConstraints=True,
innerFlag=False) | Assign |ASN.1| type component by ASN.1 tag.
Parameters
----------
tagSet : :py:class:`~pyasn1.type.tag.TagSet`
Object representing ASN.1 tags to identify one of
|ASN.1| object component
Keyword Args
------------
value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
A Python value to initialize |ASN.1| component with (if *componentType* is set)
or ASN.1 value object to assign to |ASN.1| component.
verifyConstraints : :class:`bool`
If `False`, skip constraints validation
matchTags: :class:`bool`
If `False`, skip component tags matching
matchConstraints: :class:`bool`
If `False`, skip component constraints matching
innerFlag: :class:`bool`
If `True`, search for matching *tagSet* recursively.
Returns
-------
self | 2.774991 | 3.028303 | 0.916352 |
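By-type access addresses SET components through their tags rather than names or positions; a sketch with an illustrative `Msg` type:
.. code-block:: python

   from pyasn1.type import namedtype, univ

   class Msg(univ.Set):
       componentType = namedtype.NamedTypes(
           namedtype.NamedType('number', univ.Integer()),
           namedtype.NamedType('text', univ.OctetString()))

   msg = Msg()
   msg.setComponentByType(univ.Integer.tagSet, 42)
   print(int(msg.getComponentByType(univ.Integer.tagSet)))  # 42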
oldIdx = self._currentIdx
Set.setComponentByPosition(self, idx, value, verifyConstraints, matchTags, matchConstraints)
self._currentIdx = idx
if oldIdx is not None and oldIdx != idx:
self._componentValues[oldIdx] = noValue
return self | def setComponentByPosition(self, idx, value=noValue,
verifyConstraints=True,
matchTags=True,
matchConstraints=True) | Assign |ASN.1| type component by position.
Equivalent to Python sequence item assignment operation (e.g. `[]`).
Parameters
----------
idx: :class:`int`
Component index (zero-based). Must either refer to existing
component or to N+1 component. In the latter case a new component
type gets instantiated (if *componentType* is set, or given ASN.1
object is taken otherwise) and appended to the |ASN.1| sequence.
Keyword Args
------------
value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
A Python value to initialize |ASN.1| component with (if *componentType* is set)
or ASN.1 value object to assign to |ASN.1| component. Once a new value is
set to *idx* component, previous value is dropped.
verifyConstraints : :class:`bool`
If `False`, skip constraints validation
matchTags: :class:`bool`
If `False`, skip component tags matching
matchConstraints: :class:`bool`
If `False`, skip component constraints matching
Returns
-------
self | 3.430704 | 4.834318 | 0.709656 |
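Setting one CHOICE alternative drops the previously selected one, as the code above shows; a sketch with an illustrative `Status` type:
.. code-block:: python

   from pyasn1.type import namedtype, univ

   class Status(univ.Choice):
       componentType = namedtype.NamedTypes(
           namedtype.NamedType('code', univ.Integer()),
           namedtype.NamedType('text', univ.OctetString()))

   status = Status()
   status['code'] = 404
   status['text'] = 'not found'  # selecting 'text' clears 'code'
   print(status.getName())       # text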
if self.tagSet:
return self.tagSet
else:
component = self.getComponent()
return component.effectiveTagSet | def effectiveTagSet(self) | Return a :class:`~pyasn1.type.tag.TagSet` object of the currently initialized component or self (if |ASN.1| is tagged). | 4.610319 | 3.060412 | 1.506438 |
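For an untagged CHOICE the effective tags come from whichever alternative is currently set; a minimal sketch with an illustrative `Value` type:
.. code-block:: python

   from pyasn1.type import namedtype, univ

   class Value(univ.Choice):  # untagged CHOICE
       componentType = namedtype.NamedTypes(
           namedtype.NamedType('number', univ.Integer()))

   v = Value()
   v['number'] = 1
   print(v.effectiveTagSet == univ.Integer.tagSet)  # True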