code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
---|---|---|---|---|---|
super(Choice, self)._copy(other, copy_func)
self._choice = other._choice
self._name = other._name
self._parsed = copy_func(other._parsed)
|
def _copy(self, other, copy_func)
|
Copies the contents of another Choice object to itself
:param other:
Another instance of the same class
:param copy_func:
A reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects
| 4.177617 | 3.626083 | 1.152102 |
self._contents = self.chosen.dump(force=force)
if self._header is None or force:
self._header = b''
if self.explicit is not None:
for class_, tag in self.explicit:
self._header = _dump_header(class_, 1, tag, self._header + self._contents) + self._header
return self._header + self._contents
|
def dump(self, force=False)
|
Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value
| 5.587995 | 5.246825 | 1.065024 |
return cls(contents=encoded_data, strict=strict)
|
def load(cls, encoded_data, strict=False)
|
Loads a BER/DER-encoded byte string using the current class as the spec
:param encoded_data:
A byte string of BER or DER encoded data
:param strict:
A boolean indicating if trailing data should be forbidden - if so, a
ValueError will be raised when trailing data exists
:return:
A Concat object
| 7.72925 | 15.947219 | 0.484677 |
if self.__class__ != other.__class__:
raise TypeError(unwrap(
'''
Can not copy values from %s object to %s object
''',
type_name(other),
type_name(self)
))
self._children = copy_func(other._children)
|
def _copy(self, other, copy_func)
|
Copies the contents of another Concat object to itself
:param other:
Another instance of the same class
:param copy_func:
A reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects
| 4.33335 | 4.604441 | 0.941124 |
prefix = ' ' * nest_level
print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
print('%s Children:' % (prefix,))
for child in self._children:
child.debug(nest_level + 2)
|
def debug(self, nest_level=1)
|
Show the binary data and parsed data in a tree structure
| 3.745005 | 3.715415 | 1.007964 |
contents = b''
for child in self._children:
contents += child.dump(force=force)
return contents
|
def dump(self, force=False)
|
Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value
| 5.776918 | 6.138695 | 0.941066 |
if not isinstance(value, byte_cls):
raise TypeError(unwrap(
'''
%s value must be a byte string, not %s
''',
type_name(self),
type_name(value)
))
self._native = value
self.contents = value
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the object
:param value:
A byte string
| 4.753896 | 4.379154 | 1.085574 |
if force:
native = self.native
self.contents = None
self.set(native)
return Asn1Value.dump(self)
|
def dump(self, force=False)
|
Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value
| 12.326976 | 10.397621 | 1.185557 |
if not isinstance(value, str_cls):
raise TypeError(unwrap(
'''
%s value must be a unicode string, not %s
''',
type_name(self),
type_name(value)
))
self._unicode = value
self.contents = value.encode(self._encoding)
self._header = None
if self._indefinite:
self._indefinite = False
self.method = 0
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the string
:param value:
A unicode string
| 4.240726 | 4.142323 | 1.023755 |
super(AbstractString, self)._copy(other, copy_func)
self._unicode = other._unicode
|
def _copy(self, other, copy_func)
|
Copies the contents of another AbstractString object to itself
:param other:
Another instance of the same class
:param copy_func:
A reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects
| 7.728889 | 5.638606 | 1.370709 |
self._native = bool(value)
self.contents = b'\x00' if not value else b'\xff'
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the object
:param value:
True, False or another value that works with bool()
| 7.413832 | 6.636179 | 1.117184 |
if self.contents is None:
return None
if self._native is None:
self._native = self.__bool__()
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
True, False or None
| 6.586493 | 5.073801 | 1.298138 |
if isinstance(value, str_cls):
if self._map is None:
raise ValueError(unwrap(
'''
%s value is a unicode string, but no _map provided
''',
type_name(self)
))
if value not in self._reverse_map:
raise ValueError(unwrap(
'''
%s value, %s, is not present in the _map
''',
type_name(self),
value
))
value = self._reverse_map[value]
elif not isinstance(value, int_types):
raise TypeError(unwrap(
'''
%s value must be an integer or unicode string when a name_map
is provided, not %s
''',
type_name(self),
type_name(value)
))
self._native = self._map[value] if self._map and value in self._map else value
self.contents = int_to_bytes(value, signed=True)
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the object
:param value:
An integer, or a unicode string if _map is set
:raises:
ValueError - when an invalid value is passed
| 3.335223 | 3.064277 | 1.088421 |
if self.contents is None:
return None
if self._native is None:
self._native = self.__int__()
if self._map is not None and self._native in self._map:
self._native = self._map[self._native]
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
An integer or None
| 3.965048 | 3.343959 | 1.185734 |
ValueMap._setup(self)
cls = self.__class__
if cls._map is not None:
cls._size = max(self._map.keys()) + 1
|
def _setup(self)
|
Generates _reverse_map from _map
| 7.79111 | 5.954066 | 1.308536 |
if isinstance(value, set):
if self._map is None:
raise ValueError(unwrap(
'''
%s._map has not been defined
''',
type_name(self)
))
bits = [0] * self._size
self._native = value
for index in range(0, self._size):
key = self._map.get(index)
if key is None:
continue
if key in value:
bits[index] = 1
value = ''.join(map(str_cls, bits))
elif value.__class__ == tuple:
if self._map is None:
self._native = value
else:
self._native = set()
for index, bit in enumerate(value):
if bit:
name = self._map.get(index, index)
self._native.add(name)
value = ''.join(map(str_cls, value))
else:
raise TypeError(unwrap(
'''
%s value must be a tuple of ones and zeros or a set of unicode
strings, not %s
''',
type_name(self),
type_name(value)
))
self._chunk = None
if self._map is not None:
if len(value) > self._size:
raise ValueError(unwrap(
'''
%s value must be at most %s bits long, specified was %s long
''',
type_name(self),
self._size,
len(value)
))
# A NamedBitList must have trailing zero bit truncated. See
# https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
# section 11.2,
# https://tools.ietf.org/html/rfc5280#page-134 and
# https://www.ietf.org/mail-archive/web/pkix/current/msg10443.html
value = value.rstrip('0')
size = len(value)
size_mod = size % 8
extra_bits = 0
if size_mod != 0:
extra_bits = 8 - size_mod
value += '0' * extra_bits
size_in_bytes = int(math.ceil(size / 8))
if extra_bits:
extra_bits_byte = int_to_bytes(extra_bits)
else:
extra_bits_byte = b'\x00'
if value == '':
value_bytes = b''
else:
value_bytes = int_to_bytes(int(value, 2))
if len(value_bytes) != size_in_bytes:
value_bytes = (b'\x00' * (size_in_bytes - len(value_bytes))) + value_bytes
self.contents = extra_bits_byte + value_bytes
self._header = None
if self._indefinite:
self._indefinite = False
self.method = 0
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the object
:param value:
An integer or a tuple of integers 0 and 1
:raises:
ValueError - when an invalid value is passed
| 2.877932 | 2.875462 | 1.000859 |
extra_bits = int_from_bytes(self.contents[0:1])
byte_len = len(self.contents[1:])
bit_string = '' if byte_len == 0 else '{0:b}'.format(int_from_bytes(self.contents[1:]))
bit_len = len(bit_string)
# Left-pad the bit string to a byte multiple to ensure we didn't
# lose any zero bits on the left
mod_bit_len = bit_len % 8
if mod_bit_len != 0:
bit_string = ('0' * (8 - mod_bit_len)) + bit_string
bit_len = len(bit_string)
if bit_len // 8 < byte_len:
missing_bytes = byte_len - (bit_len // 8)
bit_string = ('0' * (8 * missing_bytes)) + bit_string
# Trim off the extra bits on the right used to fill the last byte
if extra_bits > 0:
bit_string = bit_string[0:0 - extra_bits]
return tuple(map(int, tuple(bit_string)))
|
def _as_chunk(self)
|
Allows reconstructing indefinite length values
:return:
A tuple of integers
| 3.178406 | 3.091122 | 1.028237 |
# For BitString we default the value to be all zeros
if self.contents is None:
if self._map is None:
self.set(())
else:
self.set(set())
if self._native is None:
bits = self._merge_chunks()
if self._map:
self._native = set()
for index, bit in enumerate(bits):
if bit:
name = self._map.get(index, index)
self._native.add(name)
else:
self._native = bits
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
If a _map is set, a set of names, or if no _map is set, a tuple of
integers 1 and 0. None if no value.
| 5.020844 | 4.015481 | 1.250372 |
super(OctetBitString, self)._copy(other, copy_func)
self._bytes = other._bytes
|
def _copy(self, other, copy_func)
|
Copies the contents of another OctetBitString object to itself
:param other:
Another instance of the same class
:param copy_func:
A reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects
| 9.546224 | 4.479647 | 2.131021 |
extra_bits = int_from_bytes(self.contents[0:1])
bit_string = '{0:b}'.format(int_from_bytes(self.contents[1:]))
# Ensure we have leading zeros since these chunks may be concatenated together
mod_bit_len = len(bit_string) % 8
if mod_bit_len != 0:
bit_string = ('0' * (8 - mod_bit_len)) + bit_string
if extra_bits > 0:
return bit_string[0:0 - extra_bits]
return bit_string
|
def _as_chunk(self)
|
Allows reconstructing indefinite length values
:return:
A unicode string of bits - 1s and 0s
| 3.810591 | 3.571173 | 1.067042 |
if self.contents is None:
return None
if self._native is None:
extra_bits = int_from_bytes(self.contents[0:1])
# Fast path
if not self._indefinite and extra_bits == 0:
self._native = int_from_bytes(self.contents[1:])
else:
if self._indefinite and extra_bits > 0:
raise ValueError('Constructed bit string has extra bits on indefinite container')
self._native = int(self._merge_chunks(), 2)
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
An integer or None
| 4.708457 | 4.522164 | 1.041195 |
if not isinstance(value, byte_cls):
raise TypeError(unwrap(
'''
%s value must be a byte string, not %s
''',
type_name(self),
type_name(value)
))
self._bytes = value
self.contents = value
self._header = None
if self._indefinite:
self._indefinite = False
self.method = 0
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the object
:param value:
A byte string
| 4.624728 | 4.329711 | 1.068138 |
super(OctetString, self)._copy(other, copy_func)
self._bytes = other._bytes
|
def _copy(self, other, copy_func)
|
Copies the contents of another OctetString object to itself
:param other:
Another instance of the same class
:param copy_func:
A reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects
| 7.490945 | 4.755952 | 1.575068 |
if not isinstance(value, int_types):
raise TypeError(unwrap(
'''
%s value must be an integer, not %s
''',
type_name(self),
type_name(value)
))
self._native = value
self.contents = int_to_bytes(value, signed=False)
self._header = None
if self._indefinite:
self._indefinite = False
self.method = 0
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the object
:param value:
An integer
:raises:
ValueError - when an invalid value is passed
| 4.542552 | 4.671026 | 0.972496 |
if self.contents is None:
return None
if self._native is None:
self._native = int_from_bytes(self._merge_chunks())
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
An integer or None
| 7.483261 | 6.340801 | 1.180176 |
if self._parsed is None or self._parsed[1:3] != (spec, spec_params):
parsed_value, _ = _parse_build(self.__bytes__(), spec=spec, spec_params=spec_params)
self._parsed = (parsed_value, spec, spec_params)
return self._parsed[0]
|
def parse(self, spec=None, spec_params=None)
|
Parses the contents generically, or using a spec with optional params
:param spec:
A class derived from Asn1Value that defines what class_ and tag the
value should have, and the semantics of the encoded value. The
return value will be of this type. If omitted, the encoded value
will be decoded using the standard universal tag based on the
encoded tag number.
:param spec_params:
A dict of params to pass to the spec object
:return:
An object of the type spec, or if not present, a child of Asn1Value
| 4.061496 | 3.9351 | 1.03212 |
super(ParsableOctetString, self)._copy(other, copy_func)
self._bytes = other._bytes
self._parsed = copy_func(other._parsed)
|
def _copy(self, other, copy_func)
|
Copies the contents of another ParsableOctetString object to itself
:param other:
Another instance of the same class
:param copy_func:
A reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects
| 5.853501 | 3.33601 | 1.754641 |
if self.contents is None:
return None
if self._parsed is not None:
return self._parsed[0].native
else:
return self.__bytes__()
|
def native(self)
|
The native Python datatype representation of this value
:return:
A byte string or None
| 7.945441 | 6.403487 | 1.240799 |
if force:
if self._parsed is not None:
native = self.parsed.dump(force=force)
else:
native = self.native
self.contents = None
self.set(native)
return Asn1Value.dump(self)
|
def dump(self, force=False)
|
Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value
| 7.642019 | 7.001025 | 1.091557 |
if cls._map is None:
raise ValueError(unwrap(
'''
%s._map has not been defined
''',
type_name(cls)
))
if not isinstance(value, str_cls):
raise TypeError(unwrap(
'''
value must be a unicode string, not %s
''',
type_name(value)
))
return cls._map.get(value, value)
|
def map(cls, value)
|
Converts a dotted unicode string OID into a mapped unicode string
:param value:
A dotted unicode string OID
:raises:
ValueError - when no _map dict has been defined on the class
TypeError - when value is not a unicode string
:return:
A mapped unicode string
| 3.437082 | 2.58042 | 1.331986 |
if cls not in _SETUP_CLASSES:
cls()._setup()
_SETUP_CLASSES[cls] = True
if cls._map is None:
raise ValueError(unwrap(
'''
%s._map has not been defined
''',
type_name(cls)
))
if not isinstance(value, str_cls):
raise TypeError(unwrap(
'''
value must be a unicode string, not %s
''',
type_name(value)
))
if value in cls._reverse_map:
return cls._reverse_map[value]
if not _OID_RE.match(value):
raise ValueError(unwrap(
'''
%s._map does not contain an entry for "%s"
''',
type_name(cls),
value
))
return value
|
def unmap(cls, value)
|
Converts a mapped unicode string value into a dotted unicode string OID
:param value:
A mapped unicode string OR dotted unicode string OID
:raises:
ValueError - when no _map dict has been defined on the class or the value can't be unmapped
TypeError - when value is not a unicode string
:return:
A dotted unicode string OID
| 3.15812 | 2.821303 | 1.119384 |
if not isinstance(value, str_cls):
raise TypeError(unwrap(
'''
%s value must be a unicode string, not %s
''',
type_name(self),
type_name(value)
))
self._native = value
if self._map is not None:
if value in self._reverse_map:
value = self._reverse_map[value]
self.contents = b''
first = None
for index, part in enumerate(value.split('.')):
part = int(part)
# The first two parts are merged into a single byte
if index == 0:
first = part
continue
elif index == 1:
part = (first * 40) + part
encoded_part = chr_cls(0x7F & part)
part = part >> 7
while part > 0:
encoded_part = chr_cls(0x80 | (0x7F & part)) + encoded_part
part = part >> 7
self.contents += encoded_part
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def set(self, value)
|
Sets the value of the object
:param value:
A unicode string. May be a dotted integer string, or if _map is
provided, one of the mapped values.
:raises:
ValueError - when an invalid value is passed
| 3.547414 | 3.374133 | 1.051356 |
if self._dotted is None:
output = []
part = 0
for byte in self.contents:
if _PY2:
byte = ord(byte)
part = part * 128
part += byte & 127
# Last byte in subidentifier has the eighth bit set to 0
if byte & 0x80 == 0:
if len(output) == 0:
output.append(str_cls(part // 40))
output.append(str_cls(part % 40))
else:
output.append(str_cls(part))
part = 0
self._dotted = '.'.join(output)
return self._dotted
|
def dotted(self)
|
:return:
A unicode string of the object identifier in dotted notation, thus
ignoring any mapped value
| 3.966115 | 3.516962 | 1.127711 |
if self.contents is None:
return None
if self._native is None:
self._native = self.dotted
if self._map is not None and self._native in self._map:
self._native = self._map[self._native]
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
A unicode string or None. If _map is not defined, the unicode string
is a string of dotted integers. If _map is defined and the dotted
string is present in the _map, the mapped value is returned.
| 4.238715 | 3.029523 | 1.399136 |
if not isinstance(value, int_types) and not isinstance(value, str_cls):
raise TypeError(unwrap(
'''
%s value must be an integer or a unicode string, not %s
''',
type_name(self),
type_name(value)
))
if isinstance(value, str_cls):
if value not in self._reverse_map:
raise ValueError(unwrap(
'''
%s value "%s" is not a valid value
''',
type_name(self),
value
))
value = self._reverse_map[value]
elif value not in self._map:
raise ValueError(unwrap(
'''
%s value %s is not a valid value
''',
type_name(self),
value
))
Integer.set(self, value)
|
def set(self, value)
|
Sets the value of the object
:param value:
An integer or a unicode string from _map
:raises:
ValueError - when an invalid value is passed
| 2.29057 | 2.181014 | 1.050232 |
if self.contents is None:
return None
if self._native is None:
self._native = self._map[self.__int__()]
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
A unicode string or None
| 7.520761 | 6.090786 | 1.234777 |
if self.children is None:
return self._contents
if self._is_mutated():
self._set_contents()
return self._contents
|
def contents(self)
|
:return:
A byte string of the DER-encoded contents of the sequence
| 7.375335 | 6.775738 | 1.088492 |
mutated = self._mutated
if self.children is not None:
for child in self.children:
if isinstance(child, Sequence) or isinstance(child, SequenceOf):
mutated = mutated or child._is_mutated()
return mutated
|
def _is_mutated(self)
|
:return:
A boolean - if the sequence or any children (recursively) have been
mutated
| 3.662448 | 3.12322 | 1.172651 |
child = self.children[index]
if child.__class__ == tuple:
child = self.children[index] = _build(*child)
return child
|
def _lazy_child(self, index)
|
Builds a child object if the child has only been parsed into a tuple so far
| 5.275812 | 3.733817 | 1.412981 |
if self.children is None:
self._parse_children()
contents = BytesIO()
for index, info in enumerate(self._fields):
child = self.children[index]
if child is None:
child_dump = b''
elif child.__class__ == tuple:
if force:
child_dump = self._lazy_child(index).dump(force=force)
else:
child_dump = child[3] + child[4] + child[5]
else:
child_dump = child.dump(force=force)
# Skip values that are the same as the default
if info[2] and 'default' in info[2]:
default_value = info[1](**info[2])
if default_value.dump() == child_dump:
continue
contents.write(child_dump)
self._contents = contents.getvalue()
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def _set_contents(self, force=False)
|
Updates the .contents attribute of the value with the encoded value of
all of the child objects
:param force:
Ensure all contents are in DER format instead of possibly using
cached BER-encoded data
| 3.735253 | 3.637809 | 1.026786 |
cls = self.__class__
cls._field_map = {}
cls._field_ids = []
cls._precomputed_specs = []
for index, field in enumerate(cls._fields):
if len(field) < 3:
field = field + ({},)
cls._fields[index] = field
cls._field_map[field[0]] = index
cls._field_ids.append(_build_id_tuple(field[2], field[1]))
if cls._oid_pair is not None:
cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]])
for index, field in enumerate(cls._fields):
has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
if has_callback or is_mapped_oid:
cls._precomputed_specs.append(None)
else:
cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None))
|
def _setup(self)
|
Generates _field_map, _field_ids and _oid_nums for use in parsing
| 2.913681 | 2.565665 | 1.135644 |
name, field_spec, field_params = self._fields[index]
value_spec = field_spec
spec_override = None
if self._spec_callbacks is not None and name in self._spec_callbacks:
callback = self._spec_callbacks[name]
spec_override = callback(self)
if spec_override:
# Allow a spec callback to specify both the base spec and
# the override, for situations such as OctetString and parse_as
if spec_override.__class__ == tuple and len(spec_override) == 2:
field_spec, value_spec = spec_override
if value_spec is None:
value_spec = field_spec
spec_override = None
# When no field spec is specified, use a single return value as that
elif field_spec is None:
field_spec = spec_override
value_spec = field_spec
spec_override = None
else:
value_spec = spec_override
elif self._oid_nums is not None and self._oid_nums[1] == index:
oid = self._lazy_child(self._oid_nums[0]).native
if oid in self._oid_specs:
spec_override = self._oid_specs[oid]
value_spec = spec_override
return (name, field_spec, value_spec, field_params, spec_override)
|
def _determine_spec(self, index)
|
Determine how a value for a field should be constructed
:param index:
The field number
:return:
A tuple containing the following elements:
- unicode string of the field name
- Asn1Value class of the field spec
- Asn1Value class of the value spec
- None or dict of params to pass to the field spec
- None or Asn1Value class indicating the value spec was derived from an OID or a spec callback
| 3.726591 | 3.275924 | 1.137569 |
if value is None and 'optional' in field_params:
return VOID
specs_different = field_spec != value_spec
is_any = issubclass(field_spec, Any)
if issubclass(value_spec, Choice):
is_asn1value = isinstance(value, Asn1Value)
is_tuple = isinstance(value, tuple) and len(value) == 2
is_dict = isinstance(value, dict) and len(value) == 1
if not is_asn1value and not is_tuple and not is_dict:
raise ValueError(unwrap(
'''
Can not set a native python value to %s, which has the
choice type of %s - value must be an instance of Asn1Value
''',
field_name,
type_name(value_spec)
))
if is_tuple or is_dict:
value = value_spec(value)
if not isinstance(value, value_spec):
wrapper = value_spec()
wrapper.validate(value.class_, value.tag, value.contents)
wrapper._parsed = value
new_value = wrapper
else:
new_value = value
elif isinstance(value, field_spec):
new_value = value
if specs_different:
new_value.parse(value_spec)
elif (not specs_different or is_any) and not isinstance(value, value_spec):
if (not is_any or specs_different) and isinstance(value, Asn1Value):
raise TypeError(unwrap(
'''
%s value must be %s, not %s
''',
field_name,
type_name(value_spec),
type_name(value)
))
new_value = value_spec(value, **field_params)
else:
if isinstance(value, value_spec):
new_value = value
else:
if isinstance(value, Asn1Value):
raise TypeError(unwrap(
'''
%s value must be %s, not %s
''',
field_name,
type_name(value_spec),
type_name(value)
))
new_value = value_spec(value)
# For when the field is OctetString or OctetBitString with embedded
# values we need to wrap the value in the field spec to get the
# appropriate encoded value.
if specs_different and not is_any:
wrapper = field_spec(value=new_value.dump(), **field_params)
wrapper._parsed = (new_value, new_value.__class__, None)
new_value = wrapper
new_value = _fix_tagging(new_value, field_params)
return new_value
|
def _make_value(self, field_name, field_spec, value_spec, field_params, value)
|
Constructs an appropriate Asn1Value object for a field
:param field_name:
A unicode string of the field name
:param field_spec:
An Asn1Value class that is the field spec
:param value_spec:
An Asn1Value class that is the value spec
:param field_params:
None or a dict of params for the field spec
:param value:
The value to construct an Asn1Value object from
:return:
An instance of a child class of Asn1Value
| 2.973685 | 2.930776 | 1.014641 |
if not isinstance(field_name, str_cls):
raise TypeError(unwrap(
'''
field_name must be a unicode string, not %s
''',
type_name(field_name)
))
if self._fields is None:
raise ValueError(unwrap(
'''
Unable to retrieve spec for field %s in the class %s because
_fields has not been set
''',
repr(field_name),
type_name(self)
))
index = self._field_map[field_name]
info = self._determine_spec(index)
return info[2]
|
def spec(self, field_name)
|
Determines the spec to use for the field specified. Depending on how
the spec is determined (_oid_pair or _spec_callbacks), it may be
necessary to set preceding field values before calling this. Usually
specs, if dynamic, are controlled by a preceding ObjectIdentifier
field.
:param field_name:
A unicode string of the field name to get the spec for
:return:
A child class of asn1crypto.core.Asn1Value that the field must be
encoded using
| 3.43625 | 3.317993 | 1.035641 |
if self.contents is None:
return None
if self._native is None:
if self.children is None:
self._parse_children(recurse=True)
try:
self._native = OrderedDict()
for index, child in enumerate(self.children):
if child.__class__ == tuple:
child = _build(*child)
self.children[index] = child
try:
name = self._fields[index][0]
except (IndexError):
name = str_cls(index)
self._native[name] = child.native
except (ValueError, TypeError) as e:
self._native = None
args = e.args[1:]
e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
raise e
return self._native
|
def native(self)
|
The native Python datatype representation of this value
:return:
An OrderedDict or None. If an OrderedDict, all child values are
recursively converted to native representation also.
| 3.795248 | 3.588954 | 1.05748 |
super(Sequence, self)._copy(other, copy_func)
if self.children is not None:
self.children = []
for child in other.children:
if child.__class__ == tuple:
self.children.append(child)
else:
self.children.append(child.copy())
|
def _copy(self, other, copy_func)
|
Copies the contents of another Sequence object to itself
:param other:
Another instance of the same class
:param copy_func:
A reference of copy.copy() or copy.deepcopy() to use when copying
lists, dicts and objects
| 3.043188 | 3.101306 | 0.98126 |
if self.children is None:
self._parse_children()
prefix = ' ' * nest_level
_basic_debug(prefix, self)
for field_name in self:
child = self._lazy_child(self._field_map[field_name])
if child is not VOID:
print('%s Field "%s"' % (prefix, field_name))
child.debug(nest_level + 3)
|
def debug(self, nest_level=1)
|
Show the binary data and parsed data in a tree structure
| 5.769238 | 5.798856 | 0.994892 |
if force:
self._set_contents(force=force)
if self._fields and self.children is not None:
for index, (field_name, _, params) in enumerate(self._fields):
if self.children[index] is not VOID:
continue
if 'default' in params or 'optional' in params:
continue
raise ValueError(unwrap(
'''
Field "%s" is missing from structure
''',
field_name
))
return Asn1Value.dump(self)
|
def dump(self, force=False)
|
Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value
| 6.898635 | 6.239398 | 1.105657 |
if isinstance(value, self._child_spec):
new_value = value
elif issubclass(self._child_spec, Any):
if isinstance(value, Asn1Value):
new_value = value
else:
raise ValueError(unwrap(
'''
Can not set a native python value to %s where the
_child_spec is Any - value must be an instance of Asn1Value
''',
type_name(self)
))
elif issubclass(self._child_spec, Choice):
if not isinstance(value, Asn1Value):
raise ValueError(unwrap(
'''
Can not set a native python value to %s where the
_child_spec is the choice type %s - value must be an
instance of Asn1Value
''',
type_name(self),
self._child_spec.__name__
))
if not isinstance(value, self._child_spec):
wrapper = self._child_spec()
wrapper.validate(value.class_, value.tag, value.contents)
wrapper._parsed = value
value = wrapper
new_value = value
else:
return self._child_spec(value=value)
params = {}
if self._child_spec.explicit:
params['explicit'] = self._child_spec.explicit
if self._child_spec.implicit:
params['implicit'] = (self._child_spec.class_, self._child_spec.tag)
return _fix_tagging(new_value, params)
|
def _make_value(self, value)
|
Constructs a _child_spec value from a native Python data type, or
an appropriate Asn1Value object
:param value:
A native Python value, or some child of Asn1Value
:return:
An object of type _child_spec
| 3.033626 | 2.776863 | 1.092465 |
# We inline this checks to prevent method invocation each time
if self.children is None:
self._parse_children()
self.children.append(self._make_value(value))
if self._native is not None:
self._native.append(self.children[-1].native)
self._mutated = True
|
def append(self, value)
|
Allows adding a child to the end of the sequence
:param value:
Native python datatype that will be passed to _child_spec to create
new child object
| 9.20348 | 8.88241 | 1.036147 |
if self.children is None:
self._parse_children()
contents = BytesIO()
for child in self:
contents.write(child.dump(force=force))
self._contents = contents.getvalue()
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def _set_contents(self, force=False)
|
Encodes all child objects into the contents for this object
:param force:
Ensure all contents are in DER format instead of possibly using
cached BER-encoded data
| 4.478815 | 4.055387 | 1.104411 |
try:
self.children = []
if self._contents is None:
return
contents_length = len(self._contents)
child_pointer = 0
while child_pointer < contents_length:
parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
if self._child_spec:
child = parts + (self._child_spec,)
else:
child = parts
if recurse:
child = _build(*child)
if isinstance(child, (Sequence, SequenceOf)):
child._parse_children(recurse=True)
self.children.append(child)
except (ValueError, TypeError) as e:
self.children = None
args = e.args[1:]
e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
raise e
|
def _parse_children(self, recurse=False)
|
Parses the contents and generates Asn1Value objects based on the
definitions from _child_spec.
:param recurse:
If child objects that are Sequence or SequenceOf objects should
be recursively parsed
:raises:
ValueError - when an error occurs parsing child objects
| 3.422292 | 3.040405 | 1.125604 |
def native(self):
    """
    The native Python datatype representation of this value

    :return:
        A list or None. If a list, all child values are recursively
        converted to native representation also.
    """
    if self.contents is None:
        return None
    if self._native is not None:
        return self._native
    # Make sure the children have been parsed before converting them
    if self.children is None:
        self._parse_children(recurse=True)
    try:
        self._native = [child.native for child in self]
    except (ValueError, TypeError) as e:
        # Add context about which object was being converted
        e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + e.args[1:]
        raise e
    return self._native
def debug(self, nest_level=1):
    """
    Show the binary data and parsed data in a tree structure
    """
    # Children must exist before they can be walked
    if self.children is None:
        self._parse_children()
    _basic_debug(' ' * nest_level, self)
    for child in self:
        child.debug(nest_level + 1)
# Re-encode children so any cached BER data is replaced with DER
if force:
    self._set_contents(force=force)
# Delegate the header/contents concatenation to the base class
return Asn1Value.dump(self)
|
def dump(self, force=False)
|
Encodes the value using DER
:param force:
If the encoded contents already exist, clear them and regenerate
to ensure they are in DER format instead of BER format
:return:
A byte string of the DER-encoded value
| 13.328537 | 10.83566 | 1.230062 |
# Lazily builds self.children from the encoded contents, matching each
# encoded value against the definitions from _fields.
cls = self.__class__
if self._contents is None:
    # No encoded data: pre-fill children with VOID and construct any
    # fields that declare a default value
    if self._fields:
        self.children = [VOID] * len(self._fields)
        for index, (_, _, params) in enumerate(self._fields):
            if 'default' in params:
                if cls._precomputed_specs[index]:
                    field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index]
                else:
                    field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index)
                self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None)
    return
try:
    child_map = {}
    contents_length = len(self.contents)
    child_pointer = 0
    seen_field = 0
    while child_pointer < contents_length:
        parts, child_pointer = _parse(self.contents, contents_length, pointer=child_pointer)
        # Fields are identified by the (class_, tag) pair of the header
        id_ = (parts[0], parts[2])
        field = self._field_ids.get(id_)
        if field is None:
            raise ValueError(unwrap(
                '''
                Data for field %s (%s class, %s method, tag %s) does
                not match any of the field definitions
                ''',
                seen_field,
                CLASS_NUM_TO_NAME_MAP.get(parts[0]),
                METHOD_NUM_TO_NAME_MAP.get(parts[1]),
                parts[2],
            ))
        _, field_spec, value_spec, field_params, spec_override = (
            cls._precomputed_specs[field] or self._determine_spec(field))
        if field_spec is None or (spec_override and issubclass(field_spec, Any)):
            field_spec = value_spec
            spec_override = None
        if spec_override:
            child = parts + (field_spec, field_params, value_spec)
        else:
            child = parts + (field_spec, field_params)
        if recurse:
            child = _build(*child)
            if isinstance(child, (Sequence, SequenceOf)):
                child._parse_children(recurse=True)
        child_map[field] = child
        seen_field += 1
    total_fields = len(self._fields)
    for index in range(0, total_fields):
        if index in child_map:
            continue
        # The field was absent from the encoding: use VOID for optional
        # fields, construct defaulted fields, and error out on required ones
        name, field_spec, value_spec, field_params, spec_override = (
            cls._precomputed_specs[index] or self._determine_spec(index))
        if field_spec is None or (spec_override and issubclass(field_spec, Any)):
            field_spec = value_spec
            spec_override = None
        missing = False
        if not field_params:
            missing = True
        elif 'optional' not in field_params and 'default' not in field_params:
            missing = True
        elif 'optional' in field_params:
            child_map[index] = VOID
        elif 'default' in field_params:
            child_map[index] = field_spec(**field_params)
        if missing:
            raise ValueError(unwrap(
                '''
                Missing required field "%s" from %s
                ''',
                name,
                type_name(self)
            ))
    self.children = []
    for index in range(0, total_fields):
        self.children.append(child_map[index])
except (ValueError, TypeError) as e:
    args = e.args[1:]
    # Add context about which object failed to parse, then re-raise
    e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
    raise e
|
def _parse_children(self, recurse=False)
|
Parses the contents and generates Asn1Value objects based on the
definitions from _fields.
:param recurse:
If child objects that are Sequence or SequenceOf objects should
be recursively parsed
:raises:
ValueError - when an error occurs parsing child objects
| 2.796246 | 2.736456 | 1.021849 |
if self.children is None:
self._parse_children()
child_tag_encodings = []
for index, child in enumerate(self.children):
child_encoding = child.dump(force=force)
# Skip encoding defaulted children
name, spec, field_params = self._fields[index]
if 'default' in field_params:
if spec(**field_params).dump() == child_encoding:
continue
child_tag_encodings.append((child.tag, child_encoding))
child_tag_encodings.sort(key=lambda ct: ct[0])
self._contents = b''.join([ct[1] for ct in child_tag_encodings])
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def _set_contents(self, force=False)
|
Encodes all child objects into the contents for this object.
This method is overridden because a Set needs to be encoded by
removing defaulted fields and then sorting the fields by tag.
:param force:
Ensure all contents are in DER format instead of possibly using
cached BER-encoded data
| 4.329433 | 3.817703 | 1.134042 |
if self.children is None:
self._parse_children()
child_encodings = []
for child in self:
child_encodings.append(child.dump(force=force))
self._contents = b''.join(sorted(child_encodings))
self._header = None
if self._trailer != b'':
self._trailer = b''
|
def _set_contents(self, force=False)
|
Encodes all child objects into the contents for this object.
This method is overridden because a SetOf needs to be encoded by
sorting the child encodings.
:param force:
Ensure all contents are in DER format instead of possibly using
cached BER-encoded data
| 4.593885 | 3.992104 | 1.150743 |
def native(self):
    """
    The native Python datatype representation of this value

    :return:
        A datetime.datetime object in the UTC timezone or None
    """
    if self.contents is None:
        return None
    if self._native is None:
        string = str_cls(self)
        has_timezone = re.search('[-\\+]', string)
        # We don't know what timezone it is in, or it is UTC because of a Z
        # suffix, so we just assume UTC
        if not has_timezone:
            string = string.rstrip('Z')
            date = self._date_by_len(string)
            self._native = date.replace(tzinfo=timezone.utc)
        else:
            # Python 2 doesn't support the %z format code, so we have to manually
            # process the timezone offset.
            date = self._date_by_len(string[0:-5])
            hours = int(string[-4:-2])
            minutes = int(string[-2:])
            delta = timedelta(hours=abs(hours), minutes=minutes)
            # The sign character lives at string[-5]; int(string[-4:-2])
            # never sees it, so it is always >= 0. The previous
            # "if hours < 0" branch was therefore unreachable and every
            # offset was added. A "+hhmm" differential means local time is
            # ahead of UTC, so the delta must be subtracted; "-hhmm" means
            # behind UTC, so it must be added.
            if string[-5] == '-':
                date += delta
            else:
                date -= delta
            self._native = date.replace(tzinfo=timezone.utc)
    return self._native
# Accepts either a unicode string already in UTCTime format, or a
# datetime.datetime which is serialized to "YYMMDDHHMMSSZ" here
if isinstance(value, datetime):
    value = value.strftime('%y%m%d%H%M%SZ')
    if _PY2:
        value = value.decode('ascii')
AbstractString.set(self, value)
# Set it to None and let the class take care of converting the next
# time that .native is called
self._native = None
|
def set(self, value)
|
Sets the value of the object
:param value:
A unicode string or a datetime.datetime object
:raises:
ValueError - when an invalid value is passed
| 8.346081 | 8.129295 | 1.026667 |
strlen = len(string)
year_num = int(string[0:2])
if year_num < 50:
prefix = '20'
else:
prefix = '19'
if strlen == 10:
return datetime.strptime(prefix + string, '%Y%m%d%H%M')
if strlen == 12:
return datetime.strptime(prefix + string, '%Y%m%d%H%M%S')
return string
|
def _date_by_len(self, string)
|
Parses a date from a string based on its length
:param string:
A unicode string to parse
:return:
A datetime.datetime object or a unicode string
| 2.418535 | 2.366999 | 1.021773 |
strlen = len(string)
date_format = None
if strlen == 10:
date_format = '%Y%m%d%H'
elif strlen == 12:
date_format = '%Y%m%d%H%M'
elif strlen == 14:
date_format = '%Y%m%d%H%M%S'
elif strlen == 18:
date_format = '%Y%m%d%H%M%S.%f'
if date_format:
if len(string) >= 4 and string[0:4] == '0000':
# Year 2000 shares a calendar with year 0, and is supported natively
t = datetime.strptime('2000' + string[4:], date_format)
return extended_datetime(
0,
t.month,
t.day,
t.hour,
t.minute,
t.second,
t.microsecond,
t.tzinfo
)
return datetime.strptime(string, date_format)
return string
|
def _date_by_len(self, string)
|
Parses a date from a string based on its length
:param string:
A unicode string to parse
:return:
A datetime.datetime object, asn1crypto.util.extended_datetime object or
a unicode string
| 2.445741 | 2.284513 | 1.070574 |
# Let the base Choice implementation pick the matching alternative first
super(CertificateChoices, self).validate(class_, tag, contents)
# Work around broken encoders: when the alternative at index 2 was chosen
# but the first field of the sequence parses as version "v2", the data is
# actually a v2 attribute certificate, so switch to the alternative at
# index 3
if self._choice == 2:
    if AttCertVersion.load(Sequence.load(contents)[0].dump()).native == 'v2':
        self._choice = 3
|
def validate(self, class_, tag, contents)
|
Ensures that the class and tag specified exist as an alternative. This
custom version fixes parsing broken encodings where a V2 attribute
certificate is encoded as a V1
:param class_:
The integer class_ from the encoded value header
:param tag:
The integer tag from the encoded value header
:param contents:
A byte string of the contents of the value - used when the object
is explicitly tagged
:raises:
ValueError - when value is not a valid alternative
| 11.310297 | 11.169267 | 1.012627 |
def type_name(value):
    """
    Returns a user-readable name for the type of an object

    :param value:
        A value to get the type name of

    :return:
        A unicode string of the object's type name
    """
    cls = value if inspect.isclass(value) else value.__class__
    # Builtins read better without their module prefix
    if cls.__module__ in set(['builtins', '__builtin__']):
        return cls.__name__
    return '%s.%s' % (cls.__module__, cls.__name__)
def fill_width(bytes_, width):
    """
    Ensure a byte string representing a positive integer is a specific width
    (in bytes) by left-padding with null bytes

    :param bytes_:
        The integer byte string

    :param width:
        The desired width as an integer

    :return:
        A byte string of the width specified; input already at least that
        wide is returned unchanged
    """
    # rjust() pads in a single allocation instead of the O(n^2) loop of
    # repeated b'\x00' + bytes_ concatenations
    return bytes_.rjust(width, b'\x00')
def emit(class_, method, tag, contents):
    """
    Constructs a byte string of an ASN.1 DER-encoded value

    This is typically not useful. Instead, use one of the standard classes from
    asn1crypto.core, or construct a new class with specific fields, and call the
    .dump() method.

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER value (header and contents)
    """
    # Validation order is significant: each argument is type-checked and
    # range-checked before the next one is examined
    if not isinstance(class_, int):
        raise TypeError('class_ must be an integer, not %s' % type_name(class_))
    if not 0 <= class_ <= 3:
        raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)
    if not isinstance(method, int):
        raise TypeError('method must be an integer, not %s' % type_name(method))
    if method not in (0, 1):
        raise ValueError('method must be 0 or 1, not %s' % method)
    if not isinstance(tag, int):
        raise TypeError('tag must be an integer, not %s' % type_name(tag))
    if tag < 0:
        raise ValueError('tag must be greater than zero, not %s' % tag)
    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))
    return _dump_header(class_, method, tag, contents) + contents
def parse(contents, strict=False):
    """
    Parses a byte string of ASN.1 BER/DER-encoded data.

    This is typically not useful. Instead, use one of the standard classes from
    asn1crypto.core, or construct a new class with specific fields, and call the
    .load() class method.

    :param contents:
        A byte string of BER/DER-encoded data

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists

    :raises:
        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
        TypeError - when contents is not a byte string

    :return:
        A 6-element tuple:
         - 0: integer class (0 to 3)
         - 1: integer method
         - 2: integer tag
         - 3: byte string header
         - 4: byte string content
         - 5: byte string trailer
    """
    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))
    data_len = len(contents)
    info, consumed = _parse(contents, data_len)
    # In strict mode every byte must belong to the single parsed value
    if strict and consumed != data_len:
        raise ValueError('Extra data - %d bytes of trailing data were provided' % (data_len - consumed))
    return info
def peek(contents):
    """
    Parses a byte string of ASN.1 BER/DER-encoded data to find the length

    This is typically used to look into an encoded value to see how long the
    next chunk of ASN.1-encoded data is. Primarily it is useful when a
    value is a concatenation of multiple values.

    :param contents:
        A byte string of BER/DER-encoded data

    :raises:
        ValueError - when the contents do not contain an ASN.1 header or are truncated in some way
        TypeError - when contents is not a byte string

    :return:
        An integer with the number of bytes occupied by the ASN.1 value
    """
    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))
    # Only the consumed-byte count is needed; the parsed parts are discarded
    _, consumed = _parse(contents, len(contents))
    return consumed
# Decodes the identifier, length and contents of a single BER/DER value
# starting at `pointer`, returning the parts and the index one past the end.
if data_len < pointer + 2:
    # Every value needs at least an identifier octet and a length octet
    raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer))
start = pointer
first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
pointer += 1
tag = first_octet & 31
# High tag number form: base 128 digits using the 8th bit as continuation
# indicator
if tag == 31:
    tag = 0
    while True:
        num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
        pointer += 1
        tag *= 128
        tag += num & 127
        if num >> 7 == 0:
            break
length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
pointer += 1
if length_octet >> 7 == 0:
    # Short-form definite length: the low 7 bits are the content length
    if lengths_only:
        return (pointer, pointer + (length_octet & 127))
    contents_end = pointer + (length_octet & 127)
else:
    length_octets = length_octet & 127
    if length_octets:
        # Long-form definite length: the low 7 bits count the big-endian
        # length bytes that follow
        pointer += length_octets
        contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
        if lengths_only:
            return (pointer, contents_end)
    else:
        # To properly parse indefinite length values, we need to scan forward
        # parsing headers until we find a value with a length of zero. If we
        # just scanned looking for \x00\x00, nested indefinite length values
        # would not work.
        contents_end = pointer
        # Unfortunately we need to understand the contents of the data to
        # properly scan forward, which bleeds some representation info into
        # the parser. This condition handles the unused bits byte in
        # constructed bit strings.
        if tag == 3:
            contents_end += 1
        while contents_end < data_len:
            sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True)
            if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00':
                break
        if lengths_only:
            return (pointer, contents_end)
        if contents_end > data_len:
            raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
        # Indefinite-length values report the two end-of-contents octets as
        # the trailer instead of including them in the contents
        return (
            (
                first_octet >> 6,
                (first_octet >> 5) & 1,
                tag,
                encoded_data[start:pointer],
                encoded_data[pointer:contents_end - 2],
                b'\x00\x00'
            ),
            contents_end
        )
if contents_end > data_len:
    raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
return (
    (
        first_octet >> 6,
        (first_octet >> 5) & 1,
        tag,
        encoded_data[start:pointer],
        encoded_data[pointer:contents_end],
        b''
    ),
    contents_end
)
|
def _parse(encoded_data, data_len, pointer=0, lengths_only=False)
|
Parses a byte string into component parts
:param encoded_data:
A byte string that contains BER-encoded data
:param data_len:
The integer length of the encoded data
:param pointer:
The index in the byte string to parse from
:param lengths_only:
A boolean to cause the call to return a 2-element tuple of the integer
number of bytes in the header and the integer number of bytes in the
contents. Internal use only.
:return:
A 2-element tuple:
- 0: A tuple of (class_, method, tag, header, content, trailer)
- 1: An integer indicating how many bytes were consumed
| 2.849576 | 2.759071 | 1.032803 |
def _dump_header(class_, method, tag, contents):
    """
    Constructs the header bytes for an ASN.1 object

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER header
    """
    header = b''

    id_num = 0
    id_num |= class_ << 6
    id_num |= method << 5

    if tag >= 31:
        # High-tag-number form (X.690 section 8.1.2.4): the tag bits of the
        # first octet are all ones, then the tag follows in base-128 with
        # the most significant group first and bit 8 set on every octet
        # except the last. The previous implementation appended the groups
        # least-significant first, producing invalid encodings for any
        # tag >= 0x80, so the groups are now prepended to build the
        # big-endian ordering.
        cont_bit = 0
        tag_bytes = b''
        while tag > 0:
            tag_bytes = chr_cls(cont_bit | (tag & 0x7F)) + tag_bytes
            # Only the lowest-order group (emitted first) lacks the
            # continuation bit
            if not cont_bit:
                cont_bit = 0x80
            tag = tag >> 7
        header += chr_cls(id_num | 31) + tag_bytes
    else:
        header += chr_cls(id_num | tag)

    length = len(contents)
    if length <= 127:
        # Short-form length
        header += chr_cls(length)
    else:
        # Long-form length: first octet is 0x80 | number of length bytes
        length_bytes = int_to_bytes(length)
        header += chr_cls(0x80 | len(length_bytes))
        header += length_bytes

    return header
if not isinstance(value, str_cls):
    raise TypeError(unwrap(
        '''
        value must be a unicode string, not %s
        ''',
        type_name(value)
    ))
scheme = None
# Python 2.6 doesn't split properly if the URL doesn't start with http:// or https://
if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'):
    # Temporarily swap the real scheme for http:// so urlsplit() works,
    # then restore it after splitting
    real_prefix = None
    prefix_match = re.match('^[^:]*://', value)
    if prefix_match:
        real_prefix = prefix_match.group(0)
        value = 'http://' + value[len(real_prefix):]
    parsed = urlsplit(value)
    if real_prefix:
        value = real_prefix + value[7:]
        scheme = _urlquote(real_prefix[:-3])
else:
    parsed = urlsplit(value)
if scheme is None:
    scheme = _urlquote(parsed.scheme)
hostname = parsed.hostname
if hostname is not None:
    # Internationalized hostnames are encoded with IDNA, not percent-quoting
    hostname = hostname.encode('idna')
# RFC 3986 allows userinfo to contain sub-delims
username = _urlquote(parsed.username, safe='!$&\'()*+,;=')
password = _urlquote(parsed.password, safe='!$&\'()*+,;=')
port = parsed.port
if port is not None:
    port = str_cls(port).encode('ascii')
netloc = b''
if username is not None:
    netloc += username
    if password:
        netloc += b':' + password
    netloc += b'@'
if hostname is not None:
    netloc += hostname
if port is not None:
    # When normalizing, drop the default port for http/https
    default_http = scheme == b'http' and port == b'80'
    default_https = scheme == b'https' and port == b'443'
    if not normalize or (not default_http and not default_https):
        netloc += b':' + port
# RFC 3986 allows a path to contain sub-delims, plus "@" and ":"
path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:')
# RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?"
query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:')
# RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?"
fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:')
if normalize and query is None and fragment is None and path == b'/':
    path = None
# Python 2.7 compat
if path is None:
    path = ''
output = urlunsplit((scheme, netloc, path, query, fragment))
if isinstance(output, str_cls):
    output = output.encode('latin1')
return output
|
def iri_to_uri(value, normalize=False)
|
Encodes a unicode IRI into an ASCII byte string URI
:param value:
A unicode string of an IRI
:param normalize:
A bool that controls URI normalization
:return:
A byte string of the ASCII-encoded URI
| 2.38949 | 2.414543 | 0.989624 |
if not isinstance(value, byte_cls):
    raise TypeError(unwrap(
        '''
        value must be a byte string, not %s
        ''',
        type_name(value)
    ))
parsed = urlsplit(value)
scheme = parsed.scheme
if scheme is not None:
    scheme = scheme.decode('ascii')
# Userinfo chars that would change the URL structure stay percent-encoded
username = _urlunquote(parsed.username, remap=[':', '@'])
password = _urlunquote(parsed.password, remap=[':', '@'])
hostname = parsed.hostname
if hostname:
    # Hostnames use IDNA encoding rather than percent-quoting
    hostname = hostname.decode('idna')
port = parsed.port
if port and not isinstance(port, int_types):
    port = port.decode('ascii')
netloc = ''
if username is not None:
    netloc += username
    if password:
        netloc += ':' + password
    netloc += '@'
if hostname is not None:
    netloc += hostname
if port is not None:
    netloc += ':' + str_cls(port)
path = _urlunquote(parsed.path, remap=['/'], preserve=True)
query = _urlunquote(parsed.query, remap=['&', '='], preserve=True)
fragment = _urlunquote(parsed.fragment)
return urlunsplit((scheme, netloc, path, query, fragment))
|
def uri_to_iri(value)
|
Converts an ASCII URI byte string into a unicode IRI
:param value:
An ASCII-encoded byte string of the URI
:return:
A unicode string of the IRI
| 2.275174 | 2.20262 | 1.03294 |
def _iri_utf8_errors_handler(exc):
    """
    Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte
    sequences encoded in %XX format, but as part of a unicode string.

    :param exc:
        The UnicodeDecodeError exception

    :return:
        A 2-element tuple of (replacement unicode string, integer index to
        resume at)
    """
    # Percent-encode each undecodable byte and resume just after the bad run
    replacement = ''.join(
        '%%%02x' % num
        for num in bytes_to_list(exc.object[exc.start:exc.end])
    )
    return (replacement, exc.end)
if string is None or string == '':
    return None

# Anything already hex quoted is pulled out of the URL and unquoted if
# possible
escapes = []
if re.search('%[0-9a-fA-F]{2}', string):
    # Try to unquote any percent values, restoring them if they are not
    # valid UTF-8. Also, requote any safe chars since encoded versions of
    # those are functionally different than the unquoted ones.
    def _try_unescape(match):
        byte_string = unquote_to_bytes(match.group(0))
        unicode_string = byte_string.decode('utf-8', 'iriutf8')
        for safe_char in list(safe):
            unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
        return unicode_string
    string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)

    # Once we have the minimal set of hex quoted values, removed them from
    # the string so that they are not double quoted
    def _extract_escape(match):
        escapes.append(match.group(0).encode('ascii'))
        # NUL is used as a placeholder so the escape survives urlquote()
        return '\x00'
    string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)

output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
if not isinstance(output, byte_cls):
    output = output.encode('ascii')

# Restore the existing quoted values that we extracted
if len(escapes) > 0:
    def _return_escape(_):
        return escapes.pop(0)
    output = re.sub(b'%00', _return_escape, output)

return output
|
def _urlquote(string, safe='')
|
Quotes a unicode string for use in a URL
:param string:
A unicode string
:param safe:
A unicode string of character to not encode
:return:
None (if string is None) or an ASCII byte string of the quoted string
| 4.065702 | 3.966405 | 1.025035 |
if byte_string is None:
    return byte_string
if byte_string == b'':
    return ''
if preserve:
    # Swap literal occurrences of the remapped chars for rare control
    # characters so they survive the unquote step and can be restored after
    replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
    preserve_unmap = {}
    for char in remap:
        replacement = replacements.pop(0)
        preserve_unmap[replacement] = char
        byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))
byte_string = unquote_to_bytes(byte_string)
if remap:
    # Re-quote the structural characters so they stay percent-encoded in
    # the resulting IRI
    for char in remap:
        byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))
output = byte_string.decode('utf-8', 'iriutf8')
if preserve:
    for replacement, original in preserve_unmap.items():
        output = output.replace(replacement, original)
return output
|
def _urlunquote(byte_string, remap=None, preserve=None)
|
Unquotes a URI portion from a byte string into unicode using UTF-8
:param byte_string:
A byte string of the data to unquote
:param remap:
A list of characters (as unicode) that should be re-mapped to a
%XX encoding. This is used when characters are not valid in part of a
URL.
:param preserve:
A bool - indicates that the chars to be remapped if they occur in
non-hex form, should be preserved. E.g. / for URL path.
:return:
A unicode string
| 2.497441 | 2.398616 | 1.041201 |
def teletex_search_function(name):
    """
    Search function for teletex codec that is passed to codecs.register()

    :param name:
        The codec name being looked up

    :return:
        A codecs.CodecInfo for "teletex", or None for any other name
    """
    if name == 'teletex':
        return codecs.CodecInfo(
            name='teletex',
            encode=TeletexCodec().encode,
            decode=TeletexCodec().decode,
            incrementalencoder=TeletexIncrementalEncoder,
            incrementaldecoder=TeletexIncrementalDecoder,
            streamreader=TeletexStreamReader,
            streamwriter=TeletexStreamWriter,
        )
    return None
# Build the argparse CLI, apply global overrides (custom tick / delimiter)
# and return the parsed options as a dict.
parser = argparse.ArgumentParser(
    description='draw basic graphs on terminal')
parser.add_argument(
    'filename',
    nargs='?',
    default="-",
    help='data file name (comma or space separated). Defaults to stdin.')
parser.add_argument(
    '--title',
    help='Title of graph'
)
parser.add_argument(
    '--width',
    type=int,
    default=50,
    help='width of graph in characters default:50'
)
parser.add_argument(
    '--format',
    default='{:<5.2f}',
    help='format specifier to use.'
)
parser.add_argument(
    '--suffix',
    default='',
    help='string to add as a suffix to all data points.'
)
parser.add_argument(
    '--no-labels',
    action='store_true',
    help='Do not print the label column'
)
parser.add_argument(
    '--color',
    nargs='*',
    choices=AVAILABLE_COLORS,
    help='Graph bar color( s )'
)
parser.add_argument(
    '--vertical',
    action='store_true',
    help='Vertical graph'
)
parser.add_argument(
    '--stacked',
    action='store_true',
    help='Stacked bar graph'
)
parser.add_argument(
    '--different-scale',
    action='store_true',
    help='Categories have different scales.'
)
parser.add_argument(
    '--calendar',
    action='store_true',
    help='Calendar Heatmap chart'
)
parser.add_argument(
    '--start-dt',
    help='Start date for Calendar chart'
)
parser.add_argument(
    '--custom-tick',
    default='',
    help='Custom tick mark, emoji approved'
)
parser.add_argument(
    '--delim',
    default='',
    help='Custom delimiter, default , or space'
)
parser.add_argument(
    '--verbose',
    action='store_true',
    help='Verbose output, helpful for debugging'
)
parser.add_argument(
    '--version',
    action='store_true',
    help='Display version and exit'
)
# With no arguments and no piped stdin there is nothing to draw
if len(sys.argv) == 1:
    if sys.stdin.isatty():
        parser.print_usage()
        sys.exit(2)
args = vars(parser.parse_args())
# A custom tick replaces both the full and the small tick characters
if args['custom_tick'] != '':
    global TICK, SM_TICK
    TICK = args['custom_tick']
    SM_TICK = ''
if args['delim'] != '':
    global DELIM
    DELIM = args['delim']
return args
|
def init_args()
|
Parse and return the arguments.
| 2.732783 | 2.713027 | 1.007282 |
# Parse the command line, handle --version, then dispatch to the chart type
args = init_args()
if args['version']:
    print('termgraph v{}'.format(VERSION))
    sys.exit()
_, labels, data, colors = read_data(args)
if args['calendar']:
    calendar_heatmap(data, labels, args)
else:
    chart(colors, data, args, labels)
|
def main()
|
Main function.
| 6.476211 | 6.362117 | 1.017933 |
def find_max_label_length(labels):
    """
    Return the maximum length for the labels.

    :param labels:
        A list of label strings

    :return:
        The length of the longest label, or 0 for an empty list
    """
    # max() over the lengths replaces the manual tracking loop; default=0
    # covers the empty-list case the original handled via its initial value
    return max(map(len, labels), default=0)
def normalize(data, width):
    """
    Normalize the data and return it.

    :param data:
        A list of rows, each a list of numeric values

    :param width:
        The number of ticks available for the largest value

    :return:
        The data offset to be non-negative and, when necessary, scaled so
        the maximum value maps to `width`
    """
    min_dat = find_min(data)
    # We offset by the minimum if there's a negative.
    off_data = []
    if min_dat < 0:
        min_dat = abs(min_dat)
        for dat in data:
            off_data.append([_d + min_dat for _d in dat])
    else:
        off_data = data
    # Note: the original also recomputed find_min(off_data) here, but the
    # result was never used, so the dead call has been removed
    max_dat = find_max(off_data)

    if max_dat < width:
        # Don't need to normalize if the max value
        # is less than the width we allow.
        return off_data

    # max_dat / width is the value for a single tick. norm_factor is the
    # inverse of this value
    # If you divide a number to the value of single tick, you will find how
    # many ticks it does contain basically.
    norm_factor = width / float(max_dat)
    return [[_v * norm_factor for _v in dat] for dat in off_data]
# Generator: prints each row's label and value tail as a side effect and
# yields the (value, num_blocks, val_min, color) arguments for print_row()
# or vertically() for every data point.
val_min = find_min(data)
for i in range(len(labels)):
    if args['no_labels']:
        # Hide the labels.
        label = ''
    else:
        label = "{:<{x}}: ".format(labels[i],
                                   x=find_max_label_length(labels))
    values = data[i]
    num_blocks = normal_dat[i]
    for j in range(len(values)):
        # In Multiple series graph 1st category has label at the beginning,
        # whereas the rest categories have only spaces.
        if j > 0:
            len_label = len(label)
            label = ' ' * len_label
        tail = ' {}{}'.format(args['format'].format(values[j]),
                              args['suffix'])
        if colors:
            color = colors[j]
        else:
            color = None
        if not args['vertical']:
            print(label, end="")
        yield(values[j], int(num_blocks[j]), val_min, color)
        if not args['vertical']:
            print(tail)
|
def horiz_rows(labels, data, normal_dat, args, colors)
|
Prepare the horizontal graph.
Each row is printed through the print_row function.
| 5.249706 | 5.238721 | 1.002097 |
def print_row(value, num_blocks, val_min, color):
    """
    A method to print a row for a horizontal graphs.

    i.e:
    1: ▇▇ 2
    2: ▇▇▇ 3
    3: ▇▇▇▇ 4
    """
    if color:
        sys.stdout.write(f'\033[{color}m')  # Start to write colorized.

    if num_blocks < 1 and (value > val_min or value > 0):
        # Print something if it's not the smallest
        # and the normal value is less than one.
        sys.stdout.write(SM_TICK)
    else:
        # A single write of the repeated tick replaces the per-tick loop
        sys.stdout.write(TICK * num_blocks)

    if color:
        sys.stdout.write('\033[0m')  # Back to original color.
def stacked_graph(labels, data, normal_data, len_categories, args, colors):
    """
    Prepare the horizontal stacked graph.
    Each row is printed through the print_row function.
    """
    val_min = find_min(data)

    for i, label_text in enumerate(labels):
        if args['no_labels']:
            # Hide the labels.
            label = ''
        else:
            label = "{:<{x}}: ".format(label_text,
                                       x=find_max_label_length(labels))
        print(label, end="")

        values = data[i]
        blocks = normal_data[i]
        # Print every category segment of this row on the same line
        for j, value in enumerate(values):
            print_row(value, int(blocks[j]), val_min, colors[j])

        # The tail shows the stacked total for the row
        print(' {}{}'.format(args['format'].format(sum(values)),
                             args['suffix']))
# Accumulates each value's tick column into module-level lists, then
# transposes them with zip_longest so the graph can be printed vertically.
global maxi, value_list
value_list.append(str(value))
# In case the number of blocks at the end of the normalization is less
# than the default number, use the maxi variable to escape.
if maxi < num_blocks:
    maxi = num_blocks
if num_blocks > 0:
    vertical_list.append((TICK * num_blocks))
else:
    vertical_list.append(SM_TICK)
# Zip_longest method in order to turn them vertically.
for row in zip_longest(*vertical_list, fillvalue=' '):
    zipped_list.append(row)
counter, result_list = 0, []
# Combined with the maxi variable, escapes the appending method at
# the correct point or the default one (width).
for i in reversed(zipped_list):
    result_list.append(i)
    counter += 1
    if maxi == args['width']:
        if counter == (args['width']):
            break
    else:
        if counter == maxi:
            break
# Return a list of rows which will be used to print the result vertically.
return result_list
|
def vertically(value, num_blocks, val_min, color, args)
|
Prepare the vertical graph.
The whole graph is printed through the print_vertical function.
| 6.855279 | 6.885273 | 0.995644 |
def print_vertical(vertical_rows, labels, color, args):
    """
    Print the whole vertical graph.

    :param vertical_rows:
        The list of row tuples produced by vertically()

    :param labels:
        The labels to print below the values section

    :param color:
        An ANSI color number, or a falsy value for no color

    :param args:
        The dict of parsed command-line arguments
    """
    if color:
        sys.stdout.write(f'\033[{color}m')  # Start to write colorized.

    # Track the width of the last printed row so the separator lines below
    # match the graph width; the original read the leaked loop variable
    # after the loop, which raised NameError when vertical_rows was empty.
    sep_width = 0
    for row in vertical_rows:
        print(*row)
        sep_width = len(row)

    sys.stdout.write('\033[0m')  # End of printing colored

    print("-" * sep_width + "Values" + "-" * sep_width)
    # Print Values
    for value in zip_longest(*value_list, fillvalue=' '):
        print(" ".join(value))

    if not args['no_labels']:
        print("-" * sep_width + "Labels" + "-" * sep_width)
        # Print Labels
        for label in zip_longest(*labels, fillvalue=''):
            print(" ".join(label))
# Dispatches to the right drawing mode (stacked, per-category scale, or a
# single shared scale), normalizing the data before printing.
len_categories = len(data[0])
if len_categories > 1:
    # Stacked graph
    if args['stacked']:
        normal_dat = normalize(data, args['width'])
        stacked_graph(labels, data, normal_dat, len_categories,
                      args, colors)
        return
    if not colors:
        colors = [None] * len_categories
    # Multiple series graph with different scales
    # Normalization per category
    if args['different_scale']:
        for i in range(len_categories):
            cat_data = []
            for dat in data:
                cat_data.append([dat[i]])
            # Normalize data, handle negatives.
            normal_cat_data = normalize(cat_data, args['width'])
            # Generate data for a row.
            for row in horiz_rows(labels, cat_data, normal_cat_data,
                                  args, [colors[i]]):
                # Print the row
                if not args['vertical']:
                    print_row(*row)
                else:
                    vertic = vertically(*row, args=args)
            # Vertical graph
            if args['vertical']:
                print_vertical(vertic, labels, colors[i], args)
            print()
            # Reset the module-level accumulators between categories
            value_list.clear(), zipped_list.clear(), vertical_list.clear()
        return
# One category/Multiple series graph with same scale
# All-together normalization
if not args['stacked']:
    normal_dat = normalize(data, args['width'])
    for row in horiz_rows(labels, data, normal_dat, args, colors):
        if not args['vertical']:
            print_row(*row)
        else:
            vertic = vertically(*row, args=args)
    if args['vertical'] and len_categories == 1:
        if colors:
            color = colors[0]
        else:
            color = None
        print_vertical(vertic, labels, color, args)
    print()
|
def chart(colors, data, args, labels)
|
Handle the normalization of data and the printing of the graph.
| 3.871169 | 3.742415 | 1.034404 |
def check_data(labels, data, args):
    """Check that all data were inserted correctly. Return the colors.

    Exits the process with status 1 on any inconsistency (missing values,
    label/data size mismatch, color/category size mismatch, or an
    unsupported vertical multi-series graph).

    :param labels: list of label strings, one per row of ``data``.
    :param data: list of rows; each row must hold one value per category.
    :param args: dict of command-line options (``color``, ``vertical``,
        ``different_scale``, ``stacked``).
    :return: list of ANSI color numbers (possibly empty).
    """
    len_categories = len(data[0])

    # Check that there are data for all labels.
    if len(labels) != len(data):
        print(">> Error: Label and data array sizes don't match")
        sys.exit(1)

    # Check that there are data for all categories per label.
    for dat in data:
        if len(dat) != len_categories:
            print(">> Error: There are missing values")
            sys.exit(1)

    colors = []
    # If user inserts colors, they should be as many as the categories.
    if args['color'] is not None:
        if len(args['color']) != len_categories:
            print(">> Error: Color and category array sizes don't match")
            sys.exit(1)
        for color in args['color']:
            colors.append(AVAILABLE_COLORS.get(color))

    # Vertical graph for multiple series of same scale is not supported yet.
    if args['vertical'] and len_categories > 1 and not args['different_scale']:
        print(">> Error: Vertical graph for multiple series of same "
              "scale is not supported yet.")
        sys.exit(1)

    # If user hasn't inserted colors, pick the first n colors
    # from the dict (n = number of categories).
    if args['stacked'] and not colors:
        colors = list(AVAILABLE_COLORS.values())[:len_categories]

    return colors
def print_categories(categories, colors):
    """Print a tick and the category's name for each category above
    the graph.

    :param categories: list of category name strings.
    :param colors: list of ANSI color numbers parallel to ``categories``,
        or an empty list for uncolored output.
    """
    for i, category in enumerate(categories):
        if colors:
            sys.stdout.write(f'\033[{colors[i]}m')  # Start to write colorized.

        sys.stdout.write(TICK + ' ' + category + ' ')
        if colors:
            sys.stdout.write('\033[0m')  # Back to original.

    print('\n\n')
def read_data(args):
    """Read data from a file or stdin and return it.

    Filename includes (categories), labels and data.
    We append categories and labels to lists.
    Data are inserted to a list of lists due to the categories.

    i.e.
    labels = ['2001', '2002', '2003', ...]
    categories = ['boys', 'girls']
    data = [ [20.4, 40.5], [30.7, 100.0], ...]

    :param args: dict of command-line options; ``filename`` of '-' means
        read from stdin.
    :return: (categories, labels, data, colors) tuple.
    """
    filename = args['filename']
    stdin = filename == '-'

    if args['verbose']:
        print(f'>> Reading data from {( "stdin" if stdin else filename )}')

    print('')
    if args['title']:
        print('# ' + args['title'] + '\n')

    categories, labels, data, colors = ([] for i in range(4))

    f = sys.stdin if stdin else open(filename, "r")
    try:
        for line in f:
            line = line.strip()
            if line:
                if not line.startswith('#'):
                    if line.find(DELIM) > 0:
                        cols = line.split(DELIM)
                    else:
                        cols = line.split()

                    # Line contains categories.
                    if line.startswith('@'):
                        cols[0] = cols[0].replace("@ ", "")
                        categories = cols

                    # Line contains label and values.
                    else:
                        labels.append(cols[0].strip())
                        data_points = []
                        for i in range(1, len(cols)):
                            data_points.append(float(cols[i].strip()))
                        data.append(data_points)
    finally:
        # BUGFIX: only close real files. The original closed sys.stdin as
        # well, breaking any later reads from standard input.
        if not stdin:
            f.close()

    # Check that all data are valid. (i.e. There are no missing values.)
    colors = check_data(labels, data, args)
    if categories:
        # Print categories' names above the graph.
        print_categories(categories, colors)

    return categories, labels, data, colors
def calendar_heatmap(data, labels, args):
    """Print a calendar heatmap.

    :param data: list of rows; only the first value of each row is used
        as the intensity for that day.
    :param labels: list of ISO date strings ('%Y-%m-%d'), parallel to
        ``data``.
    :param args: dict of command-line options (``color``, ``custom_tick``,
        ``start_dt``).
    """
    if args['color']:
        colornum = AVAILABLE_COLORS.get(args['color'][0])
    else:
        colornum = AVAILABLE_COLORS.get('blue')

    # Map date string -> value (first column only).
    dt_dict = {}
    for label, dat in zip(labels, data):
        dt_dict[label] = dat[0]

    # get max value
    # BUGFIX: `max(data)[0]` compared the rows lexicographically and took
    # the first element of the "largest" row — not the maximum of the
    # plotted first-column values.
    max_val = float(max(dat[0] for dat in data))

    tick_1 = "░"
    tick_2 = "▒"
    tick_3 = "▓"
    tick_4 = "█"

    if args['custom_tick']:
        tick_1 = tick_2 = tick_3 = tick_4 = args['custom_tick']

    # check if start day set, otherwise use one year ago
    if args['start_dt']:
        start_dt = datetime.strptime(args['start_dt'], '%Y-%m-%d')
    else:
        start = datetime.now()
        start_dt = datetime(year=start.year - 1, month=start.month,
                            day=start.day)

    # modify start date to be a Monday, subtract weekday() from day
    start_dt = start_dt - timedelta(start_dt.weekday())

    # TODO: legend doesn't line up properly for all start dates/data
    # top legend for months
    sys.stdout.write(" ")
    for month in range(13):
        month_dt = datetime(year=start_dt.year, month=start_dt.month,
                            day=1) + timedelta(days=month * 31)
        sys.stdout.write(month_dt.strftime("%b") + " ")
        if args['custom_tick']:  # assume custom tick is emoji which is one wider
            sys.stdout.write(" ")
    sys.stdout.write('\n')

    for day in range(7):
        sys.stdout.write(DAYS[day] + ': ')
        for week in range(53):
            day_ = start_dt + timedelta(days=day + week * 7)
            day_str = day_.strftime("%Y-%m-%d")

            # Bucket the value into quartiles of the maximum.
            if day_str in dt_dict:
                if dt_dict[day_str] > max_val * 0.75:
                    tick = tick_4
                elif dt_dict[day_str] > max_val * 0.50:
                    tick = tick_3
                elif dt_dict[day_str] > max_val * 0.25:
                    tick = tick_2
                else:
                    tick = tick_1
            else:
                tick = ' '

            if colornum:
                sys.stdout.write(f'\033[{colornum}m')
            sys.stdout.write(tick)
            if colornum:
                sys.stdout.write('\033[0m')
        sys.stdout.write('\n')
def fuse_get_context():
    """Return the (uid, gid, pid) tuple of the caller of the current
    FUSE operation, as reported by libfuse."""
    context = _libfuse.fuse_get_context().contents
    return context.uid, context.gid, context.pid
def fuse_exit():
    """Shut down the FUSE mount and make the blocking FUSE(...) call
    return, similar to sending SIGINT to the process.

    Flags the native FUSE session as terminated, causing any running
    FUSE event loop to exit at its next opportunity.
    (see fuse.c::fuse_exit)
    """
    session_ptr = ctypes.c_void_p(_libfuse.fuse_get_context().contents.fuse)
    _libfuse.fuse_exit(session_ptr)
'Decorator for the methods that follow'
# Wraps each FUSE operation callback so Python exceptions are translated
# into negative errno return values instead of propagating into libfuse.
try:
    if func.__name__ == "init":
        # init may not fail, as its return code is just stored as
        # private_data field of struct fuse_context
        return func(*args, **kwargs) or 0
    else:
        try:
            # FUSE expects 0 on success; a None/falsy result maps to 0.
            return func(*args, **kwargs) or 0
        except OSError as e:
            if e.errno > 0:
                log.debug(
                    "FUSE operation %s raised a %s, returning errno %s.",
                    func.__name__, type(e), e.errno, exc_info=True)
                return -e.errno
            else:
                # A non-positive errno would be meaningless to the kernel;
                # report EINVAL instead.
                log.error(
                    "FUSE operation %s raised an OSError with negative "
                    "errno %s, returning errno.EINVAL.",
                    func.__name__, e.errno, exc_info=True)
                return -errno.EINVAL
        except Exception:
            log.error("Uncaught exception from FUSE operation %s, "
                      "returning errno.EINVAL.",
                      func.__name__, exc_info=True)
            return -errno.EINVAL
except BaseException as e:
    # NOTE(review): `self` does not appear among this function's recorded
    # parameters — presumably it is captured from an enclosing scope or
    # the extracted signature dropped `self`; verify against full source.
    self.__critical_exception = e
    log.critical(
        "Uncaught critical exception from FUSE operation %s, aborting.",
        func.__name__, exc_info=True)
    # the raised exception (even SystemExit) will be caught by FUSE
    # potentially causing SIGSEGV, so tell system to stop/interrupt FUSE
    fuse_exit()
    return -errno.EFAULT
|
def _wrapper(func, *args, **kwargs)
|
Decorator for the methods that follow
| 4.80938 | 4.572 | 1.05192 |
def symlink(self, source, target):
    """Create a symlink `target -> source` (e.g. ln -s source target).

    Note the argument swap: libfuse hands us (source, target), while the
    operations layer expects (target, source).
    """
    decoded_target = target.decode(self.encoding)
    decoded_source = source.decode(self.encoding)
    return self.operations('symlink', decoded_target, decoded_source)
def link(self, source, target):
    """Create a hard link `target -> source` (e.g. ln source target).

    Note the argument swap: libfuse hands us (source, target), while the
    operations layer expects (target, source).
    """
    decoded_target = target.decode(self.encoding)
    decoded_source = source.decode(self.encoding)
    return self.operations('link', decoded_target, decoded_source)
def getattr(self, path, fh=None):
    """Return a dictionary with keys identical to the stat C structure
    of stat(2); st_atime, st_mtime and st_ctime should be floats.

    NOTE: There is an incompatibility between Linux and Mac OS X
    concerning st_nlink of directories. Mac OS X counts all files inside
    the directory, while Linux counts only the subdirectories.
    """
    # Only the filesystem root exists in this minimal example.
    if path != '/':
        raise FuseOSError(errno.ENOENT)
    return {'st_mode': S_IFDIR | 0o755, 'st_nlink': 2}
def lookup(self, req, parent, name):
    """Look up a directory entry by name and get its attributes.

    Valid replies: reply_entry, reply_err.
    This default implementation reports that no such entry exists.
    """
    self.reply_err(req, errno.ENOENT)
def getattr(self, req, ino, fi):
    """Get file attributes.

    Valid replies: reply_attr, reply_err.
    Only the root inode (1) exists; any other inode yields ENOENT.
    """
    if ino != 1:
        self.reply_err(req, errno.ENOENT)
        return
    root_attr = {'st_ino': 1, 'st_mode': S_IFDIR | 0o755, 'st_nlink': 2}
    self.reply_attr(req, root_attr, 1.0)
def setattr(self, req, ino, attr, to_set, fi):
    """Set file attributes.

    Valid replies: reply_attr, reply_err.
    This filesystem is read-only, so every attempt fails with EROFS.
    """
    self.reply_err(req, errno.EROFS)
def mknod(self, req, parent, name, mode, rdev):
    """Create file node.

    Valid replies: reply_entry, reply_err.
    This filesystem is read-only, so every attempt fails with EROFS.
    """
    self.reply_err(req, errno.EROFS)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.