code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
---|---|---|---|---|---|
if (cls._storage_media_image_remainder_list is None or
cls._storage_media_image_store is None):
specification_store, remainder_list = cls._GetSpecificationStore(
definitions.FORMAT_CATEGORY_STORAGE_MEDIA_IMAGE)
cls._storage_media_image_remainder_list = remainder_list
cls._storage_media_image_store = specification_store
if cls._storage_media_image_scanner is None:
cls._storage_media_image_scanner = cls._GetSignatureScanner(
cls._storage_media_image_store)
return cls._GetTypeIndicators(
cls._storage_media_image_scanner, cls._storage_media_image_store,
cls._storage_media_image_remainder_list, path_spec,
resolver_context=resolver_context) | def GetStorageMediaImageTypeIndicators(cls, path_spec, resolver_context=None) | Determines if a file contains a supported storage media image types.
Args:
path_spec (PathSpec): path specification.
resolver_context (Optional[Context]): resolver context, where None
represents the built-in context which is not multi process safe.
Returns:
list[str]: supported format type indicators. | 2.708931 | 2.906393 | 0.932059 |
if (cls._volume_system_remainder_list is None or
cls._volume_system_store is None):
specification_store, remainder_list = cls._GetSpecificationStore(
definitions.FORMAT_CATEGORY_VOLUME_SYSTEM)
cls._volume_system_remainder_list = remainder_list
cls._volume_system_store = specification_store
if cls._volume_system_scanner is None:
cls._volume_system_scanner = cls._GetSignatureScanner(
cls._volume_system_store)
return cls._GetTypeIndicators(
cls._volume_system_scanner, cls._volume_system_store,
cls._volume_system_remainder_list, path_spec,
resolver_context=resolver_context) | def GetVolumeSystemTypeIndicators(cls, path_spec, resolver_context=None) | Determines if a file contains a supported volume system types.
Args:
path_spec (PathSpec): path specification.
resolver_context (Optional[Context]): resolver context, where None
represents the built-in context which is not multi process safe.
Returns:
list[str]: supported format type indicators. | 3.101903 | 3.342274 | 0.928082 |
try:
uncompressed_data = self._zlib_decompressor.decompress(compressed_data)
remaining_compressed_data = getattr(
self._zlib_decompressor, 'unused_data', b'')
except zlib.error as exception:
raise errors.BackEndError((
'Unable to decompress zlib compressed stream with error: '
'{0!s}.').format(exception))
return uncompressed_data, remaining_compressed_data | def Decompress(self, compressed_data) | Decompresses the compressed data.
Args:
compressed_data (bytes): compressed data.
Returns:
tuple(bytes, bytes): uncompressed data and remaining compressed data.
Raises:
BackEndError: if the zlib compressed stream cannot be decompressed. | 3.415501 | 2.427465 | 1.407023 |
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
try:
tsk_image_object = tsk_image.TSKFileSystemImage(file_object)
tsk_file_system = pytsk3.FS_Info(tsk_image_object)
except:
file_object.close()
raise
self._file_object = file_object
self._tsk_file_system = tsk_file_system | def _Open(self, path_spec, mode='rb') | Opens the file system object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file system object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid. | 2.120769 | 2.229355 | 0.951293 |
# Opening a file by inode number is faster than opening a file by location.
tsk_file = None
inode = getattr(path_spec, 'inode', None)
location = getattr(path_spec, 'location', None)
try:
if inode is not None:
tsk_file = self._tsk_file_system.open_meta(inode=inode)
elif location is not None:
tsk_file = self._tsk_file_system.open(location)
except IOError:
pass
return tsk_file is not None | def FileEntryExistsByPathSpec(self, path_spec) | Determines if a file entry for a path specification exists.
Args:
path_spec (PathSpec): path specification.
Returns:
bool: True if the file entry exists. | 3.108978 | 3.371929 | 0.922018 |
# Opening a file by inode number is faster than opening a file by location.
tsk_file = None
inode = getattr(path_spec, 'inode', None)
location = getattr(path_spec, 'location', None)
root_inode = self.GetRootInode()
if (location == self.LOCATION_ROOT or
(inode is not None and root_inode is not None and inode == root_inode)):
tsk_file = self._tsk_file_system.open(self.LOCATION_ROOT)
return tsk_file_entry.TSKFileEntry(
self._resolver_context, self, path_spec, tsk_file=tsk_file,
is_root=True)
try:
if inode is not None:
tsk_file = self._tsk_file_system.open_meta(inode=inode)
elif location is not None:
tsk_file = self._tsk_file_system.open(location)
except IOError:
pass
if tsk_file is None:
return None
# TODO: is there a way to determine the parent inode number here?
return tsk_file_entry.TSKFileEntry(
self._resolver_context, self, path_spec, tsk_file=tsk_file) | def GetFileEntryByPathSpec(self, path_spec) | Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
TSKFileEntry: a file entry or None if not available. | 2.392823 | 2.424213 | 0.987051 |
if self._tsk_fs_type is None:
self._tsk_fs_type = pytsk3.TSK_FS_TYPE_UNSUPP
if (not self._tsk_file_system or
not hasattr(self._tsk_file_system, 'info')):
return self._tsk_fs_type
self._tsk_fs_type = getattr(
self._tsk_file_system.info, 'ftype', pytsk3.TSK_FS_TYPE_UNSUPP)
return self._tsk_fs_type | def GetFsType(self) | Retrieves the file system type.
Returns:
pytsk3.TSK_FS_TYPE_ENUM: file system type. | 2.51551 | 2.192013 | 1.14758 |
kwargs = {}
root_inode = self.GetRootInode()
if root_inode is not None:
kwargs['inode'] = root_inode
kwargs['location'] = self.LOCATION_ROOT
kwargs['parent'] = self._path_spec.parent
path_spec = tsk_path_spec.TSKPathSpec(**kwargs)
return self.GetFileEntryByPathSpec(path_spec) | def GetRootFileEntry(self) | Retrieves the root file entry.
Returns:
TSKFileEntry: a file entry. | 3.358673 | 3.043639 | 1.103506 |
# Opening a file by inode number is faster than opening a file
# by location.
inode = getattr(path_spec, 'inode', None)
location = getattr(path_spec, 'location', None)
if inode is not None:
tsk_file = self._tsk_file_system.open_meta(inode=inode)
elif location is not None:
tsk_file = self._tsk_file_system.open(location)
else:
raise errors.PathSpecError(
'Path specification missing inode and location.')
return tsk_file | def GetTSKFileByPathSpec(self, path_spec) | Retrieves the SleuthKit file object for a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
pytsk3.File: TSK file.
Raises:
PathSpecError: if the path specification is missing inode and location. | 2.97542 | 2.809793 | 1.058946 |
tsk_fs_type = self.GetFsType()
return tsk_fs_type in [
pytsk3.TSK_FS_TYPE_HFS, pytsk3.TSK_FS_TYPE_HFS_DETECT] | def IsHFS(self) | Determines if the file system is HFS, HFS+ or HFSX.
Returns:
bool: True if the file system is HFS. | 3.937999 | 4.027835 | 0.977696 |
tsk_fs_type = self.GetFsType()
return tsk_fs_type in [
pytsk3.TSK_FS_TYPE_NTFS, pytsk3.TSK_FS_TYPE_NTFS_DETECT] | def IsNTFS(self) | Determines if the file system is NTFS.
Returns:
bool: True if the file system is NTFS. | 3.552807 | 3.537721 | 1.004264 |
# pylint: disable=protected-access
super(EWFFile, self)._Close()
for file_object in self._file_objects:
file_object.close()
self._file_objects = [] | def _Close(self) | Closes the file-like object. | 4.93883 | 4.135974 | 1.194115 |
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
parent_path_spec = path_spec.parent
file_system = resolver.Resolver.OpenFileSystem(
parent_path_spec, resolver_context=self._resolver_context)
# Note that we cannot use pyewf's glob function since it does not
# handle the file system abstraction dfvfs provides.
segment_file_path_specs = ewf.EWFGlobPathSpec(file_system, path_spec)
if not segment_file_path_specs:
return None
if parent_path_spec.IsSystemLevel():
# Typically the file-like object cache should have room for 127 items.
self._resolver_context.SetMaximumNumberOfFileObjects(
len(segment_file_path_specs) + 127)
for segment_file_path_spec in segment_file_path_specs:
file_object = resolver.Resolver.OpenFileObject(
segment_file_path_spec, resolver_context=self._resolver_context)
self._file_objects.append(file_object)
ewf_handle = pyewf.handle()
ewf_handle.open_file_objects(self._file_objects)
return ewf_handle | def _OpenFileObject(self, path_spec) | Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
pyewf.handle: a file-like object or None.
Raises:
PathSpecError: if the path specification is invalid. | 3.428213 | 3.156809 | 1.085974 |
if not path_spec:
raise ValueError('Missing path specification.')
if path_spec.HasParent():
raise errors.PathSpecError('Unsupported path specification with parent.')
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
# Windows does not support running os.stat on device files so we use
# libsmdev to do an initial check.
try:
is_device = pysmdev.check_device(location)
except IOError as exception:
# Since os.stat() will not recognize Windows device file names and
# will return '[Error 87] The parameter is incorrect' we check here
# if pysmdev exception message contains ' access denied ' and raise
# AccessError instead.
# Note that exception.message no longer works in Python 3.
exception_string = str(exception)
if not isinstance(exception_string, py2to3.UNICODE_TYPE):
exception_string = py2to3.UNICODE_TYPE(
exception_string, errors='replace')
if ' access denied ' in exception_string:
raise errors.AccessError(
'Access denied to file: {0:s} with error: {1!s}'.format(
location, exception_string))
is_device = False
if not is_device:
try:
stat_info = os.stat(location)
except OSError as exception:
raise IOError('Unable to open file with error: {0!s}.'.format(
exception))
# In case the libsmdev check is not able to detect the device also use
# the stat information.
if stat.S_ISCHR(stat_info.st_mode) or stat.S_ISBLK(stat_info.st_mode):
is_device = True
if is_device:
self._file_object = pysmdev.handle()
self._file_object.open(location, mode=mode)
self._size = self._file_object.media_size
else:
self._file_object = open(location, mode=mode)
self._size = stat_info.st_size | def _Open(self, path_spec=None, mode='rb') | Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid. | 3.200272 | 3.161696 | 1.012201 |
if not self._is_open:
raise IOError('Not opened.')
if size is None:
size = self._size - self._file_object.tell()
return self._file_object.read(size) | def read(self, size=None) | Reads a byte string from the file-like object at the current offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
size (Optional[int]): number of bytes to read, where None is all
remaining data.
Returns:
bytes: data read.
Raises:
IOError: if the read failed.
OSError: if the read failed. | 3.70846 | 3.883105 | 0.955024 |
if not self._is_open:
raise IOError('Not opened.')
# For a yet unknown reason a Python file-like object on Windows allows for
# invalid whence values to be passed to the seek function. This check
# makes sure the behavior of the function is the same on all platforms.
if whence not in [os.SEEK_SET, os.SEEK_CUR, os.SEEK_END]:
raise IOError('Unsupported whence.')
self._file_object.seek(offset, whence) | def seek(self, offset, whence=os.SEEK_SET) | Seeks to an offset within the file-like object.
Args:
offset (int): offset to seek to.
whence (Optional(int)): value that indicates whether offset is an absolute
or relative position within the file.
Raises:
IOError: if the seek failed.
OSError: if the seek failed. | 5.051412 | 4.906955 | 1.029439 |
try:
uncompressed_data = self._bz2_decompressor.decompress(compressed_data)
remaining_compressed_data = getattr(
self._bz2_decompressor, 'unused_data', b'')
except (EOFError, IOError) as exception:
raise errors.BackEndError((
'Unable to decompress BZIP2 compressed stream with error: '
'{0!s}.').format(exception))
return uncompressed_data, remaining_compressed_data | def Decompress(self, compressed_data) | Decompresses the compressed data.
Args:
compressed_data (bytes): compressed data.
Returns:
tuple(bytes, bytes): uncompressed data and remaining compressed data.
Raises:
BackEndError: if the BZIP2 compressed stream cannot be decompressed. | 3.526127 | 2.41111 | 1.46245 |
decrypted_data = self._rc4_cipher.decrypt(encrypted_data)
return decrypted_data, b'' | def Decrypt(self, encrypted_data) | Decrypts the encrypted data.
Args:
encrypted_data (bytes): encrypted data.
Returns:
tuple[bytes,bytes]: decrypted data and remaining encrypted data. | 7.640221 | 7.816691 | 0.977424 |
if self._connection:
self._cursor = None
self._connection.close()
self._connection = None
# TODO: move this to a central temp file manager and have it track errors.
# https://github.com/log2timeline/dfvfs/issues/92
try:
os.remove(self._temp_file_path)
except (IOError, OSError):
pass
self._temp_file_path = '' | def Close(self) | Closes the database file object.
Raises:
IOError: if the close failed.
OSError: if the close failed. | 4.996913 | 4.53584 | 1.101651 |
if not self._connection:
raise IOError('Not opened.')
self._cursor.execute(self._NUMBER_OF_ROWS_QUERY.format(table_name))
row = self._cursor.fetchone()
if not row:
raise IOError(
'Unable to retrieve number of rows of table: {0:s}'.format(
table_name))
number_of_rows = row[0]
if isinstance(number_of_rows, py2to3.STRING_TYPES):
try:
number_of_rows = int(number_of_rows, 10)
except ValueError as exception:
raise IOError((
'Unable to determine number of rows of table: {0:s} '
'with error: {1!s}').format(table_name, exception))
return number_of_rows | def GetNumberOfRows(self, table_name) | Retrieves the number of rows in the table.
Args:
table_name (str): name of the table.
Returns:
int: number of rows.
Raises:
IOError: if the file-like object has not been opened.
OSError: if the file-like object has not been opened. | 2.104744 | 1.893804 | 1.111384 |
if not self._connection:
raise IOError('Not opened.')
if not column_name:
return False
table_name = table_name.lower()
column_names = self._column_names_per_table.get(table_name, None)
if column_names is None:
column_names = []
self._cursor.execute(self._HAS_COLUMN_QUERY.format(table_name))
for row in self._cursor.fetchall():
if not row[1]:
continue
row_column_name = row[1]
if isinstance(row_column_name, bytes):
row_column_name = row_column_name.decode('utf-8')
column_names.append(row_column_name.lower())
self._column_names_per_table[table_name] = column_names
column_name = column_name.lower()
return column_name in column_names | def HasColumn(self, table_name, column_name) | Determines if a specific column exists.
Args:
table_name (str): name of the table.
column_name (str): name of the column.
Returns:
bool: True if the column exists.
Raises:
IOError: if the database file is not opened.
OSError: if the database file is not opened. | 2.115135 | 2.06858 | 1.022506 |
if not self._connection:
raise IOError('Not opened.')
if not table_name:
return False
if self._table_names is None:
self._table_names = []
self._cursor.execute(self._HAS_TABLE_QUERY)
for row in self._cursor.fetchall():
if not row[0]:
continue
row_table_name = row[0]
if isinstance(row_table_name, bytes):
row_table_name = row_table_name.decode('utf-8')
self._table_names.append(row_table_name.lower())
table_name = table_name.lower()
return table_name in self._table_names | def HasTable(self, table_name) | Determines if a specific table exists.
Args:
table_name (str): name of the table.
Returns:
bool: True if the column exists.
Raises:
IOError: if the database file is not opened.
OSError: if the database file is not opened. | 2.277976 | 2.187695 | 1.041267 |
if not file_object:
raise ValueError('Missing file-like object.')
# Since pysqlite3 does not provide an exclusive read-only mode and
# cannot interact with a file-like object directly we make a temporary
# copy. Before making a copy we check the header signature.
file_object.seek(0, os.SEEK_SET)
data = file_object.read(len(self._HEADER_SIGNATURE))
if data != self._HEADER_SIGNATURE:
file_object.close()
raise IOError('Unsupported SQLite database signature.')
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
self._temp_file_path = temp_file.name
while data:
temp_file.write(data)
data = file_object.read(self._COPY_BUFFER_SIZE)
self._connection = sqlite3.connect(self._temp_file_path)
self._connection.text_factory = bytes
self._cursor = self._connection.cursor() | def Open(self, file_object) | Opens the database file object.
Args:
file_object (FileIO): file-like object.
Raises:
IOError: if the SQLite database signature does not match.
OSError: if the SQLite database signature does not match.
ValueError: if the file-like object is invalid. | 3.284811 | 3.01509 | 1.089457 |
# TODO: catch Warning and return None.
# Note that we cannot pass parameters as a keyword argument here.
# A parameters value of None is not supported.
if parameters:
self._cursor.execute(query, parameters)
else:
self._cursor.execute(query)
return self._cursor.fetchall() | def Query(self, query, parameters=None) | Queries the database file.
Args:
query (str): SQL query.
parameters (Optional[dict|tuple]): query parameters.
Returns:
list[sqlite3.Row]: rows resulting from the query. | 6.106789 | 6.107788 | 0.999837 |
string_parts = []
string_parts.append(getattr(path_spec.parent, 'comparable', ''))
string_parts.append('type: {0:s}'.format(path_spec.type_indicator))
return ''.join(string_parts) | def _GetFileSystemCacheIdentifier(self, path_spec) | Determines the file system cache identifier for the path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
str: identifier of the VFS object. | 3.440039 | 4.13195 | 0.832546 |
self._file_object_cache.CacheObject(path_spec.comparable, file_object) | def CacheFileObject(self, path_spec, file_object) | Caches a file-like object based on a path specification.
Args:
path_spec (PathSpec): path specification.
file_object (FileIO): file-like object. | 5.33009 | 6.713336 | 0.793955 |
identifier = self._GetFileSystemCacheIdentifier(path_spec)
self._file_system_cache.CacheObject(identifier, file_system) | def CacheFileSystem(self, path_spec, file_system) | Caches a file system object based on a path specification.
Args:
path_spec (PathSpec): path specification.
file_system (FileSystem): file system object. | 6.604201 | 8.14401 | 0.810927 |
cache_value = self._file_object_cache.GetCacheValue(path_spec.comparable)
if not cache_value:
return False
while not cache_value.IsDereferenced():
cache_value.vfs_object.close()
return True | def ForceRemoveFileObject(self, path_spec) | Forces the removal of a file-like object based on a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
bool: True if the file-like object was cached. | 6.194344 | 6.554058 | 0.945116 |
cache_value = self._file_object_cache.GetCacheValue(path_spec.comparable)
if not cache_value:
return None
return cache_value.reference_count | def GetFileObjectReferenceCount(self, path_spec) | Retrieves the reference count of a cached file-like object.
Args:
path_spec (PathSpec): path specification.
Returns:
int: reference count or None if there is no file-like object for
the corresponding path specification cached. | 4.241524 | 4.307525 | 0.984678 |
identifier = self._GetFileSystemCacheIdentifier(path_spec)
return self._file_system_cache.GetObject(identifier) | def GetFileSystem(self, path_spec) | Retrieves a file system object defined by path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
FileSystem: a file system object or None if not cached. | 7.254837 | 8.194967 | 0.88528 |
identifier = self._GetFileSystemCacheIdentifier(path_spec)
cache_value = self._file_system_cache.GetCacheValue(identifier)
if not cache_value:
return None
return cache_value.reference_count | def GetFileSystemReferenceCount(self, path_spec) | Retrieves the reference count of a cached file system object.
Args:
path_spec (PathSpec): path specification.
Returns:
int: reference count or None if there is no file system object for
the corresponding path specification cached. | 3.735253 | 3.57036 | 1.046184 |
identifier = self._GetFileSystemCacheIdentifier(path_spec)
self._file_system_cache.GrabObject(identifier) | def GrabFileSystem(self, path_spec) | Grabs a cached file system object defined by path specification.
Args:
path_spec (PathSpec): path specification. | 9.715619 | 12.504594 | 0.776964 |
identifier, cache_value = self._file_object_cache.GetCacheValueByObject(
file_object)
if not identifier:
raise RuntimeError('Object not cached.')
if not cache_value:
raise RuntimeError('Invalid cache value.')
self._file_object_cache.ReleaseObject(identifier)
result = cache_value.IsDereferenced()
if result:
self._file_object_cache.RemoveObject(identifier)
return result | def ReleaseFileObject(self, file_object) | Releases a cached file-like object.
Args:
file_object (FileIO): file-like object.
Returns:
bool: True if the file-like object can be closed.
Raises:
PathSpecError: if the path specification is incorrect.
RuntimeError: if the file-like object is not cached or an inconsistency
is detected in the cache. | 3.892056 | 3.53509 | 1.100978 |
identifier, cache_value = self._file_system_cache.GetCacheValueByObject(
file_system)
if not identifier:
raise RuntimeError('Object not cached.')
if not cache_value:
raise RuntimeError('Invalid cache value.')
self._file_system_cache.ReleaseObject(identifier)
result = cache_value.IsDereferenced()
if result:
self._file_system_cache.RemoveObject(identifier)
return result | def ReleaseFileSystem(self, file_system) | Releases a cached file system object.
Args:
file_system (FileSystem): file system object.
Returns:
bool: True if the file system object can be closed.
Raises:
PathSpecError: if the path specification is incorrect.
RuntimeError: if the file system object is not cached or an inconsistency
is detected in the cache. | 4.066647 | 3.676834 | 1.106019 |
# pylint: disable=protected-access
super(RawFile, self)._Close()
for file_object in self._file_objects:
file_object.close()
self._file_objects = [] | def _Close(self) | Closes the file-like object. | 4.643444 | 3.650631 | 1.271957 |
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
parent_path_spec = path_spec.parent
file_system = resolver.Resolver.OpenFileSystem(
parent_path_spec, resolver_context=self._resolver_context)
# Note that we cannot use pysmraw's glob function since it does not
# handle the file system abstraction dfvfs provides.
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
if not segment_file_path_specs:
return None
if parent_path_spec.IsSystemLevel():
# Typically the file-like object cache should have room for 127 items.
self._resolver_context.SetMaximumNumberOfFileObjects(
len(segment_file_path_specs) + 127)
file_objects = []
for segment_file_path_spec in segment_file_path_specs:
file_object = resolver.Resolver.OpenFileObject(
segment_file_path_spec, resolver_context=self._resolver_context)
file_objects.append(file_object)
raw_handle = pysmraw.handle()
raw_handle.open_file_objects(file_objects)
return raw_handle | def _OpenFileObject(self, path_spec) | Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
pysmraw.handle: a file-like object or None.
Raises:
PathSpecError: if the path specification is invalid. | 3.462806 | 3.157377 | 1.096735 |
if not self._file_object_set_in_init:
self._file_object.close()
self._file_object = None
self._decoder = None
self._decoded_data = b''
self._encoded_data = b'' | def _Close(self) | Closes the file-like object.
If the file-like object was passed in the init function
the encoded stream file-like object does not control
the file-like object and should not actually close it. | 4.979322 | 3.838631 | 1.297161 |
self._file_object.seek(0, os.SEEK_SET)
self._decoder = self._GetDecoder()
self._decoded_data = b''
encoded_data_offset = 0
encoded_data_size = self._file_object.get_size()
decoded_stream_size = 0
while encoded_data_offset < encoded_data_size:
read_count = self._ReadEncodedData(self._ENCODED_DATA_BUFFER_SIZE)
if read_count == 0:
break
encoded_data_offset += read_count
decoded_stream_size += self._decoded_data_size
return decoded_stream_size | def _GetDecodedStreamSize(self) | Retrieves the decoded stream size.
Returns:
int: decoded stream size. | 2.573799 | 2.641382 | 0.974414 |
self._file_object.seek(0, os.SEEK_SET)
self._decoder = self._GetDecoder()
self._decoded_data = b''
encoded_data_offset = 0
encoded_data_size = self._file_object.get_size()
while encoded_data_offset < encoded_data_size:
read_count = self._ReadEncodedData(self._ENCODED_DATA_BUFFER_SIZE)
if read_count == 0:
break
encoded_data_offset += read_count
if decoded_data_offset < self._decoded_data_size:
self._decoded_data_offset = decoded_data_offset
break
decoded_data_offset -= self._decoded_data_size | def _AlignDecodedDataOffset(self, decoded_data_offset) | Aligns the encoded file with the decoded data offset.
Args:
decoded_data_offset (int): decoded data offset. | 2.462614 | 2.471824 | 0.996274 |
encoded_data = self._file_object.read(read_size)
read_count = len(encoded_data)
self._encoded_data = b''.join([self._encoded_data, encoded_data])
self._decoded_data, self._encoded_data = (
self._decoder.Decode(self._encoded_data))
self._decoded_data_size = len(self._decoded_data)
return read_count | def _ReadEncodedData(self, read_size) | Reads encoded data from the file-like object.
Args:
read_size (int): number of bytes of encoded data to read.
Returns:
int: number of bytes of encoded data read. | 2.804983 | 3.220531 | 0.870969 |
if self._is_open:
raise IOError('Already open.')
if decoded_stream_size < 0:
raise ValueError((
'Invalid decoded stream size: {0:d} value out of '
'bounds.').format(decoded_stream_size))
self._decoded_stream_size = decoded_stream_size | def SetDecodedStreamSize(self, decoded_stream_size) | Sets the decoded stream size.
This function is used to set the decoded stream size if it can be
determined separately.
Args:
decoded_stream_size (int): size of the decoded stream in bytes.
Raises:
IOError: if the file-like object is already open.
OSError: if the file-like object is already open.
ValueError: if the decoded stream size is invalid. | 3.398859 | 2.810497 | 1.209345 |
if not self._is_open:
raise IOError('Not opened.')
if self._current_offset < 0:
raise IOError(
'Invalid current offset: {0:d} value less than zero.'.format(
self._current_offset))
if self._decoded_stream_size is None:
self._decoded_stream_size = self._GetDecodedStreamSize()
if self._decoded_stream_size < 0:
raise IOError('Invalid decoded stream size.')
if self._current_offset >= self._decoded_stream_size:
return b''
if self._realign_offset:
self._AlignDecodedDataOffset(self._current_offset)
self._realign_offset = False
if size is None:
size = self._decoded_stream_size
if self._current_offset + size > self._decoded_stream_size:
size = self._decoded_stream_size - self._current_offset
decoded_data = b''
if size == 0:
return decoded_data
while size > self._decoded_data_size:
decoded_data = b''.join([
decoded_data,
self._decoded_data[self._decoded_data_offset:]])
remaining_decoded_data_size = (
self._decoded_data_size - self._decoded_data_offset)
self._current_offset += remaining_decoded_data_size
size -= remaining_decoded_data_size
if self._current_offset >= self._decoded_stream_size:
break
read_count = self._ReadEncodedData(self._ENCODED_DATA_BUFFER_SIZE)
self._decoded_data_offset = 0
if read_count == 0:
break
if size > 0:
slice_start_offset = self._decoded_data_offset
slice_end_offset = slice_start_offset + size
decoded_data = b''.join([
decoded_data,
self._decoded_data[slice_start_offset:slice_end_offset]])
self._decoded_data_offset += size
self._current_offset += size
return decoded_data | def read(self, size=None) | Reads a byte string from the file-like object at the current offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
size (Optional[int]): number of bytes to read, where None is all
remaining data.
Returns:
bytes: data read.
Raises:
IOError: if the read failed.
OSError: if the read failed. | 2.017758 | 2.032737 | 0.992631 |
if not self._is_open:
raise IOError('Not opened.')
if self._current_offset < 0:
raise IOError(
'Invalid current offset: {0:d} value less than zero.'.format(
self._current_offset))
if whence == os.SEEK_CUR:
offset += self._current_offset
elif whence == os.SEEK_END:
if self._decoded_stream_size is None:
self._decoded_stream_size = self._GetDecodedStreamSize()
if self._decoded_stream_size is None:
raise IOError('Invalid decoded stream size.')
offset += self._decoded_stream_size
elif whence != os.SEEK_SET:
raise IOError('Unsupported whence.')
if offset < 0:
raise IOError('Invalid offset value less than zero.')
if offset != self._current_offset:
self._current_offset = offset
self._realign_offset = True | def seek(self, offset, whence=os.SEEK_SET) | Seeks to an offset within the file-like object.
Args:
offset (int): offset to seek to.
whence (Optional(int)): value that indicates whether offset is an absolute
or relative position within the file.
Raises:
IOError: if the seek failed.
OSError: if the seek failed. | 2.31301 | 2.350619 | 0.984 |
if not self._is_open:
raise IOError('Not opened.')
if self._decoded_stream_size is None:
self._decoded_stream_size = self._GetDecodedStreamSize()
return self._decoded_stream_size | def get_size(self) | Retrieves the size of the file-like object.
Returns:
int: size of the decoded stream.
Raises:
IOError: if the file-like object has not been opened.
OSError: if the file-like object has not been opened. | 4.086142 | 3.218726 | 1.269491 |
if attribute.identifier in self._attributes:
raise KeyError((
'Volume attribute object already set for volume attribute '
'identifier: {0:s}.').format(attribute.identifier))
self._attributes[attribute.identifier] = attribute | def _AddAttribute(self, attribute) | Adds an attribute.
Args:
attribute (VolumeAttribute): a volume attribute.
Raises:
KeyError: if volume attribute is already set for the corresponding volume
attribute identifier. | 4.59796 | 2.711618 | 1.695652 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return iter(self._attributes.values()) | def attributes(self) | generator[VolumeAttribute]: volume attributes generator. | 6.774667 | 5.678485 | 1.193041 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return self._extents | def extents(self) | list[VolumeExtent]: volume extents. | 7.016397 | 6.47279 | 1.083983 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return len(self._attributes) | def number_of_attributes(self) | int: number of attributes. | 6.088315 | 5.107643 | 1.192001 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return len(self._extents) | def number_of_extents(self) | int: number of extents. | 6.013635 | 5.192192 | 1.158207 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
if identifier not in self._attributes:
return None
return self._attributes[identifier] | def GetAttribute(self, identifier) | Retrieves a specific attribute.
Args:
identifier (str): identifier of the attribute within the volume.
Returns:
VolumeAttribute: volume attribute or None if not available. | 3.927527 | 3.925481 | 1.000521 |
if volume.identifier in self._volumes:
raise KeyError(
'Volume object already set for volume identifier: {0:s}'.format(
volume.identifier))
self._volumes[volume.identifier] = volume
self._volume_identifiers.append(volume.identifier) | def _AddVolume(self, volume) | Adds a volume.
Args:
volume (Volume): a volume.
Raises:
KeyError: if volume is already set for the corresponding volume
identifier. | 3.110505 | 2.379332 | 1.307302 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return len(self._sections) | def number_of_sections(self) | int: number of sections. | 5.916162 | 5.05914 | 1.169401 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return len(self._volumes) | def number_of_volumes(self) | int: number of volumes. | 6.261619 | 5.469213 | 1.144885 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return self._sections | def sections(self) | list[VolumeExtent]: sections. | 7.027991 | 6.933893 | 1.013571 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return iter(self._volumes.values()) | def volumes(self) | generator(Volume): volumes generator. | 7.213045 | 5.996637 | 1.202848 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
if section_index < 0 or section_index >= len(self._sections):
return None
return self._sections[section_index] | def GetSectionByIndex(self, section_index) | Retrieves a specific section based on the index.
Args:
section_index (int): index of the section.
Returns:
VolumeExtent: a volume extent or None if not available. | 3.09674 | 3.061388 | 1.011547 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
return self._volumes[volume_identifier] | def GetVolumeByIdentifier(self, volume_identifier) | Retrieves a specific volume based on the identifier.
Args:
volume_identifier (str): identifier of the volume within
the volume system.
Returns:
Volume: a volume. | 5.986366 | 6.253883 | 0.957224 |
if not self._is_parsed:
self._Parse()
self._is_parsed = True
if volume_index < 0 or volume_index >= len(self._volume_identifiers):
return None
volume_identifier = self._volume_identifiers[volume_index]
return self._volumes[volume_identifier] | def GetVolumeByIndex(self, volume_index) | Retrieves a specific volume based on the index.
Args:
volume_index (int): index of the volume.
Returns:
Volume: a volume or None if not available. | 3.044153 | 2.916644 | 1.043718 |
if not self._file_object_set_in_init:
self._file_object.close()
self._file_object = None
self._compressed_data = b''
self._uncompressed_data = b''
self._decompressor = None | def _Close(self) | Closes the file-like object.
If the file-like object was passed in the init function
the compressed stream file-like object does not control
the file-like object and should not actually close it. | 4.487175 | 3.544721 | 1.265875 |
self._file_object.seek(0, os.SEEK_SET)
self._decompressor = self._GetDecompressor()
self._uncompressed_data = b''
compressed_data_offset = 0
compressed_data_size = self._file_object.get_size()
uncompressed_stream_size = 0
while compressed_data_offset < compressed_data_size:
read_count = self._ReadCompressedData(self._COMPRESSED_DATA_BUFFER_SIZE)
if read_count == 0:
break
compressed_data_offset += read_count
uncompressed_stream_size += self._uncompressed_data_size
return uncompressed_stream_size | def _GetUncompressedStreamSize(self) | Retrieves the uncompressed stream size.
Returns:
int: uncompressed stream size. | 2.345181 | 2.436869 | 0.962375 |
self._file_object.seek(0, os.SEEK_SET)
self._decompressor = self._GetDecompressor()
self._uncompressed_data = b''
compressed_data_offset = 0
compressed_data_size = self._file_object.get_size()
while compressed_data_offset < compressed_data_size:
read_count = self._ReadCompressedData(self._COMPRESSED_DATA_BUFFER_SIZE)
if read_count == 0:
break
compressed_data_offset += read_count
if uncompressed_data_offset < self._uncompressed_data_size:
self._uncompressed_data_offset = uncompressed_data_offset
break
uncompressed_data_offset -= self._uncompressed_data_size | def _AlignUncompressedDataOffset(self, uncompressed_data_offset) | Aligns the compressed file with the uncompressed data offset.
Args:
uncompressed_data_offset (int): uncompressed data offset. | 2.243129 | 2.268214 | 0.988941 |
compressed_data = self._file_object.read(read_size)
read_count = len(compressed_data)
self._compressed_data = b''.join([self._compressed_data, compressed_data])
self._uncompressed_data, self._compressed_data = (
self._decompressor.Decompress(self._compressed_data))
self._uncompressed_data_size = len(self._uncompressed_data)
return read_count | def _ReadCompressedData(self, read_size) | Reads compressed data from the file-like object.
Args:
read_size (int): number of bytes of compressed data to read.
Returns:
int: number of bytes of compressed data read. | 2.67348 | 2.910893 | 0.91844 |
if not self._is_open:
raise IOError('Not opened.')
if self._current_offset < 0:
raise IOError(
'Invalid current offset: {0:d} value less than zero.'.format(
self._current_offset))
if self._uncompressed_stream_size is None:
self._uncompressed_stream_size = self._GetUncompressedStreamSize()
if self._uncompressed_stream_size < 0:
raise IOError('Invalid uncompressed stream size.')
if self._current_offset >= self._uncompressed_stream_size:
return b''
if self._realign_offset:
self._AlignUncompressedDataOffset(self._current_offset)
self._realign_offset = False
if size is None:
size = self._uncompressed_stream_size
if self._current_offset + size > self._uncompressed_stream_size:
size = self._uncompressed_stream_size - self._current_offset
uncompressed_data = b''
if size == 0:
return uncompressed_data
while size > self._uncompressed_data_size:
uncompressed_data = b''.join([
uncompressed_data,
self._uncompressed_data[self._uncompressed_data_offset:]])
remaining_uncompressed_data_size = (
self._uncompressed_data_size - self._uncompressed_data_offset)
self._current_offset += remaining_uncompressed_data_size
size -= remaining_uncompressed_data_size
if self._current_offset >= self._uncompressed_stream_size:
break
read_count = self._ReadCompressedData(self._COMPRESSED_DATA_BUFFER_SIZE)
self._uncompressed_data_offset = 0
if read_count == 0:
break
if size > 0:
slice_start_offset = self._uncompressed_data_offset
slice_end_offset = slice_start_offset + size
uncompressed_data = b''.join([
uncompressed_data,
self._uncompressed_data[slice_start_offset:slice_end_offset]])
self._uncompressed_data_offset += size
self._current_offset += size
return uncompressed_data | def read(self, size=None) | Reads a byte string from the file-like object at the current offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
size (Optional[int]): number of bytes to read, where None is all
remaining data.
Returns:
bytes: data read.
Raises:
IOError: if the read failed.
OSError: if the read failed. | 1.915442 | 1.926589 | 0.994214 |
if not self._is_open:
raise IOError('Not opened.')
if self._current_offset < 0:
raise IOError(
'Invalid current offset: {0:d} value less than zero.'.format(
self._current_offset))
if whence == os.SEEK_CUR:
offset += self._current_offset
elif whence == os.SEEK_END:
if self._uncompressed_stream_size is None:
self._uncompressed_stream_size = self._GetUncompressedStreamSize()
if self._uncompressed_stream_size is None:
raise IOError('Invalid uncompressed stream size.')
offset += self._uncompressed_stream_size
elif whence != os.SEEK_SET:
raise IOError('Unsupported whence.')
if offset < 0:
raise IOError('Invalid offset value less than zero.')
if offset != self._current_offset:
self._current_offset = offset
self._realign_offset = True | def seek(self, offset, whence=os.SEEK_SET) | Seeks to an offset within the file-like object.
Args:
offset (int): offset to seek to.
whence (Optional(int)): value that indicates whether offset is an absolute
or relative position within the file.
Raises:
IOError: if the seek failed.
OSError: if the seek failed. | 2.245558 | 2.274397 | 0.98732 |
if not self._is_open:
raise IOError('Not opened.')
if self._uncompressed_stream_size is None:
self._uncompressed_stream_size = self._GetUncompressedStreamSize()
return self._uncompressed_stream_size | def get_size(self) | Retrieves the size of the file-like object.
Returns:
int: size of the uncompressed stream.
Raises:
IOError: if the file-like object has not been opened.
OSError: if the file-like object has not been opened. | 3.699835 | 2.89403 | 1.278437 |
if not path_spec:
raise ValueError('Missing path specification.')
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
self._file_system = resolver.Resolver.OpenFileSystem(
path_spec, resolver_context=self._resolver_context)
tsk_volume = self._file_system.GetTSKVolume()
tsk_vs, _ = tsk_partition.GetTSKVsPartByPathSpec(tsk_volume, path_spec)
if tsk_vs is None:
raise errors.PathSpecError(
'Unable to retrieve TSK volume system part from path '
'specification.')
range_offset = tsk_partition.TSKVsPartGetStartSector(tsk_vs)
range_size = tsk_partition.TSKVsPartGetNumberOfSectors(tsk_vs)
if range_offset is None or range_size is None:
raise errors.PathSpecError(
'Unable to retrieve TSK volume system part data range from path '
'specification.')
bytes_per_sector = tsk_partition.TSKVolumeGetBytesPerSector(tsk_volume)
range_offset *= bytes_per_sector
range_size *= bytes_per_sector
self.SetRange(range_offset, range_size)
self._file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
self._file_object_set_in_init = True
# pylint: disable=protected-access
super(TSKPartitionFile, self)._Open(path_spec=path_spec, mode=mode) | def _Open(self, path_spec=None, mode='rb') | Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid. | 2.291711 | 2.332774 | 0.982397 |
stat_object = vfs_stat.VFSStat()
if self._compressed_stream:
stat_object.size = self._compressed_stream.get_size()
stat_object.type = self.entry_type
return stat_object | def _GetStat(self) | Retrieves information about the file entry.
Returns:
VFSStat: a stat object. | 5.328766 | 4.662899 | 1.142801 |
self._vshadow_volume.close()
self._vshadow_volume = None
self._file_object.close()
self._file_object = None | def _Close(self) | Closes the file system.
Raises:
IOError: if the close failed. | 7.483313 | 6.266708 | 1.194138 |
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
try:
vshadow_volume = pyvshadow.volume()
vshadow_volume.open_file_object(file_object)
except:
file_object.close()
raise
self._file_object = file_object
self._vshadow_volume = vshadow_volume | def _Open(self, path_spec, mode='rb') | Opens the file system object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode. The default is 'rb' which
represents read-only binary.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file system object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid. | 2.114773 | 2.352603 | 0.898908 |
store_index = vshadow.VShadowPathSpecGetStoreIndex(path_spec)
# The virtual root file has not corresponding store index but
# should have a location.
if store_index is None:
location = getattr(path_spec, 'location', None)
return location is not None and location == self.LOCATION_ROOT
return 0 <= store_index < self._vshadow_volume.number_of_stores | def FileEntryExistsByPathSpec(self, path_spec) | Determines if a file entry for a path specification exists.
Args:
path_spec (PathSpec): path specification.
Returns:
bool: True if the file entry exists. | 5.657192 | 6.202469 | 0.912087 |
store_index = vshadow.VShadowPathSpecGetStoreIndex(path_spec)
# The virtual root file has not corresponding store index but
# should have a location.
if store_index is None:
location = getattr(path_spec, 'location', None)
if location is None or location != self.LOCATION_ROOT:
return None
return vshadow_file_entry.VShadowFileEntry(
self._resolver_context, self, path_spec, is_root=True,
is_virtual=True)
if store_index < 0 or store_index >= self._vshadow_volume.number_of_stores:
return None
return vshadow_file_entry.VShadowFileEntry(
self._resolver_context, self, path_spec) | def GetFileEntryByPathSpec(self, path_spec) | Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
VShadowFileEntry: file entry or None if not available. | 2.887001 | 3.003624 | 0.961173 |
path_spec = vshadow_path_spec.VShadowPathSpec(
location=self.LOCATION_ROOT, parent=self._path_spec.parent)
return self.GetFileEntryByPathSpec(path_spec) | def GetRootFileEntry(self) | Retrieves the root file entry.
Returns:
VShadowFileEntry: file entry or None if not available. | 3.20507 | 3.381213 | 0.947905 |
store_index = vshadow.VShadowPathSpecGetStoreIndex(path_spec)
if store_index is None:
return None
return self._vshadow_volume.get_store(store_index) | def GetVShadowStoreByPathSpec(self, path_spec) | Retrieves a VSS store for a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
pyvshadow.store: a VSS store or None if not available. | 4.323236 | 5.228823 | 0.826809 |
self._tsk_attribute = None
self._tsk_file = None
self._file_system.Close()
self._file_system = None | def _Close(self) | Closes the file-like object. | 8.359637 | 7.719512 | 1.082923 |
if not path_spec:
raise ValueError('Missing path specification.')
data_stream = getattr(path_spec, 'data_stream', None)
file_system = resolver.Resolver.OpenFileSystem(
path_spec, resolver_context=self._resolver_context)
file_entry = file_system.GetFileEntryByPathSpec(path_spec)
if not file_entry:
file_system.Close()
raise IOError('Unable to retrieve file entry.')
tsk_file = file_entry.GetTSKFile()
tsk_attribute = None
# Note that because pytsk3.File does not explicitly defines info
# we need to check if the attribute exists and has a value other
# than None.
if getattr(tsk_file, 'info', None) is None:
file_system.Close()
raise IOError('Missing attribute info in file (pytsk3.File).')
# Note that because pytsk3.TSK_FS_FILE does not explicitly defines meta
# we need to check if the attribute exists and has a value other
# than None.
if getattr(tsk_file.info, 'meta', None) is None:
file_system.Close()
raise IOError(
'Missing attribute meta in file.info pytsk3.TSK_FS_FILE).')
# Note that because pytsk3.TSK_FS_META does not explicitly defines size
# we need to check if the attribute exists.
if not hasattr(tsk_file.info.meta, 'size'):
file_system.Close()
raise IOError(
'Missing attribute size in file.info.meta (pytsk3.TSK_FS_META).')
# Note that because pytsk3.TSK_FS_META does not explicitly defines type
# we need to check if the attribute exists.
if not hasattr(tsk_file.info.meta, 'type'):
file_system.Close()
raise IOError(
'Missing attribute type in file.info.meta (pytsk3.TSK_FS_META).')
if data_stream:
for attribute in tsk_file:
if getattr(attribute, 'info', None) is None:
continue
# The value of the attribute name will be None for the default
# data stream.
attribute_name = getattr(attribute.info, 'name', None)
if attribute_name is None:
attribute_name = ''
else:
try:
# pytsk3 returns an UTF-8 encoded byte string.
attribute_name = attribute_name.decode('utf8')
except UnicodeError:
# Continue here since we cannot represent the attribute name.
continue
attribute_type = getattr(attribute.info, 'type', None)
if attribute_name == data_stream and attribute_type in (
pytsk3.TSK_FS_ATTR_TYPE_HFS_DEFAULT,
pytsk3.TSK_FS_ATTR_TYPE_HFS_DATA,
pytsk3.TSK_FS_ATTR_TYPE_NTFS_DATA):
tsk_attribute = attribute
break
if tsk_attribute is None:
file_system.Close()
raise IOError('Unable to open data stream: {0:s}.'.format(data_stream))
if (not tsk_attribute and
tsk_file.info.meta.type != pytsk3.TSK_FS_META_TYPE_REG):
file_system.Close()
raise IOError('Not a regular file.')
self._current_offset = 0
self._file_system = file_system
self._tsk_attribute = tsk_attribute
self._tsk_file = tsk_file
if self._tsk_attribute:
self._size = self._tsk_attribute.info.size
else:
self._size = self._tsk_file.info.meta.size | def _Open(self, path_spec=None, mode='rb') | Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid. | 1.999546 | 2.025908 | 0.986988 |
if not self._is_open:
raise IOError('Not opened.')
if self._current_offset < 0:
raise IOError('Invalid current offset value less than zero.')
# The SleuthKit is not POSIX compliant in its read behavior. Therefore
# pytsk3 will raise an IOError if the read offset is beyond the data size.
if self._current_offset >= self._size:
return b''
if size is None or self._current_offset + size > self._size:
size = self._size - self._current_offset
if self._tsk_attribute:
data = self._tsk_file.read_random(
self._current_offset, size, self._tsk_attribute.info.type,
self._tsk_attribute.info.id)
else:
data = self._tsk_file.read_random(self._current_offset, size)
# It is possible the that returned data size is not the same as the
# requested data size. At this layer we don't care and this discrepancy
# should be dealt with on a higher layer if necessary.
self._current_offset += len(data)
return data | def read(self, size=None) | Reads a byte string from the file-like object at the current offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
size (Optional[int]): number of bytes to read, where None is all
remaining data.
Returns:
bytes: data read.
Raises:
IOError: if the read failed.
OSError: if the read failed. | 3.913616 | 3.919578 | 0.998479 |
vslvm_logical_volume = self._file_entry.GetLVMLogicalVolume()
volume_attribute = volume_system.VolumeAttribute(
'identifier', vslvm_logical_volume.identifier)
self._AddAttribute(volume_attribute)
# TODO: implement in pyvslvm
# TODO: add support for creation time
# TODO: add support for logical volume extents
volume_extent = volume_system.VolumeExtent(0, vslvm_logical_volume.size)
self._extents.append(volume_extent) | def _Parse(self) | Extracts attributes and extents from the volume. | 5.064054 | 4.849107 | 1.044327 |
warnings.simplefilter('default', DeprecationWarning)
warnings.warn('Call to deprecated function: {0:s}.'.format(
function.__name__), category=DeprecationWarning, stacklevel=2)
return function(*args, **kwargs)
IssueDeprecationWarning.__name__ = function.__name__
IssueDeprecationWarning.__doc__ = function.__doc__
IssueDeprecationWarning.__dict__.update(function.__dict__)
return IssueDeprecationWarning | def deprecated(function): # pylint: disable=invalid-name
def IssueDeprecationWarning(*args, **kwargs) | Decorator to mark functions or methods as deprecated. | 1.756732 | 1.855083 | 0.946983 |
if self._zip_ext_file:
self._zip_ext_file.close()
self._zip_ext_file = None
self._zip_file = None
self._zip_info = None
self._file_system.Close()
self._file_system = None | def _Close(self) | Closes the file-like object. | 3.483598 | 3.091556 | 1.12681 |
if not path_spec:
raise ValueError('Missing path specification.')
file_system = resolver.Resolver.OpenFileSystem(
path_spec, resolver_context=self._resolver_context)
file_entry = file_system.GetFileEntryByPathSpec(path_spec)
if not file_entry:
file_system.Close()
raise IOError('Unable to retrieve file entry.')
if not file_entry.IsFile():
file_system.Close()
raise IOError('Not a regular file.')
self._file_system = file_system
self._zip_file = self._file_system.GetZipFile()
self._zip_info = file_entry.GetZipInfo()
self._current_offset = 0
self._uncompressed_stream_size = self._zip_info.file_size | def _Open(self, path_spec=None, mode='rb') | Opens the file-like object defined by path specification.
Args:
path_spec (Optional[PathSpec]): path specification.
mode (Optional[str]): file access mode.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file-like object could not be opened.
OSError: if the file-like object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid. | 2.045379 | 2.22359 | 0.919854 |
if self._zip_ext_file:
self._zip_ext_file.close()
self._zip_ext_file = None
try:
# The open can fail if the file path in the local file header
# does not use the same path segment separator as the corresponding
# entry in the central directory.
self._zip_ext_file = self._zip_file.open(self._zip_info, 'r')
except zipfile.BadZipfile as exception:
raise IOError(
'Unable to open ZIP file with error: {0!s}'.format(exception))
self._uncompressed_data = b''
self._uncompressed_data_size = 0
self._uncompressed_data_offset = 0
while uncompressed_data_offset > 0:
self._ReadCompressedData(self._UNCOMPRESSED_DATA_BUFFER_SIZE)
if uncompressed_data_offset < self._uncompressed_data_size:
self._uncompressed_data_offset = uncompressed_data_offset
break
uncompressed_data_offset -= self._uncompressed_data_size | def _AlignUncompressedDataOffset(self, uncompressed_data_offset) | Aligns the compressed file with the uncompressed data offset.
Args:
uncompressed_data_offset (int): uncompressed data offset.
Raises:
IOError: if the ZIP file could not be opened.
OSError: if the ZIP file could not be opened. | 2.855219 | 2.785294 | 1.025105 |
self._uncompressed_data = self._zip_ext_file.read(read_size)
self._uncompressed_data_size = len(self._uncompressed_data) | def _ReadCompressedData(self, read_size) | Reads compressed data from the file-like object.
Args:
read_size (int): number of bytes of compressed data to read. | 3.746568 | 4.084569 | 0.917249 |
tsk_image_object = tsk_image.TSKFileSystemImage(file_object)
try:
pytsk3.Volume_Info(tsk_image_object)
except IOError:
return None
return self.type_indicator | def AnalyzeFileObject(self, file_object) | Retrieves the format specification.
Args:
file_object (FileIO): file-like object.
Returns:
str: type indicator if the file-like object contains a supported format
or None otherwise. | 6.293557 | 7.113819 | 0.884695 |
if not self._file_entry_types:
return None
return (
self._CheckIsDevice(file_entry) or self._CheckIsDirectory(file_entry) or
self._CheckIsFile(file_entry) or self._CheckIsLink(file_entry) or
self._CheckIsPipe(file_entry) or self._CheckIsSocket(file_entry)) | def _CheckFileEntryType(self, file_entry) | Checks the file entry type find specifications.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if
not or None if no file entry type specification is defined. | 2.409071 | 2.503023 | 0.962465 |
if definitions.FILE_ENTRY_TYPE_DEVICE not in self._file_entry_types:
return False
return file_entry.IsDevice() | def _CheckIsDevice(self, file_entry) | Checks the is_device find specification.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | 4.684381 | 5.738847 | 0.816258 |
if definitions.FILE_ENTRY_TYPE_DIRECTORY not in self._file_entry_types:
return False
return file_entry.IsDirectory() | def _CheckIsDirectory(self, file_entry) | Checks the is_directory find specification.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | 4.529125 | 5.360287 | 0.844941 |
if definitions.FILE_ENTRY_TYPE_FILE not in self._file_entry_types:
return False
return file_entry.IsFile() | def _CheckIsFile(self, file_entry) | Checks the is_file find specification.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | 5.22414 | 5.970556 | 0.874984 |
if definitions.FILE_ENTRY_TYPE_LINK not in self._file_entry_types:
return False
return file_entry.IsLink() | def _CheckIsLink(self, file_entry) | Checks the is_link find specification.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | 4.884577 | 5.540087 | 0.881679 |
if definitions.FILE_ENTRY_TYPE_PIPE not in self._file_entry_types:
return False
return file_entry.IsPipe() | def _CheckIsPipe(self, file_entry) | Checks the is_pipe find specification.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | 4.668839 | 5.586047 | 0.835804 |
if definitions.FILE_ENTRY_TYPE_SOCKET not in self._file_entry_types:
return False
return file_entry.IsSocket() | def _CheckIsSocket(self, file_entry) | Checks the is_socket find specification.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | 4.475013 | 5.630701 | 0.794752 |
if self._location_segments is None:
return False
if search_depth < 0 or search_depth > self._number_of_location_segments:
return False
# Note that the root has no entry in the location segments and
# no name to match.
if search_depth == 0:
segment_name = ''
else:
segment_name = self._location_segments[search_depth - 1]
if self._is_regex:
if isinstance(segment_name, py2to3.STRING_TYPES):
# Allow '\n' to be matched by '.' and make '\w', '\W', '\b', '\B',
# '\d', '\D', '\s' and '\S' Unicode safe.
flags = re.DOTALL | re.UNICODE
if not self._is_case_sensitive:
flags |= re.IGNORECASE
try:
segment_name = r'^{0:s}$'.format(segment_name)
segment_name = re.compile(segment_name, flags=flags)
except sre_constants.error:
# TODO: set self._location_segments[search_depth - 1] to None ?
return False
self._location_segments[search_depth - 1] = segment_name
elif not self._is_case_sensitive:
segment_name = segment_name.lower()
self._location_segments[search_depth - 1] = segment_name
if search_depth > 0:
if self._is_regex:
if not segment_name.match(file_entry.name): # pylint: disable=no-member
return False
elif self._is_case_sensitive:
if segment_name != file_entry.name:
return False
elif segment_name != file_entry.name.lower():
return False
return True | def _CheckLocation(self, file_entry, search_depth) | Checks the location find specification.
Args:
file_entry (FileEntry): file entry.
search_depth (int): number of location path segments to compare.
Returns:
bool: True if the file entry matches the find specification, False if not. | 2.579995 | 2.530308 | 1.019637 |
if self._location_segments is not None:
if search_depth >= self._number_of_location_segments:
return True
return False | def AtMaximumDepth(self, search_depth) | Determines if the find specification is at maximum depth.
Args:
search_depth (int): number of location path segments to compare.
Returns:
bool: True if at maximum depth, False if not. | 8.075013 | 5.682251 | 1.421094 |
if self._location_segments is None:
location_match = None
else:
location_match = self._CheckLocation(file_entry, search_depth)
if not location_match:
return False, location_match
if search_depth != self._number_of_location_segments:
return False, location_match
match = self._CheckFileEntryType(file_entry)
if match is not None and not match:
return False, location_match
match = self._CheckIsAllocated(file_entry)
if match is not None and not match:
return False, location_match
return True, location_match | def Matches(self, file_entry, search_depth) | Determines if the file entry matches the find specification.
Args:
file_entry (FileEntry): file entry.
search_depth (int): number of location path segments to compare.
Returns:
tuple: contains:
bool: True if the file entry matches the find specification, False
otherwise.
bool: True if the location matches, False if not or None if no location
specified. | 2.990015 | 2.583515 | 1.157344 |
if self._location is not None:
self._location_segments = self._SplitPath(
self._location, file_system.PATH_SEPARATOR)
elif self._location_regex is not None:
path_separator = file_system.PATH_SEPARATOR
if path_separator == '\\':
# The backslash '\' is escaped within a regular expression.
path_separator = '\\\\'
self._location_segments = self._SplitPath(
self._location_regex, path_separator)
if self._location_segments is not None:
self._number_of_location_segments = len(self._location_segments) | def PrepareMatches(self, file_system) | Prepare find specification for matching.
Args:
file_system (FileSystem): file system. | 3.144102 | 3.032583 | 1.036774 |
sub_find_specs = []
for find_spec in find_specs:
match, location_match = find_spec.Matches(file_entry, search_depth)
if match:
yield file_entry.path_spec
# pylint: disable=singleton-comparison
if location_match != False and not find_spec.AtMaximumDepth(search_depth):
sub_find_specs.append(find_spec)
if not sub_find_specs:
return
search_depth += 1
try:
for sub_file_entry in file_entry.sub_file_entries:
for matching_path_spec in self._FindInFileEntry(
sub_file_entry, sub_find_specs, search_depth):
yield matching_path_spec
except errors.AccessError:
pass | def _FindInFileEntry(self, file_entry, find_specs, search_depth) | Searches for matching file entries within the file entry.
Args:
file_entry (FileEntry): file entry.
find_specs (list[FindSpec]): find specifications.
search_depth (int): number of location path segments to compare.
Yields:
PathSpec: path specification of a matching file entry. | 2.630747 | 2.700904 | 0.974025 |
if not find_specs:
find_specs.append(FindSpec())
for find_spec in find_specs:
find_spec.PrepareMatches(self._file_system)
if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
self._file_system.type_indicator):
file_entry = self._file_system.GetFileEntryByPathSpec(self._mount_point)
else:
file_entry = self._file_system.GetRootFileEntry()
for matching_path_spec in self._FindInFileEntry(file_entry, find_specs, 0):
yield matching_path_spec | def Find(self, find_specs=None) | Searches for matching file entries within the file system.
Args:
find_specs (list[FindSpec]): find specifications. where None
will return all allocated file entries.
Yields:
PathSpec: path specification of a matching file entry. | 3.14965 | 3.198809 | 0.984632 |
location = getattr(path_spec, 'location', None)
if location is None:
raise errors.PathSpecError('Path specification missing location.')
if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
self._file_system.type_indicator):
if not location.startswith(self._mount_point.location):
raise errors.PathSpecError(
'Path specification does not contain mount point.')
else:
if not hasattr(path_spec, 'parent'):
raise errors.PathSpecError('Path specification missing parent.')
if path_spec.parent != self._mount_point:
raise errors.PathSpecError(
'Path specification does not contain mount point.')
path_segments = self._file_system.SplitPath(location)
if path_spec_factory.Factory.IsSystemLevelTypeIndicator(
self._file_system.type_indicator):
mount_point_path_segments = self._file_system.SplitPath(
self._mount_point.location)
path_segments = path_segments[len(mount_point_path_segments):]
return '{0:s}{1:s}'.format(
self._file_system.PATH_SEPARATOR,
self._file_system.PATH_SEPARATOR.join(path_segments)) | def GetRelativePath(self, path_spec) | Returns the relative path based on a resolved path specification.
The relative path is the location of the upper most path specification.
The the location of the mount point is stripped off if relevant.
Args:
path_spec (PathSpec): path specification.
Returns:
str: corresponding relative path or None if the relative path could not
be determined.
Raises:
PathSpecError: if the path specification is incorrect. | 1.858751 | 1.960503 | 0.948099 |
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
try:
tsk_image_object = tsk_image.TSKFileSystemImage(file_object)
tsk_volume = pytsk3.Volume_Info(tsk_image_object)
except:
file_object.close()
raise
self._file_object = file_object
self._tsk_volume = tsk_volume | def _Open(self, path_spec, mode='rb') | Opens the file system object defined by path specification.
Args:
path_spec (PathSpec): a path specification.
mode (Optional[str]): file access mode. The default is 'rb' which
represents read-only binary.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file system object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid. | 2.141618 | 2.300931 | 0.930761 |
tsk_vs_part, _ = tsk_partition.GetTSKVsPartByPathSpec(
self._tsk_volume, path_spec)
# The virtual root file has not corresponding TSK volume system part object
# but should have a location.
if tsk_vs_part is None:
location = getattr(path_spec, 'location', None)
return location is not None and location == self.LOCATION_ROOT
return True | def FileEntryExistsByPathSpec(self, path_spec) | Determines if a file entry for a path specification exists.
Args:
path_spec (PathSpec): a path specification.
Returns:
bool: True if the file entry exists or false otherwise. | 6.419697 | 7.322379 | 0.876723 |
tsk_vs_part, partition_index = tsk_partition.GetTSKVsPartByPathSpec(
self._tsk_volume, path_spec)
location = getattr(path_spec, 'location', None)
# The virtual root file has not corresponding TSK volume system part object
# but should have a location.
if tsk_vs_part is None:
if location is None or location != self.LOCATION_ROOT:
return None
return tsk_partition_file_entry.TSKPartitionFileEntry(
self._resolver_context, self, path_spec, is_root=True,
is_virtual=True)
if location is None and partition_index is not None:
path_spec.location = '/p{0:d}'.format(partition_index)
return tsk_partition_file_entry.TSKPartitionFileEntry(
self._resolver_context, self, path_spec) | def GetFileEntryByPathSpec(self, path_spec) | Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
TSKPartitionFileEntry: a file entry or None of not available. | 3.275112 | 3.506583 | 0.93399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.