code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
if path.startswith('\\\\.\\') or path.startswith('\\\\?\\'): if len(path) < 7 or path[5] != ':' or path[6] != self._PATH_SEPARATOR: # Cannot handle a non-volume path. return None path = path[7:] elif path.startswith('\\\\'): # Cannot handle an UNC path. return None elif len(path) >= 3 and path[1] == ':': # Check if the path is a Volume 'absolute' path. if path[2] != self._PATH_SEPARATOR: # Cannot handle a Volume 'relative' path. return None path = path[3:] elif path.startswith('\\'): path = path[1:] else: # Cannot handle a relative path. return None return path
def _PathStripPrefix(self, path)
Strips the prefix from a path. Args: path (str): Windows path to strip the prefix from. Returns: str: path without the prefix or None if the path is not supported.
3.012013
3.039889
0.99083
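The _PathStripPrefix record above branches on device paths (\\.\ and \\?\), UNC paths, volume-absolute paths (C:\) and separator-prefixed paths. A minimal standalone sketch of the same branching, runnable outside the dfVFS class; the function name, the hard-coded '\\' separator and the sample paths are illustrative only:

def strip_windows_path_prefix(path, separator='\\'):
  """Sketch: returns the path without its Windows prefix, or None."""
  if path.startswith('\\\\.\\') or path.startswith('\\\\?\\'):
    # Device path: only volume device paths such as \\.\C:\... are supported.
    if len(path) < 7 or path[5] != ':' or path[6] != separator:
      return None
    return path[7:]
  if path.startswith('\\\\'):
    return None  # UNC paths are not supported.
  if len(path) >= 3 and path[1] == ':':
    if path[2] != separator:
      return None  # Volume 'relative' paths such as C:file.txt are not supported.
    return path[3:]
  if path.startswith('\\'):
    return path[1:]
  return None  # Relative paths are not supported.

print(strip_windows_path_prefix('C:\\Windows\\System32'))   # Windows\System32
print(strip_windows_path_prefix('\\\\server\\share\\file'))  # None (UNC)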
# Allow for paths that start with an environment variable e.g. # %SystemRoot%\file.txt if path.startswith('%'): path_segment, _, _ = path.partition(self._PATH_SEPARATOR) if not self._PATH_EXPANSION_VARIABLE.match(path_segment): path = None else: path = self._PathStripPrefix(path) if path is None: return None, None if path_spec_factory.Factory.IsSystemLevelTypeIndicator( self._file_system.type_indicator): file_entry = self._file_system.GetFileEntryByPathSpec(self._mount_point) expanded_path_segments = self._file_system.SplitPath( self._mount_point.location) else: file_entry = self._file_system.GetRootFileEntry() expanded_path_segments = [] number_of_expanded_path_segments = 0 search_path_segments = path.split(self._PATH_SEPARATOR) while search_path_segments: path_segment = search_path_segments.pop(0) if file_entry is None: return None, None # Ignore empty path segments or path segments containing a single dot. if not path_segment or path_segment == '.': continue if path_segment == '..': # Only allow to traverse back up to the mount point. if number_of_expanded_path_segments > 0: _ = expanded_path_segments.pop(0) number_of_expanded_path_segments -= 1 file_entry = file_entry.GetParentFileEntry() continue if (expand_variables and self._PATH_EXPANSION_VARIABLE.match(path_segment)): path_segment = self._environment_variables.get( path_segment[1:-1].upper(), path_segment) if self._PATH_SEPARATOR in path_segment: # The expanded path segment itself can consist of multiple # path segments, hence we need to split it and prepend it to # the search path segments list. path_segments = path_segment.split(self._PATH_SEPARATOR) path_segments.extend(search_path_segments) search_path_segments = path_segments path_segment = search_path_segments.pop(0) sub_file_entry = file_entry.GetSubFileEntryByName( path_segment, case_sensitive=False) if sub_file_entry is None: return None, None expanded_path_segments.append(sub_file_entry.name) number_of_expanded_path_segments += 1 file_entry = sub_file_entry location = self._file_system.JoinPath(expanded_path_segments) return location, file_entry.path_spec
def _ResolvePath(self, path, expand_variables=True)
Resolves a Windows path in file system specific format. This function checks if the individual path segments exist within the file system, preferring the first case sensitive match over a case insensitive match. If no match is found, None is returned. Args: path (str): Windows path to resolve. expand_variables (Optional[bool]): True if path variables should be expanded. Returns: tuple[str, PathSpec]: location and matching path specification or (None, None) if not available.
2.450398
2.411138
1.016283
location, path_spec = self._ResolvePath( path, expand_variables=expand_variables) if not location or not path_spec: return None # Note that we don't want to set the keyword arguments when not used because # the path specification base class will check for unused keyword arguments # and raise. kwargs = path_spec_factory.Factory.GetProperties(path_spec) kwargs['location'] = location if not path_spec_factory.Factory.IsSystemLevelTypeIndicator( self._file_system.type_indicator): kwargs['parent'] = self._mount_point return path_spec_factory.Factory.NewPathSpec( self._file_system.type_indicator, **kwargs)
def ResolvePath(self, path, expand_variables=True)
Resolves a Windows path in file system specific format. Args: path (str): Windows path to resolve. expand_variables (Optional[bool]): True if path variables should be expanded. Returns: PathSpec: path specification in file system specific format or None if not available.
3.789451
3.991
0.949499
if isinstance(value, py2to3.STRING_TYPES): value = self._PathStripPrefix(value) if value is not None: self._environment_variables[name.upper()] = value
def SetEnvironmentVariable(self, name, value)
Sets an environment variable in the Windows path helper. Args: name (str): name of the environment variable without enclosing %-characters, e.g. SystemRoot as in %SystemRoot%. value (str): value of the environment variable.
6.991795
6.80016
1.028181
volume_index = getattr(path_spec, 'volume_index', None) if volume_index is not None: return volume_index location = getattr(path_spec, 'location', None) if location is None or not location.startswith('/apfs'): return None try: volume_index = int(location[5:], 10) - 1 except (TypeError, ValueError): volume_index = None if volume_index is None or volume_index < 0 or volume_index > 99: volume_index = None return volume_index
def APFSContainerPathSpecGetVolumeIndex(path_spec)
Retrieves the volume index from the path specification. Args: path_spec (PathSpec): path specification. Returns: int: volume index or None if the index cannot be determined.
1.83138
2.104255
0.870322
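The volume index above is taken either from an explicit volume_index attribute or parsed from a location of the form '/apfs<N>', where the location is 1-based, the returned index is 0-based, and indexes above 99 are rejected. A standalone sketch of just the parsing step; the function name and sample locations are illustrative:

def apfs_location_to_volume_index(location):
  """Sketch: maps '/apfs2' to volume index 1, or returns None."""
  if not location or not location.startswith('/apfs'):
    return None
  try:
    volume_index = int(location[5:], 10) - 1
  except (TypeError, ValueError):
    return None
  if volume_index < 0 or volume_index > 99:
    return None
  return volume_index

print(apfs_location_to_volume_index('/apfs2'))    # 1
print(apfs_location_to_volume_index('/apfs101'))  # None (out of range)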
is_locked = fsapfs_volume.is_locked() if is_locked: password = key_chain.GetCredential(path_spec, 'password') if password: fsapfs_volume.set_password(password) recovery_password = key_chain.GetCredential(path_spec, 'recovery_password') if recovery_password: fsapfs_volume.set_recovery_password(recovery_password) is_locked = not fsapfs_volume.unlock() return not is_locked
def APFSUnlockVolume(fsapfs_volume, path_spec, key_chain)
Unlocks an APFS volume using the path specification. Args: fsapfs_volume (pyfsapfs.volume): APFS volume. path_spec (PathSpec): path specification. key_chain (KeyChain): key chain. Returns: bool: True if the volume is unlocked, False otherwise.
2.2341
2.593276
0.861497
if self._database_object: self._database_object.Close() self._blob = None self._current_offset = 0 self._size = 0 self._table_name = None
def _Close(self)
Closes the file-like object.
5.89961
5.339976
1.104801
if not self._database_object: raise IOError('Not opened.') if self._number_of_rows is None: self._number_of_rows = self._database_object.GetNumberOfRows( self._table_name) return self._number_of_rows
def GetNumberOfRows(self)
Retrieves the number of rows of the table. Returns: int: number of rows. Raises: IOError: if the file-like object has not been opened. OSError: if the file-like object has not been opened.
3.578535
2.903012
1.232697
if not self._database_object: raise IOError('Not opened.') if self._current_offset < 0: raise IOError('Invalid offset value out of bounds.') if size == 0 or self._current_offset >= self._size: return b'' if size is None: size = self._size if self._current_offset + size > self._size: size = self._size - self._current_offset start_offset = self._current_offset self._current_offset += size return self._blob[start_offset:self._current_offset]
def read(self, size=None)
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
2.896483
2.890964
1.001909
if not self._database_object: raise IOError('Not opened.') if whence == os.SEEK_CUR: offset += self._current_offset elif whence == os.SEEK_END: offset += self._size elif whence != os.SEEK_SET: raise IOError('Unsupported whence.') if offset < 0: raise IOError('Invalid offset value out of bounds.') self._current_offset = offset
def seek(self, offset, whence=os.SEEK_SET)
Seeks to an offset within the file-like object. Args: offset (int): offset to seek to. whence (Optional[int]): value that indicates whether offset is an absolute or relative position within the file. Raises: IOError: if the seek failed. OSError: if the seek failed.
2.666872
2.787143
0.956848
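The seek implementation above only adjusts a cached offset: SEEK_CUR and SEEK_END translate a relative offset into an absolute one before the bounds check. A self-contained sketch of that translation; the OffsetTracker class is illustrative and not part of dfVFS:

import os

class OffsetTracker(object):
  """Sketch: tracks a current offset the way the file-like objects above do."""

  def __init__(self, size):
    self._current_offset = 0
    self._size = size

  def seek(self, offset, whence=os.SEEK_SET):
    if whence == os.SEEK_CUR:
      offset += self._current_offset
    elif whence == os.SEEK_END:
      offset += self._size
    elif whence != os.SEEK_SET:
      raise IOError('Unsupported whence.')
    if offset < 0:
      raise IOError('Invalid offset value out of bounds.')
    self._current_offset = offset
    return self._current_offset

tracker = OffsetTracker(size=100)
print(tracker.seek(10))                # 10
print(tracker.seek(5, os.SEEK_CUR))    # 15
print(tracker.seek(-20, os.SEEK_END))  # 80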
if not path_spec: raise ValueError('Missing path specification.') if path_spec.HasParent(): raise errors.PathSpecError('Unsupported path specification with parent.') location = getattr(path_spec, 'location', None) if location is None: raise errors.PathSpecError('Path specification missing location.') self._current_offset = 0 self._size = len(self._file_data)
def _Open(self, path_spec=None, mode='rb')
Opens the file-like object defined by path specification. Args: path_spec (PathSpec): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.703551
2.921857
0.925285
if not self._is_open: raise IOError('Not opened.') if self._current_offset < 0: raise IOError( 'Invalid current offset: {0:d} value less than zero.'.format( self._current_offset)) if self._file_data is None or self._current_offset >= self._size: return b'' if size is None: size = self._size if self._current_offset + size > self._size: size = self._size - self._current_offset start_offset = self._current_offset self._current_offset += size return self._file_data[start_offset:self._current_offset]
def read(self, size=None)
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
2.432027
2.392635
1.016464
fsapfs_volume = self._file_entry.GetAPFSVolume() volume_attribute = volume_system.VolumeAttribute( 'identifier', fsapfs_volume.identifier) self._AddAttribute(volume_attribute) volume_attribute = volume_system.VolumeAttribute( 'name', fsapfs_volume.name) self._AddAttribute(volume_attribute)
def _Parse(self)
Extracts attributes and extents from the volume.
3.978474
3.837426
1.036756
root_file_entry = self._file_system.GetRootFileEntry() for sub_file_entry in root_file_entry.sub_file_entries: volume = APFSVolume(sub_file_entry) self._AddVolume(volume)
def _Parse(self)
Extracts sections and volumes from the volume system.
3.968303
3.187581
1.244926
if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') parent_path_spec = path_spec.parent parent_location = getattr(parent_path_spec, 'location', None) if not parent_location: raise errors.PathSpecError( 'Unsupported parent path specification without location.') # Note that we cannot use pyvmdk's open_extent_data_files function # since it does not handle the file system abstraction dfvfs provides. file_system = resolver.Resolver.OpenFileSystem( parent_path_spec, resolver_context=self._resolver_context) file_object = resolver.Resolver.OpenFileObject( parent_path_spec, resolver_context=self._resolver_context) vmdk_handle = pyvmdk.handle() vmdk_handle.open_file_object(file_object) parent_location_path_segments = file_system.SplitPath(parent_location) extent_data_files = [] for extent_descriptor in iter(vmdk_handle.extent_descriptors): extent_data_filename = extent_descriptor.filename _, path_separator, filename = extent_data_filename.rpartition('/') if not path_separator: _, path_separator, filename = extent_data_filename.rpartition('\\') if not path_separator: filename = extent_data_filename # The last parent location path segment contains the extent data filename. # Since we want to check if the next extent data file exists we remove # the previous one from the path segments list and add the new filename. # After that the path segments list can be used to create the location # string. parent_location_path_segments.pop() parent_location_path_segments.append(filename) extent_data_file_location = file_system.JoinPath( parent_location_path_segments) # Note that we don't want to set the keyword arguments when not used # because the path specification base class will check for unused # keyword arguments and raise. kwargs = path_spec_factory.Factory.GetProperties(parent_path_spec) kwargs['location'] = extent_data_file_location if parent_path_spec.parent is not None: kwargs['parent'] = parent_path_spec.parent extent_data_file_path_spec = path_spec_factory.Factory.NewPathSpec( parent_path_spec.type_indicator, **kwargs) if not file_system.FileEntryExistsByPathSpec(extent_data_file_path_spec): break extent_data_files.append(extent_data_file_path_spec) if len(extent_data_files) != vmdk_handle.number_of_extents: raise IOError('Unable to locate all extent data files.') file_objects = [] for extent_data_file_path_spec in extent_data_files: file_object = resolver.Resolver.OpenFileObject( extent_data_file_path_spec, resolver_context=self._resolver_context) file_objects.append(file_object) # TODO: add parent image support. vmdk_handle.open_extent_data_files_file_objects(file_objects) return vmdk_handle
def _OpenFileObject(self, path_spec)
Opens the file-like object defined by path specification. Args: path_spec (PathSpec): path specification. Returns: pyvmdk.handle: a file-like object. Raises: IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect.
2.502056
2.482348
1.007939
if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') encoding_method = getattr(path_spec, 'encoding_method', None) if not encoding_method: raise errors.PathSpecError( 'Unsupported path specification without encoding method.') self._encoding_method = encoding_method
def _Open(self, path_spec, mode='rb')
Opens the file system defined by path specification. Args: path_spec (PathSpec): a path specification. mode (Optional[str]): file access mode. The default is 'rb' which represents read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.1876
2.455337
0.890957
return encoded_stream_file_entry.EncodedStreamFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True)
def GetFileEntryByPathSpec(self, path_spec)
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): a path specification. Returns: EncodedStreamFileEntry: a file entry or None if not available.
4.185284
4.052677
1.032721
path_spec = encoded_stream_path_spec.EncodedStreamPathSpec( encoding_method=self._encoding_method, parent=self._path_spec.parent) return self.GetFileEntryByPathSpec(path_spec)
def GetRootFileEntry(self)
Retrieves the root file entry. Returns: EncodedStreamFileEntry: a file entry or None if not available.
3.594408
3.31018
1.085865
sub_comparable_string = 'identifier: {0:s}'.format(self.identifier) return self._GetComparable(sub_comparable_string=sub_comparable_string)
def comparable(self)
str: comparable representation of the path specification.
6.485528
5.156755
1.257676
format_specification = specification.FormatSpecification( self.type_indicator) # FAT volume header signature. format_specification.AddNewSignature(b'\x55\xaa', offset=510) if definitions.PREFERRED_NTFS_BACK_END == self.TYPE_INDICATOR: # NTFS file system signature. format_specification.AddNewSignature(b'NTFS ', offset=3) # HFS boot block signature. format_specification.AddNewSignature(b'LK', offset=0) # HFS master directory block signature. format_specification.AddNewSignature(b'BD', offset=0) # HFS+ file system signature. format_specification.AddNewSignature(b'H+', offset=1024) # HFSX file system signature. format_specification.AddNewSignature(b'HX', offset=1024) # Ext file system signature. format_specification.AddNewSignature(b'\x53\xef', offset=1080) # ISO9660 file system signature. format_specification.AddNewSignature(b'CD001', offset=32769) # YAFFS file system signature. return format_specification
def GetFormatSpecification(self)
Retrieves the format specification. Returns: FormatSpecification: format specification or None if the format cannot be defined by a specification object.
4.49934
4.461439
1.008495
self._zip_file.close() self._zip_file = None self._file_object.close() self._file_object = None
def _Close(self)
Closes the file system object. Raises: IOError: if the close failed.
4.115685
3.70584
1.110594
if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') file_object = resolver.Resolver.OpenFileObject( path_spec.parent, resolver_context=self._resolver_context) try: zip_file = zipfile.ZipFile(file_object, 'r') except: file_object.close() raise self._file_object = file_object self._zip_file = zip_file
def _Open(self, path_spec, mode='rb')
Opens the file system object defined by path specification. Args: path_spec (PathSpec): path specification of the file system. mode (Optional[str]): file access mode. The default is 'rb' which represents read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
1.839695
2.038454
0.902495
location = getattr(path_spec, 'location', None) if (location is None or not location.startswith(self.LOCATION_ROOT)): return False if len(location) == 1: return True try: self._zip_file.getinfo(location[1:]) return True except KeyError: pass # Check if location could be a virtual directory. for name in iter(self._zip_file.namelist()): # The ZIP info name does not have the leading path separator as # the location string does. if name.startswith(location[1:]): return True return False
def FileEntryExistsByPathSpec(self, path_spec)
Determines if a file entry for a path specification exists. Args: path_spec (PathSpec): path specification of the file entry. Returns: bool: True if the file entry exists.
3.998183
4.452697
0.897924
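Because ZIP archives only store file members, a directory such as '/docs' may exist only implicitly as a prefix of member names; the FileEntryExistsByPathSpec record above therefore falls back to scanning namelist(). A runnable stdlib-only sketch of the same check; the in-memory archive and its contents are made up for the example:

import io
import zipfile

def zip_location_exists(zip_file, location):
  """Sketch: True if location exists as a member or as a virtual directory."""
  if not location.startswith('/'):
    return False
  if len(location) == 1:
    return True
  try:
    zip_file.getinfo(location[1:])
    return True
  except KeyError:
    pass
  # Virtual directory: some member name starts with the location string.
  return any(name.startswith(location[1:]) for name in zip_file.namelist())

buffer = io.BytesIO()
with zipfile.ZipFile(buffer, 'w') as zip_file:
  zip_file.writestr('docs/readme.txt', b'example')

with zipfile.ZipFile(buffer, 'r') as zip_file:
  print(zip_location_exists(zip_file, '/docs/readme.txt'))  # True
  print(zip_location_exists(zip_file, '/docs'))             # True (virtual directory)
  print(zip_location_exists(zip_file, '/missing'))          # False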
if not self.FileEntryExistsByPathSpec(path_spec): return None location = getattr(path_spec, 'location', None) if len(location) == 1: return zip_file_entry.ZipFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True) kwargs = {} try: kwargs['zip_info'] = self._zip_file.getinfo(location[1:]) except KeyError: kwargs['is_virtual'] = True return zip_file_entry.ZipFileEntry( self._resolver_context, self, path_spec, **kwargs)
def GetFileEntryByPathSpec(self, path_spec)
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification of the file entry. Returns: ZipFileEntry: a file entry or None.
2.347823
2.418781
0.970664
path_spec = zip_path_spec.ZipPathSpec( location=self.LOCATION_ROOT, parent=self._path_spec.parent) return self.GetFileEntryByPathSpec(path_spec)
def GetRootFileEntry(self)
Retrieves the root file entry. Returns: ZipFileEntry: a file entry or None.
3.467582
3.649816
0.95007
location = getattr(path_spec, 'location', None) if location is None: raise errors.PathSpecError('Path specification missing location.') if not location.startswith(self.LOCATION_ROOT): raise errors.PathSpecError('Invalid location in path specification.') if len(location) > 1: return self._zip_file.getinfo(location[1:]) return None
def GetZipInfoByPathSpec(self, path_spec)
Retrieves the ZIP info for a path specification. Args: path_spec (PathSpec): a path specification. Returns: zipfile.ZipInfo: a ZIP info object or None if not available. Raises: PathSpecError: if the path specification is incorrect.
2.784477
3.078459
0.904503
sub_comparable_string = 'location: {0:s}'.format(self.location) return self._GetComparable(sub_comparable_string=sub_comparable_string)
def comparable(self)
str: comparable representation of the path specification.
7.01045
5.32841
1.315674
vshadow_store = self._file_entry.GetVShadowStore() self._AddAttribute(volume_system.VolumeAttribute( 'identifier', vshadow_store.identifier)) self._AddAttribute(volume_system.VolumeAttribute( 'copy_identifier', vshadow_store.copy_identifier)) self._AddAttribute(volume_system.VolumeAttribute( 'copy_set_identifier', vshadow_store.copy_set_identifier)) self._AddAttribute(volume_system.VolumeAttribute( 'creation_time', vshadow_store.get_creation_time_as_integer())) volume_extent = volume_system.VolumeExtent(0, vshadow_store.volume_size) self._extents.append(volume_extent)
def _Parse(self)
Extracts attributes and extents from the volume.
3.051961
2.799145
1.090319
self._cpio_archive_file_entry = None self._cpio_archive_file = None self._file_system.Close() self._file_system = None
def _Close(self)
Closes the file-like object.
6.400625
5.954444
1.074932
if not path_spec: raise ValueError('Missing path specification.') file_system = resolver.Resolver.OpenFileSystem( path_spec, resolver_context=self._resolver_context) file_entry = file_system.GetFileEntryByPathSpec(path_spec) if not file_entry: file_system.Close() raise IOError('Unable to retrieve file entry.') self._file_system = file_system self._cpio_archive_file = self._file_system.GetCPIOArchiveFile() self._cpio_archive_file_entry = file_entry.GetCPIOArchiveFileEntry() self._current_offset = 0
def _Open(self, path_spec=None, mode='rb')
Opens the file-like object defined by path specification. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.415611
2.640957
0.914673
if not self._is_open: raise IOError('Not opened.') if self._current_offset >= self._cpio_archive_file_entry.data_size: return b'' file_offset = ( self._cpio_archive_file_entry.data_offset + self._current_offset) read_size = self._cpio_archive_file_entry.data_size - self._current_offset if read_size > size: read_size = size data = self._cpio_archive_file.ReadDataAtOffset(file_offset, read_size) # It is possible that the returned data size is not the same as the # requested data size. At this layer we don't care and this discrepancy # should be dealt with on a higher layer if necessary. self._current_offset += len(data) return data
def read(self, size=None)
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
3.702943
3.720507
0.995279
if not self._is_open: raise IOError('Not opened.') if whence == os.SEEK_CUR: offset += self._current_offset elif whence == os.SEEK_END: offset += self._cpio_archive_file_entry.data_size elif whence != os.SEEK_SET: raise IOError('Unsupported whence.') if offset < 0: raise IOError('Invalid offset value less than zero.') self._current_offset = offset
def seek(self, offset, whence=os.SEEK_SET)
Seeks to an offset within the file-like object. Args: offset (int): offset to seek to. whence (Optional[int]): value that indicates whether offset is an absolute or relative position within the file. Raises: IOError: if the seek failed. OSError: if the seek failed.
3.01301
3.153393
0.955482
self._bde_volume.close() self._bde_volume = None self._file_object.close() self._file_object = None
def _Close(self)
Closes the file system. Raises: IOError: if the close failed.
7.261421
6.378485
1.138424
if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') resolver.Resolver.key_chain.ExtractCredentialsFromPathSpec(path_spec) bde_volume = pybde.volume() file_object = resolver.Resolver.OpenFileObject( path_spec.parent, resolver_context=self._resolver_context) try: bde.BDEVolumeOpen( bde_volume, path_spec, file_object, resolver.Resolver.key_chain) except: file_object.close() raise self._bde_volume = bde_volume self._file_object = file_object
def _Open(self, path_spec, mode='rb')
Opens the file system defined by path specification. Args: path_spec (PathSpec): path specification. mode (Optional[str]): file access mode. The default is 'rb' read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.631968
2.803416
0.938843
return bde_file_entry.BDEFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True)
def GetFileEntryByPathSpec(self, path_spec)
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification. Returns: BDEFileEntry: file entry or None.
3.721632
4.246001
0.876503
path_spec = bde_path_spec.BDEPathSpec(parent=self._path_spec.parent) return self.GetFileEntryByPathSpec(path_spec)
def GetRootFileEntry(self)
Retrieves the root file entry. Returns: BDEFileEntry: file entry or None.
4.034844
3.561223
1.132994
index_split = -(len(encrypted_data) % AES.block_size) if index_split: remaining_encrypted_data = encrypted_data[index_split:] encrypted_data = encrypted_data[:index_split] else: remaining_encrypted_data = b'' decrypted_data = self._aes_cipher.decrypt(encrypted_data) return decrypted_data, remaining_encrypted_data
def Decrypt(self, encrypted_data)
Decrypts the encrypted data. Args: encrypted_data (bytes): encrypted data. Returns: tuple[bytes, bytes]: decrypted data and remaining encrypted data.
2.834046
2.500118
1.133565
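The Decrypt record above only feeds whole AES blocks to the cipher and hands any trailing partial block back to the caller as remaining data, using a negative index of -(len(data) % block_size). A small sketch of just that buffering step, with no actual cipher involved; the names and sample length are illustrative:

AES_BLOCK_SIZE = 16

def split_on_block_boundary(encrypted_data, block_size=AES_BLOCK_SIZE):
  """Sketch: returns (whole blocks, trailing partial block)."""
  index_split = -(len(encrypted_data) % block_size)
  if index_split:
    return encrypted_data[:index_split], encrypted_data[index_split:]
  return encrypted_data, b''

blocks, remainder = split_on_block_boundary(b'x' * 37)
print(len(blocks), len(remainder))  # 32 5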
if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') if path_spec.parent.type_indicator != ( definitions.TYPE_INDICATOR_APFS_CONTAINER): raise errors.PathSpecError( 'Unsupported path specification not type APFS container.') apfs_container_file_system = resolver.Resolver.OpenFileSystem( path_spec.parent, resolver_context=self._resolver_context) fsapfs_volume = apfs_container_file_system.GetAPFSVolumeByPathSpec( path_spec.parent) if not fsapfs_volume: raise IOError('Unable to retrieve APFS volume') try: is_locked = not apfs_helper.APFSUnlockVolume( fsapfs_volume, path_spec.parent, resolver.Resolver.key_chain) except IOError as exception: raise IOError('Unable to unlock APFS volume with error: {0!s}'.format( exception)) if is_locked: raise IOError('Unable to unlock APFS volume.') self._fsapfs_volume = fsapfs_volume
def _Open(self, path_spec, mode='rb')
Opens the file system defined by path specification. Args: path_spec (PathSpec): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the APFS volume could not be retrieved or unlocked. OSError: if the APFS volume could not be retrieved or unlocked. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.22025
2.113356
1.050581
# Opening a file by identifier is faster than opening a file by location. fsapfs_file_entry = None location = getattr(path_spec, 'location', None) identifier = getattr(path_spec, 'identifier', None) try: if identifier is not None: fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_identifier( identifier) elif location is not None: fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_path(location) except IOError as exception: raise errors.BackEndError(exception) return fsapfs_file_entry is not None
def FileEntryExistsByPathSpec(self, path_spec)
Determines if a file entry for a path specification exists. Args: path_spec (PathSpec): path specification. Returns: bool: True if the file entry exists. Raises: BackEndError: if the file entry cannot be opened.
2.468695
2.577427
0.957814
# Opening a file by identifier is faster than opening a file by location. fsapfs_file_entry = None location = getattr(path_spec, 'location', None) identifier = getattr(path_spec, 'identifier', None) if (location == self.LOCATION_ROOT or identifier == self.ROOT_DIRECTORY_IDENTIFIER): fsapfs_file_entry = self._fsapfs_volume.get_root_directory() return apfs_file_entry.APFSFileEntry( self._resolver_context, self, path_spec, fsapfs_file_entry=fsapfs_file_entry, is_root=True) try: if identifier is not None: fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_identifier( identifier) elif location is not None: fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_path(location) except IOError as exception: raise errors.BackEndError(exception) if fsapfs_file_entry is None: return None return apfs_file_entry.APFSFileEntry( self._resolver_context, self, path_spec, fsapfs_file_entry=fsapfs_file_entry)
def GetFileEntryByPathSpec(self, path_spec)
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification. Returns: APFSFileEntry: file entry or None if not available. Raises: BackEndError: if the file entry cannot be opened.
1.994601
2.025116
0.984932
# Opening a file by identifier is faster than opening a file by location. location = getattr(path_spec, 'location', None) identifier = getattr(path_spec, 'identifier', None) if identifier is not None: fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_identifier( identifier) elif location is not None: fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_path(location) else: raise errors.PathSpecError( 'Path specification missing location and identifier.') return fsapfs_file_entry
def GetAPFSFileEntryByPathSpec(self, path_spec)
Retrieves the APFS file entry for a path specification. Args: path_spec (PathSpec): a path specification. Returns: pyfsapfs.file_entry: file entry. Raises: PathSpecError: if the path specification is missing location and identifier.
2.497115
2.243999
1.112797
path_spec = apfs_path_spec.APFSPathSpec( location=self.LOCATION_ROOT, identifier=self.ROOT_DIRECTORY_IDENTIFIER, parent=self._path_spec.parent) return self.GetFileEntryByPathSpec(path_spec)
def GetRootFileEntry(self)
Retrieves the root file entry. Returns: APFSFileEntry: file entry.
3.227576
3.466915
0.930965
if not path_spec: raise ValueError('Missing path specification.') data_stream = getattr(path_spec, 'data_stream', None) if data_stream: raise errors.NotSupported( 'Open data stream: {0:s} not supported.'.format(data_stream)) self._file_system = resolver.Resolver.OpenFileSystem( path_spec, resolver_context=self._resolver_context) file_entry = self._file_system.GetFileEntryByPathSpec(path_spec) if not file_entry: raise IOError('Unable to open file entry.') fsapfs_file_entry = file_entry.GetAPFSFileEntry() if not fsapfs_file_entry: raise IOError('Unable to open APFS file entry.') self._fsapfs_file_entry = fsapfs_file_entry
def _Open(self, path_spec=None, mode='rb')
Opens the file-like object defined by path specification. Args: path_spec (PathSpec): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. NotSupported: if a data stream, like the resource or named fork, is requested to be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.193246
2.104712
1.042064
if not self._is_open: raise IOError('Not opened.') return self._fsapfs_file_entry.read(size=size)
def read(self, size=None)
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
10.010344
12.223156
0.818966
location = getattr(self.path_spec, 'location', None) store_index = getattr(self.path_spec, 'store_index', None) # Only the virtual root file has directory entries. if (store_index is None and location is not None and location == self._file_system.LOCATION_ROOT): vshadow_volume = self._file_system.GetVShadowVolume() for store_index in range(0, vshadow_volume.number_of_stores): yield vshadow_path_spec.VShadowPathSpec( location='/vss{0:d}'.format(store_index + 1), store_index=store_index, parent=self.path_spec.parent)
def _EntriesGenerator(self)
Retrieves directory entries. Since a directory can contain a vast number of entries, using a generator is more memory efficient. Yields: VShadowPathSpec: a path specification.
3.04729
2.884837
1.056313
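The generator above synthesizes one path specification per shadow store, naming them '/vss1' through '/vssN' while keeping the 0-based store_index. A tiny sketch of that naming scheme; the store count and function name are made up:

def generate_vss_locations(number_of_stores):
  """Sketch: yields (location, store_index) pairs for shadow stores."""
  for store_index in range(number_of_stores):
    yield '/vss{0:d}'.format(store_index + 1), store_index

print(list(generate_vss_locations(3)))
# [('/vss1', 0), ('/vss2', 1), ('/vss3', 2)]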
if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY: return None return VShadowDirectory(self._file_system, self.path_spec)
def _GetDirectory(self)
Retrieves a directory. Returns: VShadowDirectory: a directory or None if not available.
7.618931
4.556196
1.672213
stat_object = super(VShadowFileEntry, self)._GetStat() if self._vshadow_store is not None: # File data stat information. stat_object.size = self._vshadow_store.volume_size # Ownership and permissions stat information. # File entry type stat information. # The root file entry is virtual and should have type directory. return stat_object
def _GetStat(self)
Retrieves information about the file entry. Returns: VFSStat: a stat object.
10.71305
9.066123
1.181657
if self._vshadow_store is None: return None timestamp = self._vshadow_store.get_creation_time_as_integer() return dfdatetime_filetime.Filetime(timestamp=timestamp)
def creation_time(self)
dfdatetime.DateTimeValues: creation time or None if not available.
7.31415
3.755394
1.947639
store_index = vshadow.VShadowPathSpecGetStoreIndex(self.path_spec) if store_index is None: return None return self._file_system.GetRootFileEntry()
def GetParentFileEntry(self)
Retrieves the parent file entry. Returns: FileEntry: parent file entry or None if not available.
7.958655
6.24961
1.273464
type_indicator = path_spec_type.TYPE_INDICATOR if type_indicator not in cls._path_spec_types: raise KeyError( 'Path specification type: {0:s} not set.'.format(type_indicator)) del cls._path_spec_types[type_indicator] if type_indicator in cls._system_level_type_indicators: del cls._system_level_type_indicators[type_indicator]
def DeregisterPathSpec(cls, path_spec_type)
Deregisters a path specification. Args: path_spec_type (type): path specification type. Raises: KeyError: if path specification is not registered.
2.272813
2.647818
0.858372
properties = {} for property_name in cls.PROPERTY_NAMES: # Note that we do not want to set the properties when not used. if hasattr(path_spec, property_name): properties[property_name] = getattr(path_spec, property_name) return properties
def GetProperties(cls, path_spec)
Retrieves a dictionary containing the path specification properties. Args: path_spec (PathSpec): path specification. Returns: dict[str, str]: path specification properties.
3.20124
4.264801
0.750619
if type_indicator not in cls._path_spec_types: raise KeyError( 'Path specification type: {0:s} not set.'.format(type_indicator)) # An empty parent will cause parentless path specifications to raise # so we conveniently remove it here. if 'parent' in kwargs and kwargs['parent'] is None: del kwargs['parent'] path_spec_type = cls._path_spec_types[type_indicator] return path_spec_type(**kwargs)
def NewPathSpec(cls, type_indicator, **kwargs)
Creates a new path specification for the specific type indicator. Args: type_indicator (str): type indicator. kwargs (dict): keyword arguments depending on the path specification. Returns: PathSpec: path specification. Raises: KeyError: if path specification is not registered.
3.338449
3.30764
1.009315
type_indicator = path_spec_type.TYPE_INDICATOR if type_indicator in cls._path_spec_types: raise KeyError( 'Path specification type: {0:s} already set.'.format( type_indicator)) cls._path_spec_types[type_indicator] = path_spec_type if getattr(path_spec_type, '_IS_SYSTEM_LEVEL', False): cls._system_level_type_indicators[type_indicator] = path_spec_type
def RegisterPathSpec(cls, path_spec_type)
Registers a path specification type. Args: path_spec_type (type): path specification type. Raises: KeyError: if path specification is already registered.
2.019786
2.215484
0.911668
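The four Factory records above implement a registry keyed on TYPE_INDICATOR: registration stores the class, NewPathSpec looks it up and drops a None parent before instantiating, and GetProperties only copies attributes that are actually set. A condensed standalone sketch of that pattern; the class and attribute names here are illustrative, not the dfVFS API:

class PathSpecRegistry(object):
  """Sketch: minimal type-indicator based registry."""

  _types = {}

  @classmethod
  def Register(cls, path_spec_type):
    type_indicator = path_spec_type.TYPE_INDICATOR
    if type_indicator in cls._types:
      raise KeyError('Type: {0:s} already set.'.format(type_indicator))
    cls._types[type_indicator] = path_spec_type

  @classmethod
  def New(cls, type_indicator, **kwargs):
    if type_indicator not in cls._types:
      raise KeyError('Type: {0:s} not set.'.format(type_indicator))
    # An empty parent would make parentless types raise, so remove it.
    if 'parent' in kwargs and kwargs['parent'] is None:
      del kwargs['parent']
    return cls._types[type_indicator](**kwargs)

class FakePathSpec(object):
  TYPE_INDICATOR = 'FAKE'

  def __init__(self, location=None):
    self.location = location

PathSpecRegistry.Register(FakePathSpec)
path_spec = PathSpecRegistry.New('FAKE', location='/tmp/example', parent=None)
print(path_spec.location)  # /tmp/example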
# pylint: disable=protected-access element_data_size = ( data_type_map._element_data_type_definition.GetByteSize()) elements_terminator = ( data_type_map._data_type_definition.elements_terminator) byte_stream = [] element_data = file_object.read(element_data_size) byte_stream.append(element_data) while element_data and element_data != elements_terminator: element_data = file_object.read(element_data_size) byte_stream.append(element_data) byte_stream = b''.join(byte_stream) return self._ReadStructureFromByteStream( byte_stream, file_offset, data_type_map, description)
def _ReadString( self, file_object, file_offset, data_type_map, description)
Reads a string. Args: file_object (FileIO): file-like object. file_offset (int): offset of the data relative to the start of the file-like object. data_type_map (dtfabric.DataTypeMap): data type map of the string. description (str): description of the string. Returns: object: structure values object. Raises: FileFormatError: if the string cannot be read. ValueError: if file-like object or data type map are invalid.
2.205455
2.594476
0.850058
data = self._ReadData(file_object, file_offset, data_size, description) return self._ReadStructureFromByteStream( data, file_offset, data_type_map, description)
def _ReadStructure( self, file_object, file_offset, data_size, data_type_map, description)
Reads a structure. Args: file_object (FileIO): file-like object. file_offset (int): offset of the data relative to the start of the file-like object. data_size (int): data size of the structure. data_type_map (dtfabric.DataTypeMap): data type map of the structure. description (str): description of the structure. Returns: object: structure values object. Raises: FileFormatError: if the structure cannot be read. ValueError: if file-like object or data type map are invalid.
2.506787
4.999037
0.501454
if not byte_stream: raise ValueError('Invalid byte stream.') if not data_type_map: raise ValueError('Invalid data type map.') try: return data_type_map.MapByteStream(byte_stream, context=context) except dtfabric_errors.MappingError as exception: raise errors.FileFormatError(( 'Unable to map {0:s} data at offset: 0x{1:08x} with error: ' '{2!s}').format(description, file_offset, exception))
def _ReadStructureFromByteStream( self, byte_stream, file_offset, data_type_map, description, context=None)
Reads a structure from a byte stream. Args: byte_stream (bytes): byte stream. file_offset (int): offset of the data relative to the start of the file-like object. data_type_map (dtfabric.DataTypeMap): data type map of the structure. description (str): description of the structure. context (Optional[dtfabric.DataTypeMapContext]): data type map context. Returns: object: structure values object. Raises: FileFormatError: if the structure cannot be read. ValueError: if file-like object or data type map are invalid.
1.957132
2.002333
0.977426
self._fvde_volume.close() self._fvde_volume = None self._file_object.close() self._file_object = None
def _Close(self)
Closes the file system. Raises: IOError: if the close failed.
8.159478
7.13306
1.143896
if not path_spec.HasParent(): raise errors.PathSpecError( 'Unsupported path specification without parent.') resolver.Resolver.key_chain.ExtractCredentialsFromPathSpec(path_spec) fvde_volume = pyfvde.volume() file_object = resolver.Resolver.OpenFileObject( path_spec.parent, resolver_context=self._resolver_context) try: fvde.FVDEVolumeOpen( fvde_volume, path_spec, file_object, resolver.Resolver.key_chain) except: file_object.close() raise self._fvde_volume = fvde_volume self._file_object = file_object
def _Open(self, path_spec, mode='rb')
Opens the file system defined by path specification. Args: path_spec (PathSpec): path specification. mode (Optional[str]): file access mode. The default is 'rb' read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
2.816974
3.010885
0.935597
return fvde_file_entry.FVDEFileEntry( self._resolver_context, self, path_spec, is_root=True, is_virtual=True)
def GetFileEntryByPathSpec(self, path_spec)
Retrieves a file entry for a path specification. Args: path_spec (PathSpec): path specification. Returns: FVDEFileEntry: file entry or None.
4.023053
4.294305
0.936834
path_spec = fvde_path_spec.FVDEPathSpec(parent=self._path_spec.parent) return self.GetFileEntryByPathSpec(path_spec)
def GetRootFileEntry(self)
Retrieves the root file entry. Returns: FVDEFileEntry: file entry or None.
4.061448
3.450794
1.17696
try: if hasattr(lzma, 'LZMA_VERSION'): # Note that we cannot use max_length=0 here due to different # versions of the lzma code. uncompressed_data = self._lzma_decompressor.decompress( compressed_data, 0) else: uncompressed_data = self._lzma_decompressor.decompress(compressed_data) remaining_compressed_data = getattr( self._lzma_decompressor, 'unused_data', b'') except (EOFError, IOError, LZMAError) as exception: raise errors.BackEndError(( 'Unable to decompress XZ compressed stream with error: ' '{0!s}.').format(exception)) return uncompressed_data, remaining_compressed_data
def Decompress(self, compressed_data)
Decompresses the compressed data. Args: compressed_data (bytes): compressed data. Returns: tuple[bytes, bytes]: uncompressed data and remaining compressed data. Raises: BackEndError: if the XZ compressed stream cannot be decompressed.
3.481359
2.922266
1.191322
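The Decompress record above returns both the uncompressed bytes and whatever trailing input the decompressor did not consume (its unused_data), so a caller can keep feeding a stream in chunks. A stdlib-only sketch showing where unused_data comes from; the sample payload and the trailing b'tail' bytes are made up:

import lzma

compressed = lzma.compress(b'example data')

decompressor = lzma.LZMADecompressor()
# Feed the stream plus some trailing bytes that do not belong to it.
uncompressed = decompressor.decompress(compressed + b'tail')
remaining = decompressor.unused_data

print(uncompressed)  # b'example data'
print(remaining)     # b'tail'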
string_parts = [] if self.cipher_mode: string_parts.append('cipher_mode: {0:s}'.format(self.cipher_mode)) if self.encryption_method: string_parts.append('encryption_method: {0:s}'.format( self.encryption_method)) if self.initialization_vector: initialization_vector = codecs.encode(self.initialization_vector, 'hex') initialization_vector = initialization_vector.decode('ascii') string_parts.append('initialization_vector: {0:s}'.format( initialization_vector)) if self.key: key = codecs.encode(self.key, 'hex') key = key.decode('ascii') string_parts.append('key: {0:s}'.format(key)) return self._GetComparable(sub_comparable_string=', '.join(string_parts))
def comparable(self)
str: comparable representation of the path specification.
2.044438
1.942502
1.052476
self._file_object.seek(offset, os.SEEK_SET) return self._file_object.read(size)
def read(self, offset, size)
Reads a byte string from the image object at the specified offset. Args: offset (int): offset where to start reading. size (int): number of bytes to read. Returns: bytes: data read.
2.825948
3.264085
0.86577
if self._normalized_timestamp is None: if self._timestamp is not None: self._normalized_timestamp = decimal.Decimal(self._timestamp) if self.fraction_of_second is not None: fraction_of_second = decimal.Decimal(self.fraction_of_second) if self._precision == dfdatetime_definitions.PRECISION_1_NANOSECOND: fraction_of_second /= self._NANOSECONDS_PER_SECOND else: fraction_of_second /= self._100_NANOSECONDS_PER_SECOND self._normalized_timestamp += fraction_of_second return self._normalized_timestamp
def _GetNormalizedTimestamp(self)
Retrieves the normalized timestamp. Returns: decimal.Decimal: normalized timestamp, which contains the number of seconds since January 1, 1970 00:00:00 and a fraction of second used for increased precision, or None if the normalized timestamp cannot be determined.
2.448803
2.329557
1.051188
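The normalized timestamp above combines the integer POSIX seconds with a decimal fraction whose scale depends on the stored precision (1 nanosecond vs 100 nanosecond intervals). A small sketch of that arithmetic with decimal.Decimal; the constants, function name and sample values are illustrative:

import decimal

NANOSECONDS_PER_SECOND = 1000000000
HUNDREDS_OF_NANOSECONDS_PER_SECOND = 10000000

def normalize_timestamp(timestamp, fraction_of_second, nanosecond_precision):
  """Sketch: POSIX seconds plus a scaled fraction, as a decimal.Decimal."""
  normalized = decimal.Decimal(timestamp)
  fraction = decimal.Decimal(fraction_of_second)
  if nanosecond_precision:
    fraction /= NANOSECONDS_PER_SECOND
  else:
    fraction /= HUNDREDS_OF_NANOSECONDS_PER_SECOND
  return normalized + fraction

print(normalize_timestamp(1281643591, 546875000, True))  # 1281643591.546875
print(normalize_timestamp(1281643591, 5468750, False))   # 1281643591.546875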
date_time_values = self._CopyDateTimeFromString(time_string) year = date_time_values.get('year', 0) month = date_time_values.get('month', 0) day_of_month = date_time_values.get('day_of_month', 0) hours = date_time_values.get('hours', 0) minutes = date_time_values.get('minutes', 0) seconds = date_time_values.get('seconds', 0) microseconds = date_time_values.get('microseconds', 0) self._timestamp = self._GetNumberOfSecondsFromElements( year, month, day_of_month, hours, minutes, seconds) self.fraction_of_second = microseconds if pytsk3.TSK_VERSION_NUM >= 0x040200ff: self.fraction_of_second *= 1000 else: self.fraction_of_second *= 10 self._normalized_timestamp = None self.is_local_time = False
def CopyFromDateTimeString(self, time_string)
Copies a SleuthKit timestamp from a date and time string. Args: time_string (str): date and time value formatted as: YYYY-MM-DD hh:mm:ss.######[+-]##:## Where # are numeric digits ranging from 0 to 9 and the seconds fraction can be either 3 or 6 digits. The time of day, seconds fraction and time zone offset are optional. The default time zone is UTC.
2.431591
2.460365
0.988305
if self._timestamp is None: return None number_of_days, hours, minutes, seconds = self._GetTimeValues( self._timestamp) year, month, day_of_month = self._GetDateValues(number_of_days, 1970, 1, 1) if self.fraction_of_second is None: return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'.format( year, month, day_of_month, hours, minutes, seconds) if pytsk3.TSK_VERSION_NUM >= 0x040200ff: return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:09d}'.format( year, month, day_of_month, hours, minutes, seconds, self.fraction_of_second) return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:07d}'.format( year, month, day_of_month, hours, minutes, seconds, self.fraction_of_second)
def CopyToDateTimeString(self)
Copies the date time value to a date and time string. Returns: str: date and time value formatted as: YYYY-MM-DD hh:mm:ss or YYYY-MM-DD hh:mm:ss.####### or YYYY-MM-DD hh:mm:ss.#########
1.830888
1.776056
1.030873
if self.fraction_of_second is None: return self._timestamp, None return super(TSKTime, self).CopyToStatTimeTuple()
def CopyToStatTimeTuple(self)
Copies the SleuthKit timestamp to a stat timestamp tuple. Returns: tuple[int, int]: a POSIX timestamp in seconds and the remainder in 100 nano seconds or (None, None) on error.
12.450505
8.260564
1.507222
if self._timestamp is None: return None, None, None try: number_of_days, _, _, _ = self._GetTimeValues(self._timestamp) return self._GetDateValues(number_of_days, 1970, 1, 1) except ValueError: return None, None, None
def GetDate(self)
Retrieves the date represented by the date and time values. Returns: tuple[int, int, int]: year, month, day of month or (None, None, None) if the date and time values do not represent a date.
4.072338
3.524477
1.155444
if self._tsk_attribute: # The value of the attribute name will be None for the default # data stream. attribute_name = getattr(self._tsk_attribute.info, 'name', None) if attribute_name: try: # pytsk3 returns an UTF-8 encoded byte string. return attribute_name.decode('utf8') except UnicodeError: pass return ''
def name(self)
str: name.
5.938694
5.553782
1.069306
if not self._tsk_attribute or not self._file_system: return True if self._file_system.IsHFS(): attribute_type = getattr(self._tsk_attribute.info, 'type', None) return attribute_type in ( pytsk3.TSK_FS_ATTR_TYPE_HFS_DEFAULT, pytsk3.TSK_FS_ATTR_TYPE_HFS_DATA) if self._file_system.IsNTFS(): return not bool(self.name) return True
def IsDefault(self)
Determines if the data stream is the default data stream. Returns: bool: True if the data stream is the default data stream, false if not.
3.810775
3.699123
1.030183
# Opening a file by inode number is faster than opening a file # by location. inode = getattr(self.path_spec, 'inode', None) location = getattr(self.path_spec, 'location', None) fs_info = self._file_system.GetFsInfo() tsk_directory = None try: if inode is not None: tsk_directory = fs_info.open_dir(inode=inode) elif location is not None: tsk_directory = fs_info.open_dir(path=location) except IOError as exception: raise errors.BackEndError( 'Unable to open directory with error: {0!s}'.format(exception)) if tsk_directory: for tsk_directory_entry in tsk_directory: # Note that because pytsk3.Directory does not explicitly define info # we need to check if the attribute exists and has a value other # than None. if getattr(tsk_directory_entry, 'info', None) is None: continue # Note that because pytsk3.TSK_FS_FILE does not explicitly define # fs_info we need to check if the attribute exists and has a value # other than None. if getattr(tsk_directory_entry.info, 'fs_info', None) is None: continue # Note that because pytsk3.TSK_FS_FILE does not explicitly define meta # we need to check if the attribute exists and has a value other # than None. if getattr(tsk_directory_entry.info, 'meta', None) is None: # Most directory entries will have an "inode" but not all, e.g. # previously deleted files. Currently directory entries without # a pytsk3.TSK_FS_META object are ignored. continue # Note that because pytsk3.TSK_FS_META does not explicitly define addr # we need to check if the attribute exists. if not hasattr(tsk_directory_entry.info.meta, 'addr'): continue directory_entry_inode = tsk_directory_entry.info.meta.addr directory_entry = None # Ignore references to self. if directory_entry_inode == inode: continue # On non-NTFS file systems ignore inode 0. if directory_entry_inode == 0 and not self._file_system.IsNTFS(): continue # Note that because pytsk3.TSK_FS_FILE does not explicitly define name # we need to check if the attribute exists and has a value other # than None. if getattr(tsk_directory_entry.info, 'name', None) is not None: # Ignore file entries marked as "unallocated". flags = getattr(tsk_directory_entry.info.name, 'flags', 0) if int(flags) & pytsk3.TSK_FS_NAME_FLAG_UNALLOC: continue directory_entry = getattr(tsk_directory_entry.info.name, 'name', '') try: # pytsk3 returns an UTF-8 encoded byte string. directory_entry = directory_entry.decode('utf8') except UnicodeError: # Continue here since we cannot represent the directory entry. continue if directory_entry: # Ignore references to self or parent. if directory_entry in ['.', '..']: continue if location == self._file_system.PATH_SEPARATOR: directory_entry = self._file_system.JoinPath([directory_entry]) else: directory_entry = self._file_system.JoinPath([ location, directory_entry]) yield tsk_path_spec.TSKPathSpec( inode=directory_entry_inode, location=directory_entry, parent=self.path_spec.parent)
def _EntriesGenerator(self)
Retrieves directory entries. Since a directory can contain a vast number of entries, using a generator is more memory efficient. Yields: TSKPathSpec: a path specification. Raises: BackEndError: if pytsk3 cannot open the directory.
2.480341
2.422173
1.024015
if self._attributes is None: self._attributes = [] for tsk_attribute in self._tsk_file: if getattr(tsk_attribute, 'info', None) is None: continue # At the moment there is no way to expose the attribute data # from pytsk3. attribute_object = TSKAttribute(tsk_attribute) self._attributes.append(attribute_object) return self._attributes
def _GetAttributes(self)
Retrieves the attributes. Returns: list[TSKAttribute]: attributes.
4.327285
3.668714
1.17951
if self._data_streams is None: if self._file_system.IsHFS(): known_data_attribute_types = [ pytsk3.TSK_FS_ATTR_TYPE_HFS_DEFAULT, pytsk3.TSK_FS_ATTR_TYPE_HFS_DATA] elif self._file_system.IsNTFS(): known_data_attribute_types = [pytsk3.TSK_FS_ATTR_TYPE_NTFS_DATA] else: known_data_attribute_types = None self._data_streams = [] tsk_fs_meta_type = getattr( self._tsk_file.info.meta, 'type', pytsk3.TSK_FS_META_TYPE_UNDEF) if not known_data_attribute_types: if tsk_fs_meta_type == pytsk3.TSK_FS_META_TYPE_REG: data_stream = TSKDataStream(self._file_system, None) self._data_streams.append(data_stream) else: for tsk_attribute in self._tsk_file: # NTFS allows directories to have data streams. if (not self._file_system.IsNTFS() and tsk_fs_meta_type != pytsk3.TSK_FS_META_TYPE_REG): continue if getattr(tsk_attribute, 'info', None) is None: continue attribute_type = getattr(tsk_attribute.info, 'type', None) if attribute_type in known_data_attribute_types: data_stream = TSKDataStream(self._file_system, tsk_attribute) self._data_streams.append(data_stream) return self._data_streams
def _GetDataStreams(self)
Retrieves the data streams. Returns: list[TSKDataStream]: data streams.
2.055479
1.993876
1.030896
if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY: return None return TSKDirectory(self._file_system, self.path_spec)
def _GetDirectory(self)
Retrieves a directory. Returns: TSKDirectory: directory or None.
6.914124
4.333536
1.595492
if self._link is None: self._link = '' if self.entry_type != definitions.FILE_ENTRY_TYPE_LINK: return self._link # Note that the SleuthKit does not expose NTFS # IO_REPARSE_TAG_MOUNT_POINT or IO_REPARSE_TAG_SYMLINK as a link. link = getattr(self._tsk_file.info.meta, 'link', None) if link is None: return self._link try: # pytsk3 returns an UTF-8 encoded byte string without a leading # path segment separator. link = '{0:s}{1:s}'.format( self._file_system.PATH_SEPARATOR, link.decode('utf8')) except UnicodeError: raise errors.BackEndError( 'pytsk3 returned a non UTF-8 formatted link.') self._link = link return self._link
def _GetLink(self)
Retrieves the link. Returns: str: path of the linked file.
4.364529
4.286644
1.018169
stat_object = super(TSKFileEntry, self)._GetStat() # File data stat information. stat_object.size = getattr(self._tsk_file.info.meta, 'size', None) # Date and time stat information. stat_time, stat_time_nano = self._TSKFileTimeCopyToStatTimeTuple( self._tsk_file, 'bkup') if stat_time is not None: stat_object.bkup = stat_time stat_object.bkup_nano = stat_time_nano stat_time, stat_time_nano = self._TSKFileTimeCopyToStatTimeTuple( self._tsk_file, 'dtime') if stat_time is not None: stat_object.dtime = stat_time stat_object.dtime_nano = stat_time_nano # Ownership and permissions stat information. mode = getattr(self._tsk_file.info.meta, 'mode', None) if mode is not None: # We need to cast mode to an int since it is of type # pytsk3.TSK_FS_META_MODE_ENUM. stat_object.mode = int(mode) stat_object.uid = getattr(self._tsk_file.info.meta, 'uid', None) stat_object.gid = getattr(self._tsk_file.info.meta, 'gid', None) # File entry type stat information. # Other stat information. stat_object.ino = getattr(self._tsk_file.info.meta, 'addr', None) # stat_object.dev = stat_info.st_dev # stat_object.nlink = getattr(self._tsk_file.info.meta, 'nlink', None) # stat_object.fs_type = 'Unknown' flags = getattr(self._tsk_file.info.meta, 'flags', 0) # The flags are an instance of pytsk3.TSK_FS_META_FLAG_ENUM. stat_object.is_allocated = bool(int(flags) & pytsk3.TSK_FS_META_FLAG_ALLOC) return stat_object
def _GetStat(self)
Retrieves the stat object. Returns: VFSStat: stat object.
2.519757
2.503784
1.006379
if self._directory is None: self._directory = self._GetDirectory() if self._directory: for path_spec in self._directory.entries: yield TSKFileEntry(self._resolver_context, self._file_system, path_spec)
def _GetSubFileEntries(self)
Retrieves sub file entries. Yields: TSKFileEntry: a sub file entry.
3.601973
3.090356
1.165553
timestamp = getattr(self._tsk_file.info.meta, name, None) if self._file_system_type in self._TSK_HAS_NANO_FS_TYPES: name_fragment = '{0:s}_nano'.format(name) fraction_of_second = getattr( self._tsk_file.info.meta, name_fragment, None) else: fraction_of_second = None return TSKTime(timestamp=timestamp, fraction_of_second=fraction_of_second)
def _GetTimeValue(self, name)
Retrieves a date and time value. Args: name (str): name of the date and time value, for example "atime" or "mtime". Returns: dfdatetime.DateTimeValues: date and time value or None if not available.
4.727869
4.242086
1.114515
if (not tsk_file or not tsk_file.info or not tsk_file.info.meta or not tsk_file.info.fs_info): raise errors.BackEndError( 'Missing TSK File .info, .info.meta. or .info.fs_info') stat_time = getattr(tsk_file.info.meta, time_value, None) stat_time_nano = None if self._file_system_type in self._TSK_HAS_NANO_FS_TYPES: time_value_nano = '{0:s}_nano'.format(time_value) stat_time_nano = getattr(tsk_file.info.meta, time_value_nano, None) # Sleuthkit 4.2.0 switched from 100 nano seconds precision to # 1 nano seconds precision. if stat_time_nano is not None and pytsk3.TSK_VERSION_NUM >= 0x040200ff: stat_time_nano /= 100 return stat_time, stat_time_nano
def _TSKFileTimeCopyToStatTimeTuple(self, tsk_file, time_value)
Copies a SleuthKit file object time value to a stat timestamp tuple.

Args:
  tsk_file (pytsk3.File): TSK file.
  time_value (str): name of the time value.

Returns:
  tuple[int, int]: number of seconds since 1970-01-01 00:00:00 and fraction
      of second in 100 nano seconds intervals. The number of seconds is
      None on error, or if the file system does not include the requested
      timestamp. The fraction of second is None on error, or if the file
      system does not support sub-second precision.

Raises:
  BackEndError: if the TSK File .info, .info.meta or .info.fs_info
      attribute is missing.
3.520179
2.783865
1.264493
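A small standalone sketch (the helper name is hypothetical; integer division is used for clarity) of the precision normalization applied above: SleuthKit 4.2.0 and later report 1 ns fractions, which are reduced to 100 ns intervals.

SLEUTHKIT_4_2_0 = 0x040200ff  # packed TSK version number, as used above

def normalize_fraction(fraction, tsk_version_num):
  """Converts a sub-second fraction to 100 nanosecond intervals."""
  if fraction is None:
    return None
  if tsk_version_num >= SLEUTHKIT_4_2_0:
    # Newer SleuthKit reports nanoseconds; reduce to 100 ns intervals.
    return fraction // 100
  # Older SleuthKit already reports 100 ns intervals.
  return fraction

print(normalize_fraction(123456789, 0x040300ff))  # 1234567 (100 ns intervals)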
if self._name is None:
  # If pytsk3.FS_Info.open() was used file.info has an attribute name
  # (pytsk3.TSK_FS_FILE) that contains the name string. Otherwise the
  # name from the path specification is used.
  if getattr(self._tsk_file.info, 'name', None) is not None:
    name = getattr(self._tsk_file.info.name, 'name', None)

    try:
      # pytsk3 returns an UTF-8 encoded byte string.
      self._name = name.decode('utf8')
    except UnicodeError:
      raise errors.BackEndError(
          'pytsk3 returned a non UTF-8 formatted name.')

  else:
    location = getattr(self.path_spec, 'location', None)
    if location:
      self._name = self._file_system.BasenamePath(location)

return self._name
def name(self)
str: name of the file entry, which does not include the full path.

Raises:
  BackEndError: if pytsk3 returns a non UTF-8 formatted name.
4.00876
3.495793
1.146739
data_stream_names = [
    data_stream.name for data_stream in self._GetDataStreams()]
if data_stream_name and data_stream_name not in data_stream_names:
  return None

path_spec = copy.deepcopy(self.path_spec)
if data_stream_name:
  # For HFS the DECOMP fork name is exposed, however libtsk 4.6.0 seems to
  # handle these differently when opened and the correct behavior seems to
  # be treating this as the default (nameless) fork instead. For context,
  # libtsk 4.5.0 is unable to read the data stream and yields an error.
  if self._file_system.IsHFS() and data_stream_name == 'DECOMP':
    data_stream_name = ''

  setattr(path_spec, 'data_stream', data_stream_name)

return resolver.Resolver.OpenFileObject(
    path_spec, resolver_context=self._resolver_context)
def GetFileObject(self, data_stream_name='')
Retrieves the file-like object.

Args:
  data_stream_name (Optional[str]): data stream name, where an empty string
      represents the default data stream.

Returns:
  TSKFileIO: file-like object or None.
6.111388
6.110881
1.000083
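A hedged usage sketch for the method above; the image path and file location are placeholders, disk.raw is assumed to be a bare file system image supported by TSK, and module layout may differ between dfVFS releases.

from dfvfs.lib import definitions
from dfvfs.path import factory
from dfvfs.resolver import resolver

os_path_spec = factory.Factory.NewPathSpec(
    definitions.TYPE_INDICATOR_OS, location='/images/disk.raw')
tsk_path_spec = factory.Factory.NewPathSpec(
    definitions.TYPE_INDICATOR_TSK, location='/a_directory/a_file',
    parent=os_path_spec)

file_entry = resolver.Resolver.OpenFileEntry(tsk_path_spec)
if file_entry:
  # An empty data stream name selects the default (nameless) data stream.
  file_object = file_entry.GetFileObject(data_stream_name='')
  if file_object:
    data = file_object.read(4096)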
link = self._GetLink()
if not link:
  return None

# TODO: is there a way to determine the link inode number here?
link_inode = None

parent_path_spec = getattr(self.path_spec, 'parent', None)
path_spec = tsk_path_spec.TSKPathSpec(
    location=link, parent=parent_path_spec)

root_inode = self._file_system.GetRootInode()
is_root = bool(
    link == self._file_system.LOCATION_ROOT or (
        link_inode is not None and root_inode is not None and
        link_inode == root_inode))

return TSKFileEntry(
    self._resolver_context, self._file_system, path_spec, is_root=is_root)
def GetLinkedFileEntry(self)
Retrieves the linked file entry, e.g. for a symbolic link.

Returns:
  TSKFileEntry: linked file entry or None.
3.108719
2.938459
1.057942
location = getattr(self.path_spec, 'location', None)
if location is None:
  return None

parent_inode = self._parent_inode
parent_location = self._file_system.DirnamePath(location)
if parent_inode is None and parent_location is None:
  return None

if parent_location == '':
  parent_location = self._file_system.PATH_SEPARATOR

root_inode = self._file_system.GetRootInode()
is_root = bool(
    parent_location == self._file_system.LOCATION_ROOT or (
        parent_inode is not None and root_inode is not None and
        parent_inode == root_inode))

parent_path_spec = getattr(self.path_spec, 'parent', None)
path_spec = tsk_path_spec.TSKPathSpec(
    inode=parent_inode, location=parent_location, parent=parent_path_spec)

return TSKFileEntry(
    self._resolver_context, self._file_system, path_spec, is_root=is_root)
def GetParentFileEntry(self)
Retrieves the parent file entry.

Returns:
  TSKFileEntry: parent file entry or None.
2.566456
2.397258
1.07058
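A standalone sketch (illustrative only; the helper name is hypothetical and rpartition stands in for the file system DirnamePath() call) of the parent-location derivation above: take the dirname of the location and fall back to the path separator when the result is empty.

PATH_SEPARATOR = '/'

def parent_location(location):
  """Derives a parent location from an absolute VFS location."""
  if location is None:
    return None
  parent, _, _ = location.rpartition(PATH_SEPARATOR)
  if parent == '':
    # The parent of a top-level entry is the root location.
    return PATH_SEPARATOR
  return parent

print(parent_location('/Users/test/file.txt'))  # /Users/test
print(parent_location('/file.txt'))             # /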
encrypted_root_plist = key_chain.GetCredential(
    path_spec, 'encrypted_root_plist')
if encrypted_root_plist:
  fvde_volume.read_encrypted_root_plist(encrypted_root_plist)

password = key_chain.GetCredential(path_spec, 'password')
if password:
  fvde_volume.set_password(password)

recovery_password = key_chain.GetCredential(path_spec, 'recovery_password')
if recovery_password:
  fvde_volume.set_recovery_password(recovery_password)

fvde_volume.open_file_object(file_object)
def FVDEVolumeOpen(fvde_volume, path_spec, file_object, key_chain)
Opens the FVDE volume using the path specification.

Args:
  fvde_volume (pyfvde.volume): FVDE volume.
  path_spec (PathSpec): path specification.
  file_object (FileIO): file-like object.
  key_chain (KeyChain): key chain.
2.220691
2.442627
0.90914
store_index = getattr(path_spec, 'store_index', None)

if store_index is None:
  location = getattr(path_spec, 'location', None)

  if location is None or not location.startswith('/vss'):
    return None

  store_index = None
  try:
    store_index = int(location[4:], 10) - 1
  except (TypeError, ValueError):
    pass

if store_index is None or store_index < 0:
  return None

return store_index
def VShadowPathSpecGetStoreIndex(path_spec)
Retrieves the store index from the path specification.

Args:
  path_spec (PathSpec): path specification.

Returns:
  int: store index or None if not available.
2.034074
2.476136
0.821471
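A standalone sketch (not the dfVFS helper itself; the function name is illustrative) of the location parsing above: a VSS location such as '/vss2' maps to the zero-based store index 1.

def store_index_from_location(location):
  """Parses a '/vssN' location into a zero-based store index."""
  if location is None or not location.startswith('/vss'):
    return None
  try:
    store_index = int(location[4:], 10) - 1
  except (TypeError, ValueError):
    return None
  if store_index < 0:
    return None
  return store_index

print(store_index_from_location('/vss2'))   # 1
print(store_index_from_location('/vss0'))   # None, stores are numbered from 1
print(store_index_from_location('/other'))  # None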
if offset < 0 or offset >= self.uncompressed_data_size:
  raise ValueError('Offset {0:d} is larger than file size {1:d}.'.format(
      offset, self.uncompressed_data_size))

for end_offset, member in iter(self._members_by_end_offset.items()):
  if offset < end_offset:
    return member

return None
def _GetMemberForOffset(self, offset)
Finds the member whose data includes the provided offset.

Args:
  offset (int): offset in the uncompressed data to find the containing
      member for.

Returns:
  gzipfile.GzipMember: gzip file member or None if not available.

Raises:
  ValueError: if the provided offset is outside of the bounds of the
      uncompressed data.
3.199156
2.908598
1.099896
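A standalone sketch (illustrative, not the dfVFS implementation; the member names and sizes are made up) of the same lookup: members are keyed by the end offset of their uncompressed data, so the containing member is the first entry whose end offset exceeds the requested offset. bisect gives the same answer without a linear scan.

import bisect

# Hypothetical members whose uncompressed data spans [0, 100), [100, 250)
# and [250, 400).
members_by_end_offset = {100: 'member-0', 250: 'member-1', 400: 'member-2'}

def member_for_offset(offset, members):
  """Returns the member whose uncompressed data contains offset."""
  end_offsets = sorted(members)
  if offset < 0 or offset >= end_offsets[-1]:
    raise ValueError('offset outside of the uncompressed data')
  index = bisect.bisect_right(end_offsets, offset)
  return members[end_offsets[index]]

print(member_for_offset(0, members_by_end_offset))    # member-0
print(member_for_offset(100, members_by_end_offset))  # member-1
print(member_for_offset(399, members_by_end_offset))  # member-2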
if not self._gzip_file_object:
  raise IOError('Not opened.')

if whence == os.SEEK_CUR:
  offset += self._current_offset
elif whence == os.SEEK_END:
  offset += self.uncompressed_data_size
elif whence != os.SEEK_SET:
  raise IOError('Unsupported whence.')

if offset < 0:
  raise IOError('Invalid offset value less than zero.')

self._current_offset = offset
def seek(self, offset, whence=os.SEEK_SET)
Seeks to an offset within the file-like object.

Args:
  offset (int): offset to seek to.
  whence (Optional[int]): value that indicates whether offset is an absolute
      or relative position within the file.

Raises:
  IOError: if the seek failed or the file has not been opened.
  OSError: if the seek failed or the file has not been opened.
2.854072
2.894379
0.986074
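A standalone sketch (illustrative only; the class name is hypothetical) of the whence handling above; seeking only records the new logical offset, since decompression happens lazily on read.

import os

class OffsetTracker(object):
  """Tracks a logical read offset the way the seek() above does."""

  def __init__(self, size):
    self._current_offset = 0
    self._size = size

  def seek(self, offset, whence=os.SEEK_SET):
    if whence == os.SEEK_CUR:
      offset += self._current_offset
    elif whence == os.SEEK_END:
      offset += self._size
    elif whence != os.SEEK_SET:
      raise IOError('Unsupported whence.')
    if offset < 0:
      raise IOError('Invalid offset value less than zero.')
    self._current_offset = offset

tracker = OffsetTracker(size=1024)
tracker.seek(-16, os.SEEK_END)
print(tracker._current_offset)  # 1008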
data = b''
while ((size and len(data) < size) and
       self._current_offset < self.uncompressed_data_size):
  member = self._GetMemberForOffset(self._current_offset)
  member_offset = self._current_offset - member.uncompressed_data_offset
  data_read = member.ReadAtOffset(member_offset, size)
  if data_read:
    self._current_offset += len(data_read)
    data = b''.join([data, data_read])

return data
def read(self, size=None)
Reads a byte string from the gzip file at the current offset.

The function will read a byte string up to the specified size or all of the
remaining data if no size was specified.

Args:
  size (Optional[int]): number of bytes to read, where None is all
      remaining data.

Returns:
  bytes: data read.

Raises:
  IOError: if the read failed.
  OSError: if the read failed.
3.405289
3.401798
1.001026
if not path_spec:
  raise ValueError('Missing path specification.')

if not path_spec.HasParent():
  raise errors.PathSpecError(
      'Unsupported path specification without parent.')

self._gzip_file_object = resolver.Resolver.OpenFileObject(
    path_spec.parent, resolver_context=self._resolver_context)

file_size = self._gzip_file_object.get_size()
self._gzip_file_object.seek(0, os.SEEK_SET)

uncompressed_data_offset = 0
next_member_offset = 0

while next_member_offset < file_size:
  member = gzipfile.GzipMember(
      self._gzip_file_object, next_member_offset, uncompressed_data_offset)
  uncompressed_data_offset = (
      uncompressed_data_offset + member.uncompressed_data_size)
  self._members_by_end_offset[uncompressed_data_offset] = member
  self.uncompressed_data_size += member.uncompressed_data_size
  next_member_offset = member.member_end_offset
def _Open(self, path_spec=None, mode='rb')
Opens the file-like object defined by path specification.

Args:
  path_spec (Optional[PathSpec]): path specification.
  mode (Optional[str]): file access mode.

Raises:
  AccessError: if the access to open the file was denied.
  IOError: if the file-like object could not be opened.
  OSError: if the file-like object could not be opened.
  PathSpecError: if the path specification is incorrect.
  ValueError: if the path specification is invalid.
2.228412
2.307481
0.965734
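A standalone sketch (the member sizes are made up) of how the member index above accumulates: each member is keyed by the running end offset of its uncompressed data, which is exactly the mapping _GetMemberForOffset later searches.

# Hypothetical (compressed_size, uncompressed_size) pairs for three gzip
# members found back to back in a multi-member gzip file.
member_sizes = [(512, 100), (700, 150), (300, 150)]

members_by_end_offset = {}
uncompressed_data_size = 0
next_member_offset = 0

for compressed_size, uncompressed_size in member_sizes:
  uncompressed_data_offset = uncompressed_data_size
  uncompressed_data_size += uncompressed_size
  members_by_end_offset[uncompressed_data_size] = (
      next_member_offset, uncompressed_data_offset)
  next_member_offset += compressed_size

print(members_by_end_offset)
# {100: (0, 0), 250: (512, 100), 400: (1212, 250)}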
stat_object = vfs_stat.VFSStat()

# File data stat information.
stat_object.size = self.path_spec.range_size

# File entry type stat information.
stat_object.type = stat_object.TYPE_FILE

return stat_object
def _GetStat(self)
Retrieves a stat object.

Returns:
  VFSStat: a stat object.
7.321015
8.708031
0.84072
string_parts = []

string_parts.append(getattr(self.parent, 'comparable', ''))
string_parts.append('type: {0:s}'.format(self.type_indicator))

if sub_comparable_string:
  string_parts.append(', {0:s}'.format(sub_comparable_string))
string_parts.append('\n')

return ''.join(string_parts)
def _GetComparable(self, sub_comparable_string='')
Retrieves the comparable representation.

This is a convenience function for constructing comparables.

Args:
  sub_comparable_string (str): sub comparable string.

Returns:
  str: comparable representation of the path specification.
2.887393
2.897571
0.996487
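A standalone sketch (the path spec class is a simplified stand-in, not a dfVFS class) of how the comparable strings above chain: each level appends its parent's comparable first, so the full string describes the whole path specification stack.

class FakePathSpec(object):
  """Minimal stand-in for a path specification with a comparable."""

  def __init__(self, type_indicator, parent=None, extra=''):
    self.type_indicator = type_indicator
    self.parent = parent
    self._extra = extra

  @property
  def comparable(self):
    string_parts = [getattr(self.parent, 'comparable', '')]
    string_parts.append('type: {0:s}'.format(self.type_indicator))
    if self._extra:
      string_parts.append(', {0:s}'.format(self._extra))
    string_parts.append('\n')
    return ''.join(string_parts)

os_spec = FakePathSpec('OS', extra='location: /images/disk.raw')
tsk_spec = FakePathSpec('TSK', parent=os_spec, extra='location: /file.txt')
print(tsk_spec.comparable)
# type: OS, location: /images/disk.raw
# type: TSK, location: /file.txt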
path_spec_dict = {}
for attribute_name, attribute_value in iter(self.__dict__.items()):
  if attribute_value is None:
    continue

  if attribute_name == 'parent':
    attribute_value = attribute_value.CopyToDict()

  path_spec_dict[attribute_name] = attribute_value

return path_spec_dict
def CopyToDict(self)
Copies the path specification to a dictionary.

Returns:
  dict[str, object]: path specification attributes.
2.977132
2.496773
1.192392
string_parts = []

if self.encrypted_root_plist:
  string_parts.append('encrypted_root_plist: {0:s}'.format(
      self.encrypted_root_plist))
if self.password:
  string_parts.append('password: {0:s}'.format(self.password))
if self.recovery_password:
  string_parts.append('recovery_password: {0:s}'.format(
      self.recovery_password))

return self._GetComparable(sub_comparable_string=', '.join(string_parts))
def comparable(self)
str: comparable representation of the path specification.
2.742614
2.589857
1.058983
if mount_point not in cls._mount_points:
  raise KeyError('Mount point: {0:s} not set.'.format(mount_point))

del cls._mount_points[mount_point]
def DeregisterMountPoint(cls, mount_point)
Deregisters a path specification mount point.

Args:
  mount_point (str): mount point identifier.

Raises:
  KeyError: if the corresponding mount point is not set.
2.588972
2.573287
1.006095
if mount_point in cls._mount_points:
  raise KeyError('Mount point: {0:s} already set.'.format(mount_point))

cls._mount_points[mount_point] = path_spec
def RegisterMountPoint(cls, mount_point, path_spec)
Registers a path specification mount point.

Args:
  mount_point (str): mount point identifier.
  path_spec (PathSpec): path specification of the mount point.

Raises:
  KeyError: if the corresponding mount point is already set.
2.256345
2.439153
0.925053
if resolver_helper.type_indicator not in cls._resolver_helpers:
  raise KeyError(
      'Resolver helper object not set for type indicator: {0:s}.'.format(
          resolver_helper.type_indicator))

del cls._resolver_helpers[resolver_helper.type_indicator]
def DeregisterHelper(cls, resolver_helper)
Deregisters a path specification resolver helper.

Args:
  resolver_helper (ResolverHelper): resolver helper.

Raises:
  KeyError: if resolver helper object is not set for the corresponding type
      indicator.
3.280131
2.071309
1.583603
if type_indicator not in cls._resolver_helpers:
  raise KeyError(
      'Resolver helper not set for type indicator: {0:s}.'.format(
          type_indicator))

return cls._resolver_helpers[type_indicator]
def GetHelper(cls, type_indicator)
Retrieves the path specification resolver helper for the specified type.

Args:
  type_indicator (str): type indicator.

Returns:
  ResolverHelper: a resolver helper.

Raises:
  KeyError: if resolver helper is not set for the corresponding type
      indicator.
3.54822
2.327878
1.52423
if resolver_helper.type_indicator in cls._resolver_helpers:
  raise KeyError((
      'Resolver helper object already set for type indicator: '
      '{0!s}.').format(resolver_helper.type_indicator))

cls._resolver_helpers[resolver_helper.type_indicator] = resolver_helper
def RegisterHelper(cls, resolver_helper)
Registers a path specification resolver helper.

Args:
  resolver_helper (ResolverHelper): resolver helper.

Raises:
  KeyError: if resolver helper object is already set for the corresponding
      type indicator.
3.359795
2.119146
1.585448
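A standalone sketch (class names are illustrative, not the dfVFS classes) tying the registry methods above together: helpers register themselves under their type indicator, are looked up by it, and double registration or unknown lookups raise KeyError.

class FakeResolverHelper(object):
  """Illustrative resolver helper carrying a type indicator."""

  def __init__(self, type_indicator):
    self.type_indicator = type_indicator

class FakeResolver(object):
  """Illustrative registry keyed by the helper's type indicator."""

  _resolver_helpers = {}

  @classmethod
  def RegisterHelper(cls, resolver_helper):
    if resolver_helper.type_indicator in cls._resolver_helpers:
      raise KeyError('already set: {0:s}'.format(
          resolver_helper.type_indicator))
    cls._resolver_helpers[resolver_helper.type_indicator] = resolver_helper

  @classmethod
  def GetHelper(cls, type_indicator):
    if type_indicator not in cls._resolver_helpers:
      raise KeyError('not set: {0:s}'.format(type_indicator))
    return cls._resolver_helpers[type_indicator]

FakeResolver.RegisterHelper(FakeResolverHelper('TSK'))
print(FakeResolver.GetHelper('TSK').type_indicator)  # TSK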
sub_comparable_string = (
    'compression_method: {0:s}').format(self.compression_method)

return self._GetComparable(sub_comparable_string=sub_comparable_string)
def comparable(self)
str: comparable representation of the path specification.
6.203188
5.222788
1.187716
if not self._file_object_set_in_init:
  self._file_object.close()
  self._file_object = None

self._range_offset = -1
self._range_size = -1
def _Close(self)
Closes the file-like object.

If the file-like object was passed in the init function the data range
file-like object does not control the file-like object and should not
actually close it.
4.97485
3.708895
1.341329