Dataset columns (name: type, value/length range):
index: int64, values 0 to 731k
package: string, lengths 2 to 98
name: string, lengths 1 to 76
docstring: string, lengths 0 to 281k
code: string, lengths 4 to 1.07M
signature: string, lengths 2 to 42.8k
41,712
comm.base_comm
handle_msg
Handle a comm_msg message
def handle_msg(self, msg: MessageType) -> None: """Handle a comm_msg message""" logger.debug("handle_msg[%s](%s)", self.comm_id, msg) if self._msg_callback: from IPython import get_ipython shell = get_ipython() if shell: shell.events.trigger("pre_execute") self._msg_callback(msg) if shell: shell.events.trigger("post_execute")
(self, msg: Dict[str, Any]) -> NoneType
41,713
comm.base_comm
on_close
Register a callback for comm_close Will be called with the `data` of the close message. Call `on_close(None)` to disable an existing callback.
def on_close(self, callback: CommCallback | None) -> None: """Register a callback for comm_close Will be called with the `data` of the close message. Call `on_close(None)` to disable an existing callback. """ self._close_callback = callback
(self, callback: Optional[Callable[[Dict[str, Any]], NoneType]]) -> NoneType
41,714
comm.base_comm
on_msg
Register a callback for comm_msg Will be called with the `data` of any comm_msg messages. Call `on_msg(None)` to disable an existing callback.
def on_msg(self, callback: CommCallback | None) -> None: """Register a callback for comm_msg Will be called with the `data` of any comm_msg messages. Call `on_msg(None)` to disable an existing callback. """ self._msg_callback = callback
(self, callback: Optional[Callable[[Dict[str, Any]], NoneType]]) -> NoneType
41,715
comm.base_comm
open
Open the frontend-side version of this comm
def open( self, data: MaybeDict = None, metadata: MaybeDict = None, buffers: BuffersType = None ) -> None: """Open the frontend-side version of this comm""" if data is None: data = self._open_data comm_manager = comm.get_comm_manager() if comm_manager is None: msg = "Comms cannot be opened without a comm_manager." # type:ignore[unreachable] raise RuntimeError(msg) comm_manager.register_comm(self) try: self.publish_msg( "comm_open", data=data, metadata=metadata, buffers=buffers, target_name=self.target_name, target_module=self.target_module, ) self._closed = False except Exception: comm_manager.unregister_comm(self) raise
(self, data: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, buffers: Optional[List[bytes]] = None) -> NoneType
41,716
comm.base_comm
publish_msg
null
def publish_msg( self, msg_type: str, # noqa: ARG002 data: MaybeDict = None, # noqa: ARG002 metadata: MaybeDict = None, # noqa: ARG002 buffers: BuffersType = None, # noqa: ARG002 **keys: t.Any, # noqa: ARG002 ) -> None: msg = "publish_msg Comm method is not implemented" raise NotImplementedError(msg)
(self, msg_type: str, data: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, buffers: Optional[List[bytes]] = None, **keys: Any) -> NoneType
41,717
comm.base_comm
send
Send a message to the frontend-side version of this comm
def send( self, data: MaybeDict = None, metadata: MaybeDict = None, buffers: BuffersType = None ) -> None: """Send a message to the frontend-side version of this comm""" self.publish_msg( "comm_msg", data=data, metadata=metadata, buffers=buffers, )
(self, data: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, buffers: Optional[List[bytes]] = None) -> NoneType
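The records above describe the kernel-side lifecycle of a Comm. As a minimal sketch, assuming a concrete Comm implementation has been installed so that comm.create_comm returns something more useful than DummyComm (ipykernel does this), and with a purely illustrative target name and payloads:

import comm

# Opening a comm publishes comm_open to the frontend for the given target.
my_comm = comm.create_comm(target_name="my_target", data={"hello": "world"})

def _on_msg(msg):
    # msg is the raw comm_msg message dict; the payload lives under msg["content"]["data"].
    print("frontend said:", msg["content"]["data"])

my_comm.on_msg(_on_msg)          # register the comm_msg callback
my_comm.send({"progress": 0.5})  # publishes a comm_msg
my_comm.close()                  # publishes comm_close to the frontend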
41,718
comm.base_comm
CommManager
Default CommManager singleton implementation for Comms in the Kernel
class CommManager: """Default CommManager singleton implementation for Comms in the Kernel""" # Public APIs def __init__(self) -> None: self.comms: dict[str, BaseComm] = {} self.targets: dict[str, CommTargetCallback] = {} def register_target(self, target_name: str, f: CommTargetCallback | str) -> None: """Register a callable f for a given target name f will be called with two arguments when a comm_open message is received with `target`: - the Comm instance - the `comm_open` message itself. f can be a Python callable or an import string for one. """ if isinstance(f, str): f = import_item(f) self.targets[target_name] = t.cast(CommTargetCallback, f) def unregister_target(self, target_name: str, f: CommTargetCallback) -> CommTargetCallback: # noqa: ARG002 """Unregister a callable registered with register_target""" return self.targets.pop(target_name) def register_comm(self, comm: BaseComm) -> str: """Register a new comm""" comm_id = comm.comm_id self.comms[comm_id] = comm return comm_id def unregister_comm(self, comm: BaseComm) -> None: """Unregister a comm, and close its counterpart""" # unlike get_comm, this should raise a KeyError comm = self.comms.pop(comm.comm_id) def get_comm(self, comm_id: str) -> BaseComm | None: """Get a comm with a particular id Returns the comm if found, otherwise None. This will not raise an error, it will log messages if the comm cannot be found. """ try: return self.comms[comm_id] except KeyError: logger.warning("No such comm: %s", comm_id) if logger.isEnabledFor(logging.DEBUG): # don't create the list of keys if debug messages aren't enabled logger.debug("Current comms: %s", list(self.comms.keys())) return None # Message handlers def comm_open(self, stream: ZMQStream, ident: str, msg: MessageType) -> None: # noqa: ARG002 """Handler for comm_open messages""" from comm import create_comm content = msg["content"] comm_id = content["comm_id"] target_name = content["target_name"] f = self.targets.get(target_name, None) comm = create_comm( comm_id=comm_id, primary=False, target_name=target_name, ) self.register_comm(comm) if f is None: logger.error("No such comm target registered: %s", target_name) else: try: f(comm, msg) return except Exception: logger.error("Exception opening comm with target: %s", target_name, exc_info=True) # Failure. try: comm.close() except Exception: logger.error( """Could not close comm during `comm_open` failure clean-up. The comm may not have been opened yet.""", exc_info=True, ) def comm_msg(self, stream: ZMQStream, ident: str, msg: MessageType) -> None: # noqa: ARG002 """Handler for comm_msg messages""" content = msg["content"] comm_id = content["comm_id"] comm = self.get_comm(comm_id) if comm is None: return try: comm.handle_msg(msg) except Exception: logger.error("Exception in comm_msg for %s", comm_id, exc_info=True) def comm_close(self, stream: ZMQStream, ident: str, msg: MessageType) -> None: # noqa: ARG002 """Handler for comm_close messages""" content = msg["content"] comm_id = content["comm_id"] comm = self.get_comm(comm_id) if comm is None: return self.comms[comm_id]._closed = True del self.comms[comm_id] try: comm.handle_close(msg) except Exception: logger.error("Exception in comm_close for %s", comm_id, exc_info=True)
() -> 'None'
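A short sketch of the CommManager bookkeeping methods above (register_comm, get_comm, unregister_comm), using DummyComm so that publish_msg is a no-op; the target name is illustrative:

from comm import DummyComm
from comm.base_comm import CommManager

manager = CommManager()
c = DummyComm(target_name="demo")               # DummyComm's publish_msg does nothing
comm_id = manager.register_comm(c)              # keyed by c.comm_id
assert manager.get_comm(comm_id) is c
assert manager.get_comm("missing-id") is None   # logs a warning instead of raising
manager.unregister_comm(c)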
41,719
comm.base_comm
__init__
null
def __init__(self) -> None: self.comms: dict[str, BaseComm] = {} self.targets: dict[str, CommTargetCallback] = {}
(self) -> NoneType
41,720
comm.base_comm
comm_close
Handler for comm_close messages
def comm_close(self, stream: ZMQStream, ident: str, msg: MessageType) -> None: # noqa: ARG002 """Handler for comm_close messages""" content = msg["content"] comm_id = content["comm_id"] comm = self.get_comm(comm_id) if comm is None: return self.comms[comm_id]._closed = True del self.comms[comm_id] try: comm.handle_close(msg) except Exception: logger.error("Exception in comm_close for %s", comm_id, exc_info=True)
(self, stream: 'ZMQStream', ident: 'str', msg: 'MessageType') -> 'None'
41,721
comm.base_comm
comm_msg
Handler for comm_msg messages
def comm_msg(self, stream: ZMQStream, ident: str, msg: MessageType) -> None: # noqa: ARG002 """Handler for comm_msg messages""" content = msg["content"] comm_id = content["comm_id"] comm = self.get_comm(comm_id) if comm is None: return try: comm.handle_msg(msg) except Exception: logger.error("Exception in comm_msg for %s", comm_id, exc_info=True)
(self, stream: 'ZMQStream', ident: 'str', msg: 'MessageType') -> 'None'
41,722
comm.base_comm
comm_open
Handler for comm_open messages
def comm_open(self, stream: ZMQStream, ident: str, msg: MessageType) -> None: # noqa: ARG002 """Handler for comm_open messages""" from comm import create_comm content = msg["content"] comm_id = content["comm_id"] target_name = content["target_name"] f = self.targets.get(target_name, None) comm = create_comm( comm_id=comm_id, primary=False, target_name=target_name, ) self.register_comm(comm) if f is None: logger.error("No such comm target registered: %s", target_name) else: try: f(comm, msg) return except Exception: logger.error("Exception opening comm with target: %s", target_name, exc_info=True) # Failure. try: comm.close() except Exception: logger.error( """Could not close comm during `comm_open` failure clean-up. The comm may not have been opened yet.""", exc_info=True, )
(self, stream: 'ZMQStream', ident: 'str', msg: 'MessageType') -> 'None'
41,723
comm.base_comm
get_comm
Get a comm with a particular id Returns the comm if found, otherwise None. This will not raise an error, it will log messages if the comm cannot be found.
def get_comm(self, comm_id: str) -> BaseComm | None: """Get a comm with a particular id Returns the comm if found, otherwise None. This will not raise an error, it will log messages if the comm cannot be found. """ try: return self.comms[comm_id] except KeyError: logger.warning("No such comm: %s", comm_id) if logger.isEnabledFor(logging.DEBUG): # don't create the list of keys if debug messages aren't enabled logger.debug("Current comms: %s", list(self.comms.keys())) return None
(self, comm_id: str) -> comm.base_comm.BaseComm | None
41,724
comm.base_comm
register_comm
Register a new comm
def register_comm(self, comm: BaseComm) -> str: """Register a new comm""" comm_id = comm.comm_id self.comms[comm_id] = comm return comm_id
(self, comm: comm.base_comm.BaseComm) -> str
41,725
comm.base_comm
register_target
Register a callable f for a given target name f will be called with two arguments when a comm_open message is received with `target`: - the Comm instance - the `comm_open` message itself. f can be a Python callable or an import string for one.
def register_target(self, target_name: str, f: CommTargetCallback | str) -> None: """Register a callable f for a given target name f will be called with two arguments when a comm_open message is received with `target`: - the Comm instance - the `comm_open` message itself. f can be a Python callable or an import string for one. """ if isinstance(f, str): f = import_item(f) self.targets[target_name] = t.cast(CommTargetCallback, f)
(self, target_name: str, f: Union[Callable[[comm.base_comm.BaseComm, Dict[str, Any]], NoneType], str]) -> NoneType
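A sketch of registering a target callback so that frontend-initiated comm_open messages for that target are routed to it; the target name "echo_target" and the callback body are illustrative:

import comm

def _echo_target(the_comm, open_msg):
    # Called with the new Comm instance and the raw comm_open message.
    the_comm.send({"echo": open_msg["content"].get("data")})
    the_comm.on_msg(lambda msg: the_comm.send(msg["content"]["data"]))

comm.get_comm_manager().register_target("echo_target", _echo_target)
# An import string is also accepted, e.g. register_target("echo_target", "mypkg.comms.echo_target")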
41,726
comm.base_comm
unregister_comm
Unregister a comm, and close its counterpart
def unregister_comm(self, comm: BaseComm) -> None: """Unregister a comm, and close its counterpart""" # unlike get_comm, this should raise a KeyError comm = self.comms.pop(comm.comm_id)
(self, comm: comm.base_comm.BaseComm) -> NoneType
41,727
comm.base_comm
unregister_target
Unregister a callable registered with register_target
def unregister_target(self, target_name: str, f: CommTargetCallback) -> CommTargetCallback: # noqa: ARG002 """Unregister a callable registered with register_target""" return self.targets.pop(target_name)
(self, target_name: str, f: Callable[[comm.base_comm.BaseComm, Dict[str, Any]], NoneType]) -> Callable[[comm.base_comm.BaseComm, Dict[str, Any]], NoneType]
41,728
comm
DummyComm
null
class DummyComm(BaseComm): def publish_msg( self, msg_type: str, data: MaybeDict = None, metadata: MaybeDict = None, buffers: BuffersType = None, **keys: Any, ) -> None: pass
(target_name: 'str' = 'comm', data: 'MaybeDict' = None, metadata: 'MaybeDict' = None, buffers: 'BuffersType' = None, comm_id: 'str | None' = None, primary: 'bool' = True, target_module: 'str | None' = None, topic: 'bytes | None' = None, _open_data: 'MaybeDict' = None, _close_data: 'MaybeDict' = None, **kwargs: 't.Any') -> 'None'
41,737
comm
publish_msg
null
def publish_msg( self, msg_type: str, data: MaybeDict = None, metadata: MaybeDict = None, buffers: BuffersType = None, **keys: Any, ) -> None: pass
(self, msg_type: str, data: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, buffers: Optional[List[bytes]] = None, **keys: Any) -> NoneType
41,739
comm
_create_comm
Create a Comm. This method is intended to be replaced, so that it returns your Comm instance.
def _create_comm(*args: Any, **kwargs: Any) -> BaseComm: """Create a Comm. This method is intended to be replaced, so that it returns your Comm instance. """ return DummyComm(*args, **kwargs)
(*args: Any, **kwargs: Any) -> comm.base_comm.BaseComm
41,740
comm
_get_comm_manager
Get the current Comm manager, creates one if there is none. This method is intended to be replaced if needed (if you want to manage multiple CommManagers).
def _get_comm_manager() -> CommManager: """Get the current Comm manager, creates one if there is none. This method is intended to be replaced if needed (if you want to manage multiple CommManagers). """ global _comm_manager # noqa: PLW0603 if _comm_manager is None: _comm_manager = CommManager() return _comm_manager
() -> comm.base_comm.CommManager
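Both create_comm and get_comm_manager are module-level hooks that a kernel is expected to replace. A sketch of how an implementation might install its own Comm class and manager; MyComm and MY_MANAGER are hypothetical stand-ins for a real implementation such as ipykernel's:

import comm
from comm.base_comm import BaseComm, CommManager

class MyComm(BaseComm):
    def publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):
        # A real kernel would serialize and send this over its messaging channel.
        print(f"would publish {msg_type}: {data}")

MY_MANAGER = CommManager()
comm.create_comm = lambda *args, **kwargs: MyComm(*args, **kwargs)
comm.get_comm_manager = lambda: MY_MANAGER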
41,745
tokenize_rt
Offset
Offset(line, utf8_byte_offset)
class Offset(NamedTuple): line: int | None = None utf8_byte_offset: int | None = None
(line: int | None = None, utf8_byte_offset: int | None = None)
41,747
namedtuple_Offset
__new__
Create new instance of Offset(line, utf8_byte_offset)
from builtins import function
(_cls, line: ForwardRef('int | None') = None, utf8_byte_offset: ForwardRef('int | None') = None)
41,750
collections
_replace
Return a new Offset object replacing specified fields with new values
def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None): """Returns a new subclass of tuple with named fields. >>> Point = namedtuple('Point', ['x', 'y']) >>> Point.__doc__ # docstring for the new class 'Point(x, y)' >>> p = Point(11, y=22) # instantiate with positional args or keywords >>> p[0] + p[1] # indexable like a plain tuple 33 >>> x, y = p # unpack like a regular tuple >>> x, y (11, 22) >>> p.x + p.y # fields also accessible by name 33 >>> d = p._asdict() # convert to a dictionary >>> d['x'] 11 >>> Point(**d) # convert from a dictionary Point(x=11, y=22) >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields Point(x=100, y=22) """ # Validate the field names. At the user's option, either generate an error # message or automatically replace the field name with a valid name. if isinstance(field_names, str): field_names = field_names.replace(',', ' ').split() field_names = list(map(str, field_names)) typename = _sys.intern(str(typename)) if rename: seen = set() for index, name in enumerate(field_names): if (not name.isidentifier() or _iskeyword(name) or name.startswith('_') or name in seen): field_names[index] = f'_{index}' seen.add(name) for name in [typename] + field_names: if type(name) is not str: raise TypeError('Type names and field names must be strings') if not name.isidentifier(): raise ValueError('Type names and field names must be valid ' f'identifiers: {name!r}') if _iskeyword(name): raise ValueError('Type names and field names cannot be a ' f'keyword: {name!r}') seen = set() for name in field_names: if name.startswith('_') and not rename: raise ValueError('Field names cannot start with an underscore: ' f'{name!r}') if name in seen: raise ValueError(f'Encountered duplicate field name: {name!r}') seen.add(name) field_defaults = {} if defaults is not None: defaults = tuple(defaults) if len(defaults) > len(field_names): raise TypeError('Got more default values than field names') field_defaults = dict(reversed(list(zip(reversed(field_names), reversed(defaults))))) # Variables used in the methods and docstrings field_names = tuple(map(_sys.intern, field_names)) num_fields = len(field_names) arg_list = ', '.join(field_names) if num_fields == 1: arg_list += ',' repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')' tuple_new = tuple.__new__ _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip # Create all the named tuple methods to be added to the class namespace namespace = { '_tuple_new': tuple_new, '__builtins__': {}, '__name__': f'namedtuple_{typename}', } code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))' __new__ = eval(code, namespace) __new__.__name__ = '__new__' __new__.__doc__ = f'Create new instance of {typename}({arg_list})' if defaults is not None: __new__.__defaults__ = defaults @classmethod def _make(cls, iterable): result = tuple_new(cls, iterable) if _len(result) != num_fields: raise TypeError(f'Expected {num_fields} arguments, got {len(result)}') return result _make.__func__.__doc__ = (f'Make a new {typename} object from a sequence ' 'or iterable') def _replace(self, /, **kwds): result = self._make(_map(kwds.pop, field_names, self)) if kwds: raise ValueError(f'Got unexpected field names: {list(kwds)!r}') return result _replace.__doc__ = (f'Return a new {typename} object replacing specified ' 'fields with new values') def __repr__(self): 'Return a nicely formatted representation string' return self.__class__.__name__ + repr_fmt % self def _asdict(self): 'Return a new dict which maps field names to their values.' return _dict(_zip(self._fields, self)) def __getnewargs__(self): 'Return self as a plain tuple. Used by copy and pickle.' return _tuple(self) # Modify function metadata to help with introspection and debugging for method in ( __new__, _make.__func__, _replace, __repr__, _asdict, __getnewargs__, ): method.__qualname__ = f'{typename}.{method.__name__}' # Build-up the class namespace dictionary # and use type() to build the result class class_namespace = { '__doc__': f'{typename}({arg_list})', '__slots__': (), '_fields': field_names, '_field_defaults': field_defaults, '__new__': __new__, '_make': _make, '_replace': _replace, '__repr__': __repr__, '_asdict': _asdict, '__getnewargs__': __getnewargs__, '__match_args__': field_names, } for index, name in enumerate(field_names): doc = _sys.intern(f'Alias for field number {index}') class_namespace[name] = _tuplegetter(index, doc) result = type(typename, (tuple,), class_namespace) # For pickling to work, the __module__ variable needs to be set to the frame # where the named tuple is created. Bypass this step in environments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython), or where the user has # specified a particular module. if module is None: try: module = _sys._getframe(1).f_globals.get('__name__', '__main__') except (AttributeError, ValueError): pass if module is not None: result.__module__ = module return result
(self, /, **kwds)
41,751
tokenize_rt
Token
Token(name, src, line, utf8_byte_offset)
class Token(NamedTuple): name: str src: str line: int | None = None utf8_byte_offset: int | None = None @property def offset(self) -> Offset: return Offset(self.line, self.utf8_byte_offset) def matches(self, *, name: str, src: str) -> bool: return self.name == name and self.src == src
(name: str, src: str, line: int | None = None, utf8_byte_offset: int | None = None)
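A small sketch of constructing Offset and Token values directly; in practice tokens come from src_to_tokens, and the concrete values here are illustrative:

from tokenize_rt import Offset, Token

tok = Token(name="NAME", src="print", line=1, utf8_byte_offset=0)
assert tok.offset == Offset(line=1, utf8_byte_offset=0)
assert tok.matches(name="NAME", src="print")
assert tok._replace(src="puts").src == "puts"   # namedtuple-style copy-with-change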
41,753
namedtuple_Token
__new__
Create new instance of Token(name, src, line, utf8_byte_offset)
from builtins import function
(_cls, name: ForwardRef('str'), src: ForwardRef('str'), line: ForwardRef('int | None') = None, utf8_byte_offset: ForwardRef('int | None') = None)
41,756
collections
_replace
Return a new Token object replacing specified fields with new values
def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None): """Returns a new subclass of tuple with named fields. >>> Point = namedtuple('Point', ['x', 'y']) >>> Point.__doc__ # docstring for the new class 'Point(x, y)' >>> p = Point(11, y=22) # instantiate with positional args or keywords >>> p[0] + p[1] # indexable like a plain tuple 33 >>> x, y = p # unpack like a regular tuple >>> x, y (11, 22) >>> p.x + p.y # fields also accessible by name 33 >>> d = p._asdict() # convert to a dictionary >>> d['x'] 11 >>> Point(**d) # convert from a dictionary Point(x=11, y=22) >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields Point(x=100, y=22) """ # Validate the field names. At the user's option, either generate an error # message or automatically replace the field name with a valid name. if isinstance(field_names, str): field_names = field_names.replace(',', ' ').split() field_names = list(map(str, field_names)) typename = _sys.intern(str(typename)) if rename: seen = set() for index, name in enumerate(field_names): if (not name.isidentifier() or _iskeyword(name) or name.startswith('_') or name in seen): field_names[index] = f'_{index}' seen.add(name) for name in [typename] + field_names: if type(name) is not str: raise TypeError('Type names and field names must be strings') if not name.isidentifier(): raise ValueError('Type names and field names must be valid ' f'identifiers: {name!r}') if _iskeyword(name): raise ValueError('Type names and field names cannot be a ' f'keyword: {name!r}') seen = set() for name in field_names: if name.startswith('_') and not rename: raise ValueError('Field names cannot start with an underscore: ' f'{name!r}') if name in seen: raise ValueError(f'Encountered duplicate field name: {name!r}') seen.add(name) field_defaults = {} if defaults is not None: defaults = tuple(defaults) if len(defaults) > len(field_names): raise TypeError('Got more default values than field names') field_defaults = dict(reversed(list(zip(reversed(field_names), reversed(defaults))))) # Variables used in the methods and docstrings field_names = tuple(map(_sys.intern, field_names)) num_fields = len(field_names) arg_list = ', '.join(field_names) if num_fields == 1: arg_list += ',' repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')' tuple_new = tuple.__new__ _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip # Create all the named tuple methods to be added to the class namespace namespace = { '_tuple_new': tuple_new, '__builtins__': {}, '__name__': f'namedtuple_{typename}', } code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))' __new__ = eval(code, namespace) __new__.__name__ = '__new__' __new__.__doc__ = f'Create new instance of {typename}({arg_list})' if defaults is not None: __new__.__defaults__ = defaults @classmethod def _make(cls, iterable): result = tuple_new(cls, iterable) if _len(result) != num_fields: raise TypeError(f'Expected {num_fields} arguments, got {len(result)}') return result _make.__func__.__doc__ = (f'Make a new {typename} object from a sequence ' 'or iterable') def _replace(self, /, **kwds): result = self._make(_map(kwds.pop, field_names, self)) if kwds: raise ValueError(f'Got unexpected field names: {list(kwds)!r}') return result _replace.__doc__ = (f'Return a new {typename} object replacing specified ' 'fields with new values') def __repr__(self): 'Return a nicely formatted representation string' return self.__class__.__name__ + repr_fmt % self def _asdict(self): 'Return a new dict which maps field names to their values.' return _dict(_zip(self._fields, self)) def __getnewargs__(self): 'Return self as a plain tuple. Used by copy and pickle.' return _tuple(self) # Modify function metadata to help with introspection and debugging for method in ( __new__, _make.__func__, _replace, __repr__, _asdict, __getnewargs__, ): method.__qualname__ = f'{typename}.{method.__name__}' # Build-up the class namespace dictionary # and use type() to build the result class class_namespace = { '__doc__': f'{typename}({arg_list})', '__slots__': (), '_fields': field_names, '_field_defaults': field_defaults, '__new__': __new__, '_make': _make, '_replace': _replace, '__repr__': __repr__, '_asdict': _asdict, '__getnewargs__': __getnewargs__, '__match_args__': field_names, } for index, name in enumerate(field_names): doc = _sys.intern(f'Alias for field number {index}') class_namespace[name] = _tuplegetter(index, doc) result = type(typename, (tuple,), class_namespace) # For pickling to work, the __module__ variable needs to be set to the frame # where the named tuple is created. Bypass this step in environments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython), or where the user has # specified a particular module. if module is None: try: module = _sys._getframe(1).f_globals.get('__name__', '__main__') except (AttributeError, ValueError): pass if module is not None: result.__module__ = module return result
(self, /, **kwds)
41,757
tokenize_rt
matches
null
def matches(self, *, name: str, src: str) -> bool: return self.name == name and self.src == src
(self, *, name: str, src: str) -> bool
41,758
tokenize_rt
_re_partition
null
def _re_partition(regex: Pattern[str], s: str) -> tuple[str, str, str]: match = regex.search(s) if match: return s[:match.start()], s[slice(*match.span())], s[match.end():] else: return (s, '', '')
(regex: Pattern[str], s: str) -> tuple[str, str, str]
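The helper behaves like str.partition with a regex separator, as a quick example shows:

import re
from tokenize_rt import _re_partition

assert _re_partition(re.compile(r'\d+'), 'ab12cd') == ('ab', '12', 'cd')
assert _re_partition(re.compile(r'\d+'), 'abcd') == ('abcd', '', '')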
41,762
tokenize_rt
main
null
def main(argv: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument('filename') args = parser.parse_args(argv) with open(args.filename) as f: tokens = src_to_tokens(f.read()) for token in tokens: line, col = str(token.line), str(token.utf8_byte_offset) print(f'{line}:{col} {token.name} {token.src!r}') return 0
(argv: Optional[Sequence[str]] = None) -> int
41,763
tokenize_rt
parse_string_literal
parse a string literal's source into (prefix, string)
def parse_string_literal(src: str) -> tuple[str, str]: """parse a string literal's source into (prefix, string)""" match = _string_re.match(src) assert match is not None return match.group(1), match.group(2)
(src: str) -> tuple[str, str]
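For example, the prefix and the quoted part are split apart like this:

from tokenize_rt import parse_string_literal

assert parse_string_literal("f'hi {x}'") == ('f', "'hi {x}'")
assert parse_string_literal('"plain"') == ('', '"plain"')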
41,765
tokenize_rt
reversed_enumerate
null
def reversed_enumerate( tokens: Sequence[Token], ) -> Generator[tuple[int, Token], None, None]: for i in reversed(range(len(tokens))): yield i, tokens[i]
(tokens: Sequence[tokenize_rt.Token]) -> Generator[tuple[int, tokenize_rt.Token], NoneType, NoneType]
41,766
tokenize_rt
rfind_string_parts
find the indicies of the string parts of a (joined) string literal - `i` should start at the end of the string literal - returns `()` (an empty tuple) for things which are not string literals
def rfind_string_parts(tokens: Sequence[Token], i: int) -> tuple[int, ...]: """find the indicies of the string parts of a (joined) string literal - `i` should start at the end of the string literal - returns `()` (an empty tuple) for things which are not string literals """ ret = [] depth = 0 for i in range(i, -1, -1): token = tokens[i] if token.name == 'STRING': ret.append(i) elif token.name in NON_CODING_TOKENS: pass elif token.src == ')': depth += 1 elif depth and token.src == '(': depth -= 1 # if we closed the paren(s) make sure it was a parenthesized string # and not actually a call if depth == 0: for j in range(i - 1, -1, -1): tok = tokens[j] if tok.name in NON_CODING_TOKENS: pass # this was actually a call and not a parenthesized string elif ( tok.src in {']', ')'} or ( tok.name == 'NAME' and tok.src not in keyword.kwlist ) ): return () else: break break elif depth: # it looked like a string but wasn't return () else: break return tuple(reversed(ret))
(tokens: Sequence[tokenize_rt.Token], i: int) -> tuple[int, ...]
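A sketch of locating the pieces of an implicitly-joined string literal; the starting index is found by scanning for the last STRING token rather than hard-coding it:

from tokenize_rt import rfind_string_parts, src_to_tokens

tokens = src_to_tokens('"foo" "bar"\n')
last_string = max(i for i, tok in enumerate(tokens) if tok.name == 'STRING')
parts = rfind_string_parts(tokens, last_string)
print([tokens[i].src for i in parts])   # ['"foo"', '"bar"']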
41,767
tokenize_rt
src_to_tokens
null
def src_to_tokens(src: str) -> list[Token]: tokenize_target = io.StringIO(src) lines = ('',) + tuple(tokenize_target) tokenize_target.seek(0) tokens = [] last_line = 1 last_col = 0 end_offset = 0 gen = tokenize.generate_tokens(tokenize_target.readline) for tok_type, tok_text, (sline, scol), (eline, ecol), line in gen: if sline > last_line: newtok = lines[last_line][last_col:] for lineno in range(last_line + 1, sline): newtok += lines[lineno] if scol > 0: newtok += lines[sline][:scol] # a multiline unimportant whitespace may contain escaped newlines while _escaped_nl_re.search(newtok): ws, nl, newtok = _re_partition(_escaped_nl_re, newtok) if ws: tokens.append( Token(UNIMPORTANT_WS, ws, last_line, end_offset), ) end_offset += len(ws.encode()) tokens.append(Token(ESCAPED_NL, nl, last_line, end_offset)) end_offset = 0 last_line += 1 if newtok: tokens.append(Token(UNIMPORTANT_WS, newtok, sline, 0)) end_offset = len(newtok.encode()) else: end_offset = 0 elif scol > last_col: newtok = line[last_col:scol] tokens.append(Token(UNIMPORTANT_WS, newtok, sline, end_offset)) end_offset += len(newtok.encode()) tok_name = tokenize.tok_name[tok_type] tokens.append(Token(tok_name, tok_text, sline, end_offset)) last_line, last_col = eline, ecol if sline != eline: end_offset = len(lines[last_line][:last_col].encode()) else: end_offset += len(tok_text.encode()) return tokens
(src: str) -> list[tokenize_rt.Token]
41,770
tokenize_rt
tokens_to_src
null
def tokens_to_src(tokens: Iterable[Token]) -> str: return ''.join(tok.src for tok in tokens)
(tokens: Iterable[tokenize_rt.Token]) -> str
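These two functions form the usual round-trip: tokenize, rewrite tokens in place, and emit source again. A sketch that renames every NAME token spelled foo while preserving comments and spacing:

from tokenize_rt import reversed_enumerate, src_to_tokens, tokens_to_src

src = 'foo = foo + 1  # spacing and comments survive\n'
tokens = src_to_tokens(src)
for i, token in reversed_enumerate(tokens):
    if token.matches(name='NAME', src='foo'):
        tokens[i] = token._replace(src='bar')
print(tokens_to_src(tokens))   # bar = bar + 1  # spacing and comments survive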
41,771
wield.bunch.bunch
Bunch
Cookbook method for creating bunches Often we want to just collect a bunch of stuff together, naming each item of the bunch; a dictionary's OK for that, but a small do-nothing class is even handier, and prettier to use. Whenever you want to group a few variables: >>> point = Bunch(datum=2, squared=4, coord=12) >>> point.datum taken from matplotlib's cbook.py By: Alex Martelli From: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52308
class Bunch(object): """ Cookbook method for creating bunches Often we want to just collect a bunch of stuff together, naming each item of the bunch; a dictionary's OK for that, but a small do-nothing class is even handier, and prettier to use. Whenever you want to group a few variables: >>> point = Bunch(datum=2, squared=4, coord=12) >>> point.datum taken from matplotlib's cbook.py By: Alex Martelli From: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52308 """ def __init__(self, inner_dict=None, *args, **kwds): if inner_dict is None or args or kwds: if args: _mydict = dict(inner_dict, *args, **kwds) else: _mydict = dict(**kwds) else: _mydict = inner_dict self.__dict__["_mydict"] = _mydict return @classmethod def as_bunch(cls, data): if isinstance(data, cls): return data return cls(data) def __repr__(self): keys = list(self._mydict.keys()) return "{0}({1})".format( self.__class__.__name__, ", \n ".join( ["".join((str(k), "=", repr(self._mydict[k]))) for k in keys] ), ) def _repr_pretty_(self, p, cycle): if cycle: p.text("Bunch(<recurse>)") else: with p.group(4, "Bunch(", ")"): first = True for k, v in sorted(self._mydict.items()): if not first: p.text(",") p.breakable() else: p.breakable() first = False if isinstance(k, str): p.text(k) else: p.pretty(k) p.text(" = ") p.pretty(v) if not first: p.text(",") p.breakable() return def __dir__(self): items = [k for k in self._mydict.keys() if isinstance(k, str)] # items += dir(super(Bunch, self)) return items def __getitem__(self, key): if isinstance(key, (slice, np.ndarray, MutableSequence)): rebuild = dict() for vkey, val in self._mydict.items(): if isinstance(val, np.ndarray): val = val[key] rebuild[vkey] = val if not rebuild: raise RuntimeError("Not holding arrays to index by {0}".format(key)) return Bunch(rebuild) else: return self._mydict[key] def domain_sort(self, key): argsort = np.argsort(self[key]) return self[argsort] def __getattr__(self, key): try: item = self._mydict[key] except KeyError as E: raise AttributeError(E) if type(item) is dict: return self.__class__(item) return item def __setattr__(self, key, item): self._mydict[key] = item return def __delattr__(self, key): del self._mydict[key] def __delitem__(self, key): del self._mydict[key] def __deepcopy__(self, memo): return self.__class__(copy.deepcopy(self._mydict, memo)) def copy(self): return self.__class__(self._mydict.copy()) __contains__ = gen_func("__contains__") __eq__ = gen_func("__eq__") __format__ = gen_func("__format__") __ge__ = gen_func("__ge__") __gt__ = gen_func("__gt__") __iter__ = gen_func("__iter__") __le__ = gen_func("__le__") __len__ = gen_func("__len__") __lt__ = gen_func("__lt__") __ne__ = gen_func("__ne__") __setitem__ = gen_func("__setitem__") __sizeof__ = gen_func("__sizeof__") __str__ = gen_func("__str__") clear = gen_func("clear") fromkeys = gen_func("fromkeys") get = gen_func("get") items = gen_func("items") keys = gen_func("keys") pop = gen_func("pop") popitem = gen_func("popitem") setdefault = gen_func("setdefault") update = gen_func("update") values = gen_func("values")
(inner_dict=None, *args, **kwds)
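A usage sketch for Bunch covering the attribute/dict duality and the array-aware indexing that domain_sort relies on; it assumes numpy is importable, as the class itself does, and the field names are illustrative:

import numpy as np
from wield.bunch.bunch import Bunch

b = Bunch(datum=2, squared=4)
assert b.datum == 2 and b["squared"] == 4
b.coord = 12                        # attribute assignment writes into the wrapped dict
assert "coord" in b

arrays = Bunch(x=np.array([3.0, 1.0, 2.0]), y=np.array([30.0, 10.0, 20.0]))
masked = arrays[arrays.x > 1.5]     # ndarray keys index every array field
by_x = arrays.domain_sort("x")      # reorders every array by ascending x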
41,772
wield.bunch.bunch
__contains__
True if the dictionary has the specified key, else False.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,773
wield.bunch.bunch
__deepcopy__
null
def __deepcopy__(self, memo): return self.__class__(copy.deepcopy(self._mydict, memo))
(self, memo)
41,774
wield.bunch.bunch
__delattr__
null
def __delattr__(self, key): del self._mydict[key]
(self, key)
41,775
wield.bunch.bunch
__delitem__
null
def __delitem__(self, key): del self._mydict[key]
(self, key)
41,776
wield.bunch.bunch
__dir__
null
def __dir__(self): items = [k for k in self._mydict.keys() if isinstance(k, str)] # items += dir(super(Bunch, self)) return items
(self)
41,777
wield.bunch.bunch
__eq__
Return self==value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,778
wield.bunch.bunch
__format__
Default object formatter.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,779
wield.bunch.bunch
__ge__
Return self>=value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,780
wield.bunch.bunch
__getattr__
null
def __getattr__(self, key): try: item = self._mydict[key] except KeyError as E: raise AttributeError(E) if type(item) is dict: return self.__class__(item) return item
(self, key)
41,781
wield.bunch.bunch
__getitem__
null
def __getitem__(self, key): if isinstance(key, (slice, np.ndarray, MutableSequence)): rebuild = dict() for vkey, val in self._mydict.items(): if isinstance(val, np.ndarray): val = val[key] rebuild[vkey] = val if not rebuild: raise RuntimeError("Not holding arrays to index by {0}".format(key)) return Bunch(rebuild) else: return self._mydict[key]
(self, key)
41,782
wield.bunch.bunch
__gt__
Return self>value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,783
wield.bunch.bunch
__init__
null
def __init__(self, inner_dict=None, *args, **kwds): if inner_dict is None or args or kwds: if args: _mydict = dict(inner_dict, *args, **kwds) else: _mydict = dict(**kwds) else: _mydict = inner_dict self.__dict__["_mydict"] = _mydict return
(self, inner_dict=None, *args, **kwds)
41,784
wield.bunch.bunch
__iter__
Implement iter(self).
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,785
wield.bunch.bunch
__le__
Return self<=value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,786
wield.bunch.bunch
__len__
Return len(self).
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,787
wield.bunch.bunch
__lt__
Return self<value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,788
wield.bunch.bunch
__ne__
Return self!=value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,789
wield.bunch.bunch
__repr__
null
def __repr__(self): keys = list(self._mydict.keys()) return "{0}({1})".format( self.__class__.__name__, ", \n ".join( ["".join((str(k), "=", repr(self._mydict[k]))) for k in keys] ), )
(self)
41,790
wield.bunch.bunch
__setattr__
null
def __setattr__(self, key, item): self._mydict[key] = item return
(self, key, item)
41,791
wield.bunch.bunch
__setitem__
Set self[key] to value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,792
wield.bunch.bunch
__sizeof__
D.__sizeof__() -> size of D in memory, in bytes
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,793
wield.bunch.bunch
__str__
Return str(self).
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,794
wield.bunch.bunch
_repr_pretty_
null
def _repr_pretty_(self, p, cycle): if cycle: p.text("Bunch(<recurse>)") else: with p.group(4, "Bunch(", ")"): first = True for k, v in sorted(self._mydict.items()): if not first: p.text(",") p.breakable() else: p.breakable() first = False if isinstance(k, str): p.text(k) else: p.pretty(k) p.text(" = ") p.pretty(v) if not first: p.text(",") p.breakable() return
(self, p, cycle)
41,795
wield.bunch.bunch
clear
D.clear() -> None. Remove all items from D.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,796
wield.bunch.bunch
copy
null
def copy(self): return self.__class__(self._mydict.copy())
(self)
41,797
wield.bunch.bunch
domain_sort
null
def domain_sort(self, key): argsort = np.argsort(self[key]) return self[argsort]
(self, key)
41,798
wield.bunch.bunch
fromkeys
Create a new dictionary with keys from iterable and values set to value.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,799
wield.bunch.bunch
get
Return the value for key if key is in the dictionary, else default.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,800
wield.bunch.bunch
items
D.items() -> a set-like object providing a view on D's items
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,801
wield.bunch.bunch
keys
D.keys() -> a set-like object providing a view on D's keys
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,802
wield.bunch.bunch
pop
D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If the key is not found, return the default if given; otherwise, raise a KeyError.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,803
wield.bunch.bunch
popitem
Remove and return a (key, value) pair as a 2-tuple. Pairs are returned in LIFO (last-in, first-out) order. Raises KeyError if the dict is empty.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,804
wield.bunch.bunch
setdefault
Insert key with a value of default if key is not in the dictionary. Return the value for key if key is in the dictionary, else default.
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,805
wield.bunch.bunch
update
D.update([E, ]**F) -> None. Update D from dict/iterable E and F. If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v In either case, this is followed by: for k in F: D[k] = F[k]
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,806
wield.bunch.bunch
values
D.values() -> an object providing a view on D's values
def gen_func(mname): def func(self, *args, **kwargs): return getattr(self._mydict, mname)(*args, **kwargs) orig_func = getattr(dict, mname) if orig_func is None: return func.__name__ = orig_func.__name__ func.__doc__ = orig_func.__doc__ return func
(self, *args, **kwargs)
41,807
wield.bunch.deep_bunch
DeepBunch
class DeepBunch(object): """ """ __slots__ = ( "_dict", "_vpath", ) # needed to not explode some serializers since this object generally "hasattr" almost anything __reduce_ex__ = None __reduce__ = None __copy__ = None __deepcopy__ = None def __init__( self, mydict=None, writeable=None, _vpath=None, ): if mydict is None: mydict = dict() # access through super is necessary because of the override on __setattr__ can't see these slots if not isinstance(mydict, dict): mydict = dict(mydict) super(DeepBunch, self).__setattr__("_dict", mydict) if _vpath is None: if writeable is None: writeable = True if writeable: _vpath = ((),) if _vpath is True: _vpath = () elif _vpath is False: pass elif _vpath is not None: _vpath = tuple(_vpath) assert _vpath is not None super(DeepBunch, self).__setattr__("_vpath", _vpath) return def _resolve_dict(self): if not self._vpath: return self._dict try: mydict = self._dict for idx, gname in enumerate(self._vpath): mydict = mydict[gname] # TODO: make better assert assert isinstance(mydict, Mapping) except KeyError: if idx != 0: self._dict = mydict self._vpath = self._vpath[idx:] return None self._dict = mydict self._vpath = () return mydict @property def mydict(self): return self._resolve_dict() @property def _mydict_resolved(self): d = self._resolve_dict() if d is None: return dict() return d def _require_dict(self): if not self._vpath: return self._dict mydict = self._dict for gname in self._vpath: mydict = mydict.setdefault(gname, {}) # TODO: make better assert assert isinstance(mydict, Mapping) self._vpath = () self._dict = mydict return mydict def __getitem__(self, key): mydict = self._resolve_dict() if mydict is None: if self._vpath is False: raise RuntimeError( "This DeepBunch cannot index sub-dictionaries which do not exist." ) return self.__class__( mydict=self._dict, _vpath=self._vpath + (key,), ) try: item = mydict[key] if isinstance(item, Mapping): return self.__class__( mydict=item, _vpath=self._vpath, ) return item except KeyError as E: if self._vpath is not False: return self.__class__( mydict=self._dict, _vpath=self._vpath + (key,), ) if str(E).lower().find("object not found") != -1: raise KeyError("key '{0}' not found in {1}".format(key, self)) raise def __getattr__(self, key): try: return self.__getitem__(key) except KeyError: raise AttributeError("'{1}' not in {0}".format(self, key)) def __setitem__(self, key, item): mydict = self._require_dict() try: mydict[key] = item return except TypeError: raise TypeError( "Can't insert {0} into {1} at key {2}".format(item, mydict, key) ) def __setattr__(self, key, item): if key in self.__slots__: return super(DeepBunch, self).__setattr__(key, item) return self.__setitem__(key, item) def __delitem__(self, key): mydict = self._resolve_dict() if mydict is None: return del self._dict[key] def __delattr__(self, key): return self.__delitem__(key) def get(self, key, default=NOARG): mydict = self._resolve_dict() if mydict is None: if default is not NOARG: return default else: raise KeyError("key '{0}' not found in {1}".format(key, self)) try: return mydict[key] except KeyError: if default is not NOARG: return default raise def setdefault(self, key, default): mydict = self._require_dict() return mydict.setdefault(key, default) def __contains__(self, key): mydict = self._resolve_dict() if mydict is None: return False return key in mydict def has_key(self, key): mydict = self._resolve_dict() if mydict is None: return False return key in mydict def require_deleted(self, key): mydict = self._resolve_dict() if mydict is None: return try: del self._dict[key] except KeyError: pass return def update_recursive(self, db=None, **kwargs): if self._vpath is False: def recursive_op(to_db, from_db): for key, val in list(from_db.items()): if isinstance(val, Mapping): try: rec_div = to_db[key] except KeyError: rec_div = dict() recursive_op(rec_div, val) if rec_div: to_db[key] = rec_div else: to_db[key] = val else: def recursive_op(to_db, from_db): for key, val in list(from_db.items()): if isinstance(val, Mapping): recursive_op(to_db[key], val) else: to_db[key] = val if db is not None: recursive_op(self, db) if kwargs: recursive_op(self, kwargs) return @classmethod def ensure_wrap(cls, item): if isinstance(item, cls): return item return cls(item) def __dir__(self): items = [k for k in self._dict.keys() if isinstance(k, str)] items += ["mydict"] # items.sort() # items += dir(super(Bunch, self)) return items def __repr__(self): if self._vpath is False: vpath = "False" elif self._vpath == (): vpath = "True" return ("{0}({1})").format( self.__class__.__name__, self._dict, ) else: vpath = self._vpath return ("{0}({1}, vpath={2},)").format( self.__class__.__name__, self._dict, vpath, ) def _repr_pretty_(self, p, cycle): if cycle: p.text(self.__class__.__name__ + "(<recurse>)") else: with p.group(4, self.__class__.__name__ + "(", ")"): first = True for k, v in sorted(list(self._dict.items())): if not first: p.text(",") p.breakable() else: p.breakable() first = False p.pretty(k) p.text(" = ") p.pretty(v) if not first: p.text(",") p.breakable() return def __eq__(self, other): try: return self._mydict_resolved == other._mydict_resolved except AttributeError: return False def __ne__(self, other): return not (self == other) def __iter__(self): mydict = self._resolve_dict() if mydict is None: return iter(()) return iter(mydict) def __len__(self): mydict = self._resolve_dict() if mydict is None: return 0 return len(mydict) def clear(self): mydict = self._resolve_dict() if mydict is None: return for key in list(mydict.keys()): del mydict[key] return def keys(self): mydict = self._resolve_dict() if mydict is None: return iter(()) return iter(list(mydict.keys())) def values(self): mydict = self._resolve_dict() if mydict is None: return for key in list(mydict.keys()): yield self[key] return def items(self): mydict = self._resolve_dict() if mydict is None: return for key in list(mydict.keys()): yield key, self[key] return def __bool__(self): mydict = self._resolve_dict() if mydict is None: return False return bool(mydict)
(mydict=None, writeable=None, _vpath=None)
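A usage sketch of the behaviour encoded above: nested dict values are wrapped on access, and (when writeable) missing branches stay virtual until something is assigned into them. The keys and values here are illustrative:

from wield.bunch.deep_bunch import DeepBunch

d = DeepBunch()
d.config.solver.tolerance = 1e-6          # intermediate dicts created on demand
assert d["config"]["solver"]["tolerance"] == 1e-6

missing = d.config.plotting               # virtual branch, nothing stored yet
assert not missing                        # __bool__ is False until it holds data
assert "plotting" not in d.config         # reading did not create the key

d.update_recursive({"config": {"solver": {"max_iter": 100}}})
assert d.config.solver.max_iter == 100
assert d.config.solver.tolerance == 1e-6  # recursive update merges rather than replaces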
41,808
wield.bunch.deep_bunch
__bool__
null
def __bool__(self): mydict = self._resolve_dict() if mydict is None: return False return bool(mydict)
(self)
41,809
wield.bunch.deep_bunch
__contains__
null
def __contains__(self, key): mydict = self._resolve_dict() if mydict is None: return False return key in mydict
(self, key)
41,810
wield.bunch.deep_bunch
__delattr__
null
def __delattr__(self, key): return self.__delitem__(key)
(self, key)
41,811
wield.bunch.deep_bunch
__delitem__
null
def __delitem__(self, key): mydict = self._resolve_dict() if mydict is None: return del self._dict[key]
(self, key)
41,812
wield.bunch.deep_bunch
__dir__
null
def __dir__(self): items = [k for k in self._dict.keys() if isinstance(k, str)] items += ["mydict"] # items.sort() # items += dir(super(Bunch, self)) return items
(self)
41,813
wield.bunch.deep_bunch
__eq__
null
def __eq__(self, other): try: return self._mydict_resolved == other._mydict_resolved except AttributeError: return False
(self, other)
41,814
wield.bunch.deep_bunch
__getattr__
null
def __getattr__(self, key): try: return self.__getitem__(key) except KeyError: raise AttributeError("'{1}' not in {0}".format(self, key))
(self, key)
41,815
wield.bunch.deep_bunch
__getitem__
null
def __getitem__(self, key): mydict = self._resolve_dict() if mydict is None: if self._vpath is False: raise RuntimeError( "This DeepBunch cannot index sub-dictionaries which do not exist." ) return self.__class__( mydict=self._dict, _vpath=self._vpath + (key,), ) try: item = mydict[key] if isinstance(item, Mapping): return self.__class__( mydict=item, _vpath=self._vpath, ) return item except KeyError as E: if self._vpath is not False: return self.__class__( mydict=self._dict, _vpath=self._vpath + (key,), ) if str(E).lower().find("object not found") != -1: raise KeyError("key '{0}' not found in {1}".format(key, self)) raise
(self, key)
41,816
wield.bunch.deep_bunch
__init__
null
def __init__( self, mydict=None, writeable=None, _vpath=None, ): if mydict is None: mydict = dict() # access through super is necessary because of the override on __setattr__ can't see these slots if not isinstance(mydict, dict): mydict = dict(mydict) super(DeepBunch, self).__setattr__("_dict", mydict) if _vpath is None: if writeable is None: writeable = True if writeable: _vpath = ((),) if _vpath is True: _vpath = () elif _vpath is False: pass elif _vpath is not None: _vpath = tuple(_vpath) assert _vpath is not None super(DeepBunch, self).__setattr__("_vpath", _vpath) return
(self, mydict=None, writeable=None, _vpath=None)
41,817
wield.bunch.deep_bunch
__iter__
null
def __iter__(self): mydict = self._resolve_dict() if mydict is None: return iter(()) return iter(mydict)
(self)
41,818
wield.bunch.deep_bunch
__len__
null
def __len__(self): mydict = self._resolve_dict() if mydict is None: return 0 return len(mydict)
(self)
41,820
wield.bunch.deep_bunch
__repr__
null
def __repr__(self): if self._vpath is False: vpath = "False" elif self._vpath == (): vpath = "True" return ("{0}({1})").format( self.__class__.__name__, self._dict, ) else: vpath = self._vpath return ("{0}({1}, vpath={2},)").format( self.__class__.__name__, self._dict, vpath, )
(self)
41,821
wield.bunch.deep_bunch
__setattr__
null
def __setattr__(self, key, item): if key in self.__slots__: return super(DeepBunch, self).__setattr__(key, item) return self.__setitem__(key, item)
(self, key, item)
41,822
wield.bunch.deep_bunch
__setitem__
null
def __setitem__(self, key, item): mydict = self._require_dict() try: mydict[key] = item return except TypeError: raise TypeError( "Can't insert {0} into {1} at key {2}".format(item, mydict, key) )
(self, key, item)
41,823
wield.bunch.deep_bunch
_repr_pretty_
null
def _repr_pretty_(self, p, cycle): if cycle: p.text(self.__class__.__name__ + "(<recurse>)") else: with p.group(4, self.__class__.__name__ + "(", ")"): first = True for k, v in sorted(list(self._dict.items())): if not first: p.text(",") p.breakable() else: p.breakable() first = False p.pretty(k) p.text(" = ") p.pretty(v) if not first: p.text(",") p.breakable() return
(self, p, cycle)
41,824
wield.bunch.deep_bunch
_require_dict
null
def _require_dict(self): if not self._vpath: return self._dict mydict = self._dict for gname in self._vpath: mydict = mydict.setdefault(gname, {}) # TODO: make better assert assert isinstance(mydict, Mapping) self._vpath = () self._dict = mydict return mydict
(self)
41,825
wield.bunch.deep_bunch
_resolve_dict
null
def _resolve_dict(self): if not self._vpath: return self._dict try: mydict = self._dict for idx, gname in enumerate(self._vpath): mydict = mydict[gname] # TODO: make better assert assert isinstance(mydict, Mapping) except KeyError: if idx != 0: self._dict = mydict self._vpath = self._vpath[idx:] return None self._dict = mydict self._vpath = () return mydict
(self)
41,826
wield.bunch.deep_bunch
clear
null
def clear(self): mydict = self._resolve_dict() if mydict is None: return for key in list(mydict.keys()): del mydict[key] return
(self)
41,827
wield.bunch.deep_bunch
get
null
def get(self, key, default=NOARG): mydict = self._resolve_dict() if mydict is None: if default is not NOARG: return default else: raise KeyError("key '{0}' not found in {1}".format(key, self)) try: return mydict[key] except KeyError: if default is not NOARG: return default raise
(self, key, default=NOARG)
41,828
wield.bunch.deep_bunch
has_key
null
def has_key(self, key): mydict = self._resolve_dict() if mydict is None: return False return key in mydict
(self, key)
41,829
wield.bunch.deep_bunch
items
null
def items(self): mydict = self._resolve_dict() if mydict is None: return for key in list(mydict.keys()): yield key, self[key] return
(self)
41,830
wield.bunch.deep_bunch
keys
null
def keys(self): mydict = self._resolve_dict() if mydict is None: return iter(()) return iter(list(mydict.keys()))
(self)
41,831
wield.bunch.deep_bunch
require_deleted
null
def require_deleted(self, key): mydict = self._resolve_dict() if mydict is None: return try: del self._dict[key] except KeyError: pass return
(self, key)
41,832
wield.bunch.deep_bunch
setdefault
null
def setdefault(self, key, default): mydict = self._require_dict() return mydict.setdefault(key, default)
(self, key, default)
41,833
wield.bunch.deep_bunch
update_recursive
null
def update_recursive(self, db=None, **kwargs): if self._vpath is False: def recursive_op(to_db, from_db): for key, val in list(from_db.items()): if isinstance(val, Mapping): try: rec_div = to_db[key] except KeyError: rec_div = dict() recursive_op(rec_div, val) if rec_div: to_db[key] = rec_div else: to_db[key] = val else: def recursive_op(to_db, from_db): for key, val in list(from_db.items()): if isinstance(val, Mapping): recursive_op(to_db[key], val) else: to_db[key] = val if db is not None: recursive_op(self, db) if kwargs: recursive_op(self, kwargs) return
(self, db=None, **kwargs)
41,834
wield.bunch.deep_bunch
values
null
def values(self): mydict = self._resolve_dict() if mydict is None: return for key in list(mydict.keys()): yield self[key] return
(self)
41,835
wield.bunch.bunch
FrozenBunch
class FrozenBunch(Bunch): """ """ def __init__(self, inner_dict=None, hash_ignore=(), *args, **kwds): if inner_dict is None or args or kwds: if args: _mydict = dict(inner_dict, *args, **kwds) else: _mydict = dict(**kwds) else: _mydict = dict(inner_dict) self.__dict__["hash_ignore"] = set(hash_ignore) self.__dict__["_mydict"] = _mydict return @classmethod def as_bunch(cls, data): if isinstance(data, cls): return data return cls(data) def __hash__(self): try: return self.__dict__["__hash"] except KeyError: pass d2 = dict(self._mydict) for k in self.hash_ignore: d2.pop(k) l = tuple(sorted(d2.items())) self.__dict__["__hash"] = hash(l) return self.__dict__["__hash"] def __pop__(self, key): raise RuntimeError("Bunch is Frozen") def __popitem__(self, key): raise RuntimeError("Bunch is Frozen") def __clear__(self, key): raise RuntimeError("Bunch is Frozen") def __setitem__(self, key, item): raise RuntimeError("Bunch is Frozen") def __setattr__(self, key, item): raise RuntimeError("Bunch is Frozen") def _insertion_hack(self, key, item): """ Allows one to insert an item even after construction. This violates the "frozen" immutability property, but allows constructing FrozenBunch's that link to each other. This should only be done immediately after construction, before hash is ever called """ self._mydict[key] = item def __delattr__(self, key): raise RuntimeError("Bunch is Frozen") def __deepcopy__(self, memo): return self.__class__(copy.deepcopy(self._mydict, memo))
(inner_dict=None, hash_ignore=(), *args, **kwds)
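A sketch of what freezing buys: mutation raises, and the cached hash (computed over the items minus hash_ignore) makes instances usable as dict keys. The field names are illustrative:

from wield.bunch.bunch import FrozenBunch

fb1 = FrozenBunch(kind="lowpass", order=4, label="a", hash_ignore=("label",))
fb2 = FrozenBunch(kind="lowpass", order=4, label="b", hash_ignore=("label",))
assert hash(fb1) == hash(fb2)       # "label" is excluded from the hash

cache = {fb1: "designed"}
try:
    fb1.order = 6                   # any mutation raises RuntimeError
except RuntimeError:
    pass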
41,836
wield.bunch.bunch
__clear__
null
def __clear__(self, key): raise RuntimeError("Bunch is Frozen")
(self, key)
41,839
wield.bunch.bunch
__delattr__
null
def __delattr__(self, key): raise RuntimeError("Bunch is Frozen")
(self, key)