index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
43,464 |
mo_dots.utils
|
wrapper
| null |
def cache(func):
    """
    DECORATOR TO CACHE THE RESULT OF A FUNCTION

    Results are memoized per positional-argument tuple (args must be hashable).
    """
    memo = {}

    def wrapper(*args):
        # EAFP: attempt the lookup, compute on a miss
        try:
            return memo[args]
        except KeyError:
            memo[args] = result = func(*args)
            return result

    return wrapper
|
(*args)
|
43,465 |
mo_dots
|
_DeferManyTypes
| null |
class _DeferManyTypes:
    """Deprecated iterable proxy over utils._many_types."""
    @cache
    def warning(self):
        # @cache keys on (self,), so each instance emits this warning at most once
        get_logger().warning("DEPRECATED: Use mo_dots.utils._data_types", stack_depth=2)
    def __iter__(self):
        # NOTE(review): iterating does not trigger warning(); presumably deliberate — confirm
        yield from utils._many_types
|
()
|
43,466 |
mo_dots
|
__iter__
| null |
def __iter__(self):
    # delegate iteration to the module-level registry of "many" types
    yield from utils._many_types
|
(self)
|
43,468 |
builtins
|
zip
|
zip(*iterables, strict=False) --> Yield tuples until an input is exhausted.
>>> list(zip('abcdefg', range(3), range(4)))
[('a', 0, 0), ('b', 1, 1), ('c', 2, 2)]
The zip object yields n-length tuples, where n is the number of iterables
passed as positional arguments to zip(). The i-th element in every tuple
comes from the i-th iterable argument to zip(). This continues until the
shortest argument is exhausted.
If strict is true and one of the arguments is exhausted before the others,
raise a ValueError.
|
from builtins import zip
| null |
43,470 |
mo_dots
|
_get_attr
| null |
def _get_attr(obj, path):
    """
    Recursively resolve `path` (a list of attribute-name steps) against obj.

    For modules: look in __dict__, then dir(), then the filesystem (importing
    sub-modules an __init__.py did not pull in), then a case-insensitive match.
    For everything else: integer index, getattr, then item access.
    Returns a NullType placeholder when nothing matches.
    """
    if not path:
        return obj
    attr_name = path[0]
    if isinstance(obj, _module_type):
        if attr_name in obj.__dict__:
            return _get_attr(obj.__dict__[attr_name], path[1:])
        elif attr_name in dir(obj):
            # NOTE(review): item access on a module looks odd — presumably getattr
            # was intended here; confirm this branch is ever reached
            return _get_attr(obj[attr_name], path[1:])
        # TRY FILESYSTEM
        File = get_module("mo_files").File
        possible_error = None
        python_file = (File(obj.__file__).parent / attr_name).set_extension("py")
        python_module = File(obj.__file__).parent / attr_name / "__init__.py"
        if python_file.exists or python_module.exists:
            try:
                # THIS CASE IS WHEN THE __init__.py DOES NOT IMPORT THE SUBDIR FILE
                # WE CAN STILL PUT THE PATH TO THE FILE IN THE from CLAUSE
                if len(path) == 1:
                    # GET MODULE OBJECT
                    output = __import__(
                        obj.__name__ + str(".") + str(attr_name), globals(), locals(), [str(attr_name)], 0,
                    )
                    return output
                else:
                    # GET VARIABLE IN MODULE
                    output = __import__(
                        obj.__name__ + str(".") + str(attr_name), globals(), locals(), [str(path[1])], 0,
                    )
                    return _get_attr(output, path[1:])
            except Exception as e:
                Except = get_module("mo_logs.exceptions.Except")
                possible_error = Except.wrap(e)
        # TRY A CASE-INSENSITIVE MATCH
        matched_attr_name = lower_match(attr_name, dir(obj))
        if not matched_attr_name:
            get_logger().warning(
                PATH_NOT_FOUND + "({{name|quote}}) Returning None.", name=attr_name, cause=possible_error,
            )
        elif len(matched_attr_name) > 1:
            get_logger().error(AMBIGUOUS_PATH_FOUND + " {{paths}}", paths=attr_name)
        else:
            return _get_attr(obj, matched_attr_name + path[1:])
    # non-module: try the step as an integer index first
    try:
        obj = obj[int(attr_name)]
        return _get_attr(obj, path[1:])
    except Exception:
        pass
    # then plain attribute access
    try:
        obj = getattr(obj, attr_name)
        return _get_attr(obj, path[1:])
    except Exception:
        pass
    # finally item access; give up with a NullType placeholder
    try:
        obj = obj[attr_name]
        return _get_attr(obj, path[1:])
    except Exception as f:
        return NullType(obj, attr_name)
|
(obj, path)
|
43,471 |
mo_dots
|
_getdefault
|
obj ANY OBJECT
key IS EXPECTED TO BE LITERAL (NO ESCAPING)
TRY BOTH ATTRIBUTE AND ITEM ACCESS, OR RETURN Null
|
def _getdefault(obj, key):
    """
    obj ANY OBJECT
    key IS EXPECTED TO BE LITERAL (NO ESCAPING)
    TRY BOTH ATTRIBUTE AND ITEM ACCESS, OR RETURN Null
    """
    # 1) plain item access
    try:
        return obj[key]
    except Exception:
        pass
    # 2) sequences: resolve the key against every element
    if is_sequence(obj):
        return [_getdefault(element, key) for element in obj]
    # 3) attribute access (never on plain dicts)
    try:
        if _get(obj, CLASS) is not dict:
            return getattr(obj, key)
    except Exception:
        pass
    # 4) whole-number keys used as an integer index
    try:
        if float(key) == round(float(key), 0):
            return obj[int(key)]
    except Exception:
        pass
    # TODO: FIGURE OUT WHY THIS WAS EVER HERE (AND MAKE A TEST)
    # try:
    #     return eval("obj."+str(key))
    # except Exception as f:
    #     pass
    return NullType(obj, key)
|
(obj, key)
|
43,473 |
mo_dots
|
_set_attr
| null |
def _set_attr(obj_, path, value):
    """
    Set the attribute at `path` (list of names) on obj_, returning the old value.

    When an old value exists and is not primitive, the new value is coerced to
    the old value's class where possible.
    """
    obj = _get_attr(obj_, path[:-1])
    if obj is None:
        # DELIBERATE USE OF `is`: WE DO NOT WANT TO CATCH Null HERE (THEY CAN BE SET)
        get_logger().error(PATH_NOT_FOUND + " tried to get attribute of None")
    attr_name = path[-1]
    # ACTUAL SETTING OF VALUE
    try:
        old_value = _get_attr(obj, [attr_name])
        old_type = _get(old_value, CLASS)
        if is_null(old_value) or is_primitive(old_value):
            old_value = None
            new_value = value
        elif is_null(value):
            new_value = None
        else:
            new_value = _get(old_value, CLASS)(value)  # TRY TO MAKE INSTANCE OF SAME CLASS
    except Exception:
        old_value = None
        new_value = value
    # prefer setattr; fall back to item assignment
    try:
        setattr(obj, attr_name, new_value)
        return old_value
    except Exception as e:
        try:
            obj[attr_name] = new_value
            return old_value
        except Exception as f:
            get_logger().error(PATH_NOT_FOUND, cause=[f, e])
|
(obj_, path, value)
|
43,474 |
mo_dots
|
_set_default
|
ANY VALUE NOT SET WILL BE SET BY THE default
THIS IS RECURSIVE
|
def _set_default(d, default, seen=None):
    """
    ANY VALUE NOT SET WILL BE SET BY THE default
    THIS IS RECURSIVE

    :param d: target data/object to fill
    :param default: mapping supplying missing values
    :param seen: map of id(raw default dict) -> its copy, so shared/recursive
                 defaults resolve to the same object (created here if omitted)
    """
    if default is None:
        return
    if seen is None:
        # BUG FIX: a direct call without `seen` used to crash on seen.get() below
        seen = {}
    for k, default_value in default.items():
        raw_value = from_data(default_value)  # TWO DIFFERENT Dicts CAN SHARE id() BECAUSE THEY ARE SHORT LIVED
        if is_data(d):
            existing_value = d.get(k)
        else:
            existing_value = _get_attr(d, [k])
        if is_null(existing_value):
            if default_value != None:  # "!=" (not "is not") — presumably so Null is also skipped; confirm
                if is_data(default_value):
                    df = seen.get(id(raw_value))
                    if df is not None:
                        # already copied this default dict; reuse the copy
                        _set_attr(d, [k], df)
                    else:
                        copy_dict = {}
                        seen[id(raw_value)] = copy_dict
                        _set_attr(d, [k], copy_dict)
                        _set_default(copy_dict, default_value, seen)
                else:
                    # ASSUME PRIMITIVE (OR LIST, WHICH WE DO NOT COPY)
                    try:
                        _set_attr(d, [k], default_value)
                    except Exception as e:
                        if PATH_NOT_FOUND not in e:
                            get_logger().error("Can not set attribute {{name}}", name=k, cause=e)
        elif is_list(existing_value) or is_list(default_value):
            # lists merge by concatenation
            _set_attr(d, [k], None)
            _set_attr(d, [k], listwrap(existing_value) + listwrap(default_value))
        elif (hasattr(existing_value, "__setattr__") or is_data(existing_value)) and is_data(default_value):
            df = seen.get(id(raw_value))
            if df is not None:
                _set_attr(d, [k], df)
            else:
                seen[id(raw_value)] = existing_value
                _set_default(existing_value, default_value, seen)
|
(d, default, seen=None)
|
43,475 |
mo_dots.utils
|
cache
|
DECORATOR TO CACHE THE RESULT OF A FUNCTION
|
def cache(func):
    """
    DECORATOR TO CACHE THE RESULT OF A FUNCTION

    Results are memoized per positional-argument tuple; arguments must be
    hashable.  The wrapper preserves func's name/docstring via functools.wraps.
    """
    from functools import wraps  # local import keeps module import-time behavior unchanged

    memo = {}

    @wraps(func)  # IMPROVEMENT: keep the wrapped function's metadata
    def wrapper(*args):
        if args in memo:
            return memo[args]
        result = func(*args)
        memo[args] = result
        return result

    return wrapper
|
(func)
|
43,476 |
mo_dots
|
coalesce
| null |
def coalesce(*args):
    """Return the first not-null argument wrapped with to_data(); Null when none qualify."""
    # pick the first not null value
    # http://en.wikipedia.org/wiki/Null_coalescing_operator
    for a in args:
        if a != None:  # "!=" (not "is not") — presumably mo_dots Null also compares equal to None; confirm
            return to_data(a)
    return Null
|
(*args)
|
43,477 |
mo_dots.fields
|
concat_field
| null |
def concat_field(*fields):
    """Join several dot-delimited paths into one path string."""
    split_parts = (split_field(each) for each in fields)
    return join_field(flatten(split_parts))
|
(*fields)
|
43,483 |
mo_imports
|
delay_import
|
RETURN AN OBJECT THAT WILL CONVERT TO GIVEN INSTANCE WHEN "USED"
"USED" MEANS CALLING ONE OF __call__, __getattribute__, __getitem__
:param module: FULL MODULE NAME
:return: A DelayedImport OBJECT THAT WILL REPLACE ITSELF WHEN USED
|
def delay_import(module):
    """
    RETURN AN OBJECT THAT WILL CONVERT TO GIVEN INSTANCE WHEN "USED"
    "USED" MEANS CALLING ONE OF __call__, __getattribute__, __getitem__
    :param module: FULL MODULE NAME
    :return: A DelayedImport OBJECT THAT WILL REPLACE ITSELF WHEN USED
    """
    # frame(1) is the caller's module; the DelayedImport will replace itself there
    globals = sys._getframe(1).f_globals
    caller_name = globals["__name__"]
    return DelayedImport(caller_name, module)
|
(module)
|
43,484 |
mo_dots.datas
|
dict_to_data
|
FASTEST WAY TO MAKE Data, DO NOT CHECK TYPE
:param d: dict
:return: Data
|
def dict_to_data(d):
    """
    FASTEST WAY TO MAKE Data, DO NOT CHECK TYPE
    :param d: dict
    :return: Data
    """
    # bypass Data.__init__ entirely and install the dict into the slot
    wrapped = _new(Data)
    _set(wrapped, SLOT, d)
    return wrapped
|
(d)
|
43,485 |
mo_dots.fields
|
endswith_field
|
RETURN True IF field PATH STRING ENDS WITH suffix PATH STRING
|
def endswith_field(field, suffix):
    """
    RETURN True IF field PATH STRING ENDS WITH suffix PATH STRING
    """
    if is_null(suffix):
        return False
    if suffix == ".":
        # every path ends with the root
        return True
    if not field.endswith(suffix):
        return False
    ls = len(suffix)
    if len(field) == ls:
        return True
    # suffix must start on a path-step boundary
    return field[-ls - 1] in (".", "\b") and field[-ls - 2] not in (".", "\b")
|
(field, suffix)
|
43,486 |
mo_dots.utils
|
exists
| null |
def exists(value) -> bool:
    """True when value carries something — the logical negation of is_missing()."""
    missing = is_missing(value)
    return not missing
|
(value) -> bool
|
43,487 |
mo_imports
|
expect
|
EXPECT A LATE EXPORT INTO CALLING MODULE
:param names: MODULE VARIABLES THAT WILL BE FILLED BY ANOTHER MODULE
:return: PLACEHOLDERS THAT CAN BE USED UNTIL FILL HAPPENS len(output)==len(names)
|
def expect(*names):
    """
    EXPECT A LATE EXPORT INTO CALLING MODULE
    :param names: MODULE VARIABLES THAT WILL BE FILLED BY ANOTHER MODULE
    :return: PLACEHOLDERS THAT CAN BE USED UNTIL FILL HAPPENS len(output)==len(names)
    """
    # NOTE(review): indentation reconstructed from a flattened source dump — confirm lock scope
    global _monitor, _expiry, _event
    if not names:
        _error("expecting at least one name")
    # GET MODULE OF THE CALLER
    globals = sys._getframe(1).f_globals
    caller_name = globals["__name__"]
    caller = importlib.import_module(caller_name)
    # REGISTER DESIRED EXPORT
    output = []
    for name in names:
        desc = Expecting(caller, name)
        setattr(caller, name, desc)
        output.append(desc)
        with _locker:
            _expiry = time() + WAIT_FOR_EXPORT
            _expectations.append(desc)
            if not _monitor:
                # first expectation: start the watchdog that reports missing exports
                _event = Event()
                _monitor = Thread(target=worker, args=[_event])
                _monitor.daemon = True
                _monitor.start()
    if DEBUG:
        for name in names:
            print(f">>> {desc.module.__name__} is expecting {name}")
    if len(output) == 1:
        return output[0]
    else:
        return output
|
(*names)
|
43,488 |
mo_imports
|
export
|
MUCH LIKE setattr(module, name, value) BUT WITH CONSISTENCY CHECKS AND MORE CONVENIENCE
## COMMON USAGE:
export("full.path.to.module", value) # read `full.path.to.module.value = value`
## RENAME
export("full.path.to.module", "name", value) # read `full.path.to.module.name = value`
## KNOWN MODULE
export(myModule, value) # read `myModule.value = value`
:param module: MODULE, OR STRING WITH FULL PATH OF MODULE
:param name: THE VARIABLE TO SET IN MODULE (OR VALUE, IF THERE IS NO NAME CHANGE)
:param value: (optional) THE VALUE TO ASSIGN
|
def export(module, name, value=_nothing):
    """
    MUCH LIKE setattr(module, name, value) BUT WITH CONSISTENCY CHECKS AND MORE CONVENIENCE
    ## COMMON USAGE:
    export("full.path.to.module", value) # read `full.path.to.module.value = value`
    ## RENAME
    export("full.path.to.module", "name", value) # read `full.path.to.module.name = value`
    ## KNOWN MODULE
    export(myModule, value) # read `myModule.value = value`
    :param module: MODULE, OR STRING WITH FULL PATH OF MODULE
    :param name: THE VARIABLE TO SET IN MODULE (OR VALUE, IF THERE IS NO NAME CHANGE)
    :param value: (optional) THE VALUE TO ASSIGN
    """
    # NOTE(review): indentation reconstructed from a flattened source dump — confirm block nesting
    global _monitor, _expiry, _event
    if isinstance(module, str):
        try:
            module = importlib.import_module(module)
        except Exception as cause:
            _error(f"{module} can not be found")
    if not isinstance(name, str):
        # GET MODULE OF THE CALLER TO FIND NAME OF OBJECT
        value = name
        globals = sys._getframe(1).f_globals
        caller_name = globals["__name__"]
        caller = importlib.import_module(caller_name)
        for n in dir(caller):
            try:
                if getattr(caller, n) is value:
                    name = n
                    break
            except Exception:
                pass
        else:
            _error(f"Can not find variable holding a {value.__class__.__name__}")
    if value is _nothing:
        # ASSUME CALLER MODULE IS USED
        globals = sys._getframe(1).f_globals
        caller_name = globals["__name__"]
        value = importlib.import_module(caller_name)
    desc = getattr(module, name, None)
    if isinstance(desc, Expecting):
        with _locker:
            # remove the fulfilled expectation; stop the watchdog when none remain
            for i, e in enumerate(_expectations):
                if desc is e:
                    del _expectations[i]
                    if not _expectations:
                        _event.set()
                        _monitor.join()
                        _monitor = None
                    break
            else:
                _error(f"{module.__name__} is not expecting an export to {name}")
        if DEBUG:
            print(f">>> {module.__name__} got expected {name}")
    else:
        _error(f"{module.__name__} is not expecting an export to {name}")
    setattr(module, name, value)
|
(module, name, value=<object object at 0x7f91c354da30>)
|
43,490 |
mo_future
|
first
| null |
def first(values):
    """Return the first element of *values*, or None when it is empty."""
    # next() with a default replaces the manual __next__()/StopIteration dance
    return next(iter(values), None)
|
(values)
|
43,491 |
mo_future
|
flatten
| null |
def flatten(items):
    """Flatten one level: lazily yield every element of every sub-iterable in items."""
    return (element for group in items for element in group)
|
(items)
|
43,492 |
mo_dots
|
from_data
| null |
def from_data(v):
    """Strip any mo_dots facade (Data/FlatList/DataObject/NullType) and return the raw value."""
    if v is None:
        return None
    _type = _get(v, CLASS)
    if _type is NullType:
        return None
    elif _type is Data:
        d = _get(v, SLOT)
        return d
    elif _type is FlatList:
        return _get(v, SLOT)
    elif _type is DataObject:
        return _get(v, SLOT)
    elif _type in generator_types:
        # unwrap lazily, element by element
        return (from_data(vv) for vv in v)
    elif _type is float:
        # NaN is treated as null
        if isnan(v):
            return None
        return v
    return v
|
(v)
|
43,493 |
mo_dots
|
fromkeys
| null |
def fromkeys(keys, value=None):
    """Data analog of dict.fromkeys(); returns empty Data when value is null."""
    if not is_null(value):
        return dict_to_data(dict.fromkeys(keys, value))
    return Data()
|
(keys, value=None)
|
43,494 |
mo_dots
|
get_attr
|
SAME AS object.__getattr__(), BUT USES DOT-DELIMITED path
|
def get_attr(obj, path):
    """
    SAME AS object.__getattr__(), BUT USES DOT-DELIMITED path
    """
    try:
        return _get_attr(obj, split_field(path))
    except Exception as cause:
        Log = get_logger()
        # distinguish "path does not exist" from other failures
        if PATH_NOT_FOUND in cause:
            Log.error(PATH_NOT_FOUND + ": {{path}}", path=path, cause=cause)
        else:
            # MESSAGE FIX: this is the getter (was "Problem setting value",
            # copy-pasted from set_attr)
            Log.error("Problem getting value", cause=cause)
|
(obj, path)
|
43,495 |
mo_dots.logging
|
get_logger
| null |
def get_logger():
    """Return the shared logger, preferring mo_logs; fall back to PoorLogger once."""
    global _Log
    if _Log:
        # already resolved on a previous call
        return _Log
    try:
        from mo_logs import Log as _Log
        return _Log
    except Exception as e:
        _Log = PoorLogger()
        _Log.warning("`pip install mo-logs` for better logging.", cause=e)
        return _Log
|
()
|
43,496 |
mo_dots.utils
|
get_module
| null |
def get_module(name):
    """Import module *name*; on failure, log a pip-install hint for its top-level package."""
    try:
        module = importlib.import_module(name)
    except Exception as cause:
        package = name.split(".")[0].replace("_", "-")
        get_logger().error(
            "`pip install " + package + "` to enable this feature", cause=cause,
        )
    else:
        return module
|
(name)
|
43,497 |
mo_dots.datas
|
hash_value
| null |
def hash_value(v):
    """Hash nested containers/data by recursing into their first leaf value."""
    if is_many(v):
        return hash_value(first(v))
    if is_data(v):
        return hash_value(first(v.values()))
    return hash(v)
|
(v)
|
43,499 |
mo_dots
|
inverse
|
reverse the k:v pairs
|
def inverse(d):
    """
    reverse the k:v pairs

    Returns {value: [keys...]} since several keys may share one value.
    """
    result = {}
    for key, val in from_data(d).items():
        result.setdefault(val, []).append(key)
    return result
|
(d)
|
43,500 |
mo_dots.utils
|
is_container
| null |
def is_container(l):
    """True when elements CAN be ADDED AND REMOVED (list/set-like)."""
    _class = _get(l, CLASS)
    return _class in container_types
|
(l)
|
43,501 |
mo_dots.utils
|
is_data
|
:param d:
:return: True IF d IS A TYPE THAT HOLDS DATA
|
def is_data(d):
    """
    :param d:
    :return: True IF d IS A TYPE THAT HOLDS DATA
    """
    _class = _get(d, CLASS)
    return _class in _data_types
|
(d)
|
43,502 |
mo_dots.utils
|
is_data_object
| null |
def is_data_object(obj):
    """True for data-carrying instances: known data types, namedtuples, dataclasses."""
    if isinstance(obj, _known_data_types):
        return True
    return is_namedtuple(obj) or is_dataclass(obj)
|
(obj)
|
43,503 |
dataclasses
|
is_dataclass
|
Returns True if obj is a dataclass or an instance of a
dataclass.
|
def is_dataclass(obj):
    """Returns True if obj is a dataclass or an instance of a
    dataclass."""
    # classes are checked directly; instances via their type (generic aliases excluded)
    cls = obj if isinstance(obj, type) and not isinstance(obj, GenericAlias) else type(obj)
    return hasattr(cls, _FIELDS)
|
(obj)
|
43,504 |
mo_dots.utils
|
is_finite
| null |
def is_finite(l):
    """True when len(l) is meaningful — i.e. NOT A GENERATOR."""
    _class = _get(l, CLASS)
    return _class in finite_types
|
(l)
|
43,505 |
mo_dots.utils
|
is_known_data_type
| null |
def is_known_data_type(_class):
    """True when _class has been registered as a data-carrying type."""
    return _class in _known_data_types
|
(_class)
|
43,506 |
mo_dots.utils
|
is_list
| null |
def is_list(l):
    """True for ORDERED containers whose CONTENTS CAN CHANGE."""
    _class = _get(l, CLASS)
    return _class in list_types
|
(l)
|
43,507 |
mo_dots.utils
|
is_many
| null |
def is_many(value):
    """
    True when value REPRESENTS MULTIPLE VALUES.

    Generator subclasses not yet registered are added to _many_types on sight,
    with a warning.
    """
    # TODO: CLEAN UP THIS LOGIC
    # THIS IS COMPLICATED BECAUSE I AM UNSURE ABOUT ALL THE "PRIMITIVE TYPES"
    # I WOULD LIKE TO POSITIVELY CATCH many_types, BUT MAYBE IT IS EASIER TO DETECT: Iterable, BUT NOT PRIMITIVE
    # UNTIL WE HAVE A COMPLETE SLOT, WE KEEP ALL THIS warning() CODE
    global _many_types
    type_ = _get(value, CLASS)
    if type_ in _many_types:
        return True
    if issubclass(type_, types.GeneratorType):
        _many_types = _many_types + (type_,)
        # BUG FIX: get_logger is a function and must be called to obtain the logger
        get_logger().warning("is_many() can not detect generator {type}", type=type_.__name__)
        return True
    return False
|
(value)
|
43,508 |
mo_dots.utils
|
is_missing
| null |
def is_missing(t) -> bool:
    """True when t is EFFECTIVELY NOTHING: a null type, or an empty finite container/str."""
    _class = _get(t, CLASS)
    if _class in _null_types:
        return True
    if _class in _data_types or _class in _known_data_types:
        # data-holding objects are never "missing", even when empty
        return False
    if (_class in finite_types or _class is str) and not t:
        return True
    return False
|
(t) -> bool
|
43,509 |
mo_dots.utils
|
is_namedtuple
| null |
def is_namedtuple(obj):
    """True for namedtuple instances: tuples that expose a _fields attribute."""
    if not isinstance(obj, tuple):
        return False
    return hasattr(obj, "_fields")
|
(obj)
|
43,510 |
mo_dots.utils
|
is_not_null
| null |
def is_not_null(value):
    """True unless value's class is a registered null type."""
    _class = _get(value, CLASS)
    if _class in _null_types:
        return False
    if _class in _known_data_types:
        return True
    # if _class in finite_types:
    #     return bool(value)
    return True
|
(value)
|
43,511 |
mo_dots.utils
|
is_null
| null |
def is_null(value):
    """RETURN True IF value IS EFFECTIVELY NOTHING (its class is a registered null type)."""
    _class = _get(value, CLASS)
    if _class in _null_types:
        return True
    if _class in _known_data_types:
        return False
    # if _class in finite_types:
    #     return not value
    return False
|
(value)
|
43,512 |
mo_dots.utils
|
is_primitive
| null |
def is_primitive(value):
    """True when value is an instance of one of the registered primitive types."""
    return isinstance(value, _primitive_types)
|
(value)
|
43,513 |
mo_dots.utils
|
is_sequence
| null |
def is_sequence(l):
    """True for ORDERED iterables, INCLUDING GENERATORS."""
    _class = _get(l, CLASS)
    return _class in sequence_types
|
(l)
|
43,514 |
mo_dots.fields
|
join_field
|
RETURN field SEQUENCE AS STRING
|
def join_field(path):
    """
    RETURN field SEQUENCE AS STRING

    ".." entries navigate to the parent: interior ones cancel the previous
    step, leading ones accumulate into a dot prefix.
    """
    if _get(path, CLASS) in generator_types:
        path = list(path)
    if not path:
        return "."
    prefix = ""
    while True:
        try:
            i = path.index("..")
            if i == 0:
                # leading "..": one more level of prefix dots
                prefix += "."
                path = path[1:]
            else:
                # interior "..": cancel the previous path step
                path = path[: i - 1] + path[i + 1 :]
        except ValueError:
            # no more ".." entries: emit prefix + escaped steps
            return ("." if prefix else "") + prefix + ".".join(literal_field(f) for f in path)
|
(path)
|
43,515 |
mo_dots.lists
|
last
| null |
def last(values):
    """Return the final element of a 'many' value; Null when empty; the value itself otherwise."""
    if is_many(values):
        if not values:
            return Null
        if isinstance(values, FlatList):
            return values.last()
        elif is_list(values):
            if not values:
                return Null
            return values[-1]
        elif is_sequence(values):
            # generators: exhaust, keeping the most recent item
            l = Null
            for i in values:
                l = i
            return l
        else:
            # unordered "many" (e.g. sets): any element will do
            return first(values)
    return values
|
(values)
|
43,516 |
mo_dots.datas
|
leaves
|
LIKE items() BUT RECURSIVE, AND ONLY FOR THE LEAVES (non dict) VALUES
SEE leaves_to_data FOR THE INVERSE
:param value: THE Mapping TO TRAVERSE
:param prefix: OPTIONAL PREFIX GIVEN TO EACH KEY
:return: Data, WHICH EACH KEY BEING A PATH INTO value TREE
|
def leaves(value, prefix=None):
    """
    Recursive items(): yield (path, leaf) pairs for every non-dict leaf value.
    SEE leaves_to_data FOR THE INVERSE

    :param value: THE Mapping TO TRAVERSE
    :param prefix: OPTIONAL PREFIX GIVEN TO EACH KEY
    :return: generator of (path, value) pairs; each path indexes into the value tree
    """
    pairs = _leaves(".", value, tuple())
    if prefix:
        for key, leaf in pairs:
            yield prefix + key, leaf
    else:
        yield from pairs
|
(value, prefix=None)
|
43,517 |
mo_dots.datas
|
leaves_to_data
|
dict WITH DOTS IN KEYS IS INTERPRETED AS A PATH
|
def leaves_to_data(value):
    """
    dict WITH DOTS IN KEYS IS INTERPRETED AS A PATH
    """
    expanded = _leaves_to_data(value)
    return to_data(expanded)
|
(value)
|
43,518 |
mo_dots.lists
|
list_to_data
|
to_data, BUT WITHOUT CHECKS
|
def list_to_data(v):
    """
    to_data, BUT WITHOUT CHECKS
    """
    # bypass FlatList.__init__ and install the list into the slot
    wrapped = _new(FlatList)
    _set(wrapped, SLOT, v)
    return wrapped
|
(v)
|
43,520 |
mo_dots
|
listwrap
|
PERFORMS THE FOLLOWING TRANSLATION
None -> []
value -> [value]
[...] -> [...] (unchanged list)
## MOTIVATION ##
OFTEN IT IS NICE TO ALLOW FUNCTION PARAMETERS TO BE ASSIGNED A VALUE,
OR A list-OF-VALUES, OR NULL. CHECKING FOR WHICH THE CALLER USED IS
TEDIOUS. INSTEAD WE CAST FROM THOSE THREE CASES TO THE SINGLE CASE
OF A LIST
# BEFORE
def do_it(a):
if a is None:
return
if not isinstance(a, list):
a=[a]
for x in a:
# do something
# AFTER
def do_it(a):
for x in listwrap(a):
# do something
|
def listwrap(value):
    """
    Normalize value into a FlatList:

        None  -> []
        [...] -> [...] (unchanged)
        value -> [value]

    Lets callers accept a single value, a list of values, or nothing, and
    always iterate the result — no per-call type checking needed.
    """
    if is_null(value):
        return FlatList()
    if is_list(value):
        # plain lists are wrapped without copying; FlatList passes through
        if isinstance(value, list):
            return list_to_data(value)
        return value
    if is_many(value):
        # generators/sets/etc. are materialized into a list
        return list_to_data(list(value))
    # single value: unwrap any Data facade, then wrap in a one-element list
    return list_to_data([from_data(value)])
|
(value)
|
43,521 |
mo_dots.fields
|
literal_field
|
RETURN SAME WITH DOTS (`.`) ESCAPED
|
def literal_field(field):
    """
    RETURN SAME WITH DOTS (`.`) ESCAPED
    """
    try:
        return ESCAPE_DOTS2.sub("..", ESCAPE_DOTS1.sub("\b", field))
    except Exception as e:
        # CONSISTENCY FIX: pass the exception as cause= like every other
        # get_logger() call site (positionally it would be misread as params)
        get_logger().error("bad literal", cause=e)
|
(field)
|
43,523 |
mo_dots
|
lower_match
| null |
def lower_match(value, candidates):
    """Return the candidates whose lowercase form equals value's lowercase form."""
    target = value.lower()  # hoisted: avoid re-lowering value per candidate
    return [candidate for candidate in candidates if candidate.lower() == target]
|
(value, candidates)
|
43,524 |
mo_dots
|
missing
| null |
def missing(value):
    """Removed API — callers must switch to is_missing()."""
    raise NotImplementedError("use is_missing")
|
(value)
|
43,527 |
mo_dots.objects
|
object_to_data
| null |
def object_to_data(v):
    """Wrap an arbitrary object in the matching mo_dots facade (Data/FlatList/DataObject)."""
    try:
        if is_null(v):
            return Null
    except Exception:
        pass
    if is_primitive(v):
        return v
    _class = _get(v, CLASS)
    if _class in (dict, OrderedDict):
        # same as dict_to_data(): wrap without copying
        m = _new(Data)
        _set(m, SLOT, v)
        return m
    elif _class in (tuple, list):
        return list_to_data(v)
    elif _class in (Data, DataObject, FlatList, NullType):
        # already wrapped
        return v
    elif _class in generator_types:
        # wrap lazily, element by element
        return (to_data(vv) for vv in v)
    elif is_known_data_type(_class):
        return DataObject(v)
    else:
        return v
|
(v)
|
43,530 |
mo_dots.utils
|
register_data
|
:param type_: ADD OTHER TYPE THAT HOLDS DATA
:return:
|
def register_data(type_):
    """
    :param type_: ADD OTHER TYPE THAT HOLDS DATA
    :return:
    """
    global _data_types
    _data_types = tuple(set(_data_types) | {type_})
|
(type_)
|
43,531 |
mo_dots.utils
|
register_list
| null |
def register_list(_type):
    """Register _type as a list; lists belong to every collection category."""
    global list_types, container_types, finite_types, sequence_types, _many_types, many_types
    list_types = tuple(set(list_types) | {_type})
    container_types = tuple(set(container_types) | {_type})
    finite_types = tuple(set(finite_types) | {_type})
    sequence_types = tuple(set(sequence_types) | {_type})
    _many_types = many_types = tuple(set(_many_types) | {_type})
|
(_type)
|
43,532 |
mo_dots.utils
|
register_many
| null |
def register_many(_type):
    """Append _type to the tuple of types that represent multiple values."""
    global _many_types
    _many_types = (*_many_types, _type)
|
(_type)
|
43,533 |
mo_dots.utils
|
register_null_type
| null |
def register_null_type(_type):
    """Add _type to the registry of null types."""
    global _null_types
    _null_types = tuple(set(_null_types) | {_type})
|
(_type)
|
43,534 |
mo_dots.utils
|
register_primitive
| null |
def register_primitive(_type):
    """Add _type to the registry of primitive types."""
    global _primitive_types
    _primitive_types = tuple(set(_primitive_types) | {_type})
|
(_type)
|
43,535 |
mo_dots.utils
|
register_type
| null |
def register_type(*_classes):
    """Add the given classes to the known-data-type registry."""
    for _class in _classes:
        _known_data_types.add(_class)
|
(*_classes)
|
43,536 |
mo_dots.fields
|
relative_field
|
RETURN field PATH WITH RESPECT TO parent
|
def relative_field(field, parent):
    """
    RETURN field PATH WITH RESPECT TO parent
    """
    if parent == ".":
        return field
    field_path = split_field(field)
    parent_path = split_field(parent)
    common = 0
    # count shared leading steps
    for f, p in _builtin_zip(field_path, parent_path):
        if f != p:
            break
        common += 1
    tail = join_field(field_path[common:])
    if len(parent_path) <= common:
        # parent is an ancestor: the remainder is already relative
        return join_field(field_path[common:])
    # one "." per unmatched parent step climbs up; then descend into tail
    dots = "." * (len(parent_path) - common)
    if tail == ".":
        return "." + dots
    else:
        return "." + dots + tail
|
(field, parent)
|
43,537 |
mo_dots
|
set_attr
|
SAME AS object.__setattr__(), BUT USES DOT-DELIMITED path
RETURN OLD VALUE
|
def set_attr(obj, path, value):
    """
    SAME AS object.__setattr__(), BUT USES DOT-DELIMITED path
    RETURN OLD VALUE
    """
    try:
        return _set_attr(obj, split_field(path), value)
    except Exception as cause:
        Log = get_logger()
        # a missing path is only a warning; anything else is an error
        if PATH_NOT_FOUND in cause:
            Log.warning(PATH_NOT_FOUND + ": {{path}}", path=path, cause=cause)
        else:
            Log.error("Problem setting value", cause=cause)
|
(obj, path, value)
|
43,538 |
mo_dots
|
set_default
|
RECURSIVE MERGE OF MULTIPLE dicts MOST IMPORTANT FIRST
UPDATES d WITH THE MERGE RESULT, WHERE MERGE RESULT IS DEFINED AS:
FOR EACH LEAF, RETURN THE FIRST NOT-NULL LEAF VALUE
:param dicts: dicts IN PRIORITY ORDER, HIGHEST TO LOWEST
:return: d
|
def set_default(d, *dicts):
    """
    RECURSIVE MERGE OF MULTIPLE dicts MOST IMPORTANT FIRST
    UPDATES d WITH THE MERGE RESULT, WHERE MERGE RESULT IS DEFINED AS:
    FOR EACH LEAF, RETURN THE FIRST NOT-NULL LEAF VALUE
    :param dicts: dicts IN PRIORITY ORDER, HIGHEST TO LOWEST
    :return: d
    """
    # a falsy non-data d is replaced by a fresh dict accumulator
    agg = d if d or is_data(d) else {}
    for p in dicts:
        # fresh `seen` per source keeps copy-sharing scoped to that source
        _set_default(agg, p, seen={})
    return to_data(agg)
|
(d, *dicts)
|
43,539 |
mo_dots.fields
|
split_field
|
RETURN field AS ARRAY OF DOT-SEPARATED FIELDS
|
def split_field(field):
    """
    RETURN field AS ARRAY OF DOT-SEPARATED FIELDS
    """
    if ILLEGAL_DOTS.search(field):
        get_logger().error("Odd number of dots is not allowed")
    if field.startswith(".."):
        # leading dots encode parent navigation: each extra dot becomes a ".." entry
        remainder = field.lstrip(".")
        back = len(field) - len(remainder) - 1
        return [".."] * back + [UNESCAPE_DOTS.sub(".", k) for k in SPLIT_DOTS.split(remainder) if k]
    else:
        return [UNESCAPE_DOTS.sub(".", k) for k in SPLIT_DOTS.split(field) if k]
|
(field)
|
43,540 |
mo_dots.fields
|
startswith_field
|
RETURN True IF field PATH STRING STARTS WITH prefix PATH STRING
|
def startswith_field(field, prefix):
    """
    RETURN True IF field PATH STRING STARTS WITH prefix PATH STRING
    """
    if prefix == None:  # "==" (not "is") — presumably so Null also compares equal to None; confirm
        return False
    if prefix.startswith("."):
        # relative prefixes match everything
        return True
    if not field.startswith(prefix):
        return False
    lp = len(prefix)
    if len(field) == lp:
        return True
    # prefix must end on a path-step boundary
    return field[lp] in (".", "\b") and field[lp + 1] not in (".", "\b")
|
(field, prefix)
|
43,542 |
mo_dots.fields
|
tail_field
|
RETURN THE FIRST STEP IN PATH, ALONG WITH THE REMAINING TAIL
IN (first, rest) PAIR
|
def tail_field(field):
    """
    RETURN THE FIRST STEP IN PATH, ALONG WITH THE REMAINING TAIL
    IN (first, rest) PAIR
    """
    if field == "." or is_missing(field):
        return ".", "."
    elif "." in field:
        path = split_field(field)
        if path[0].startswith("."):
            # parent-navigation step is returned verbatim (not re-escaped)
            return path[0], join_field(path[1:])
        return literal_field(path[0]), join_field(path[1:])
    else:
        return field, "."
|
(field)
|
43,543 |
datetime
|
time
|
time([hour[, minute[, second[, microsecond[, tzinfo]]]]]) --> a time object
All arguments are optional. tzinfo may be None, or an instance of
a tzinfo subclass. The remaining arguments may be ints.
|
class time:
"""Time with time zone.
Constructors:
__new__()
Operators:
__repr__, __str__
__eq__, __le__, __lt__, __ge__, __gt__, __hash__
Methods:
strftime()
isoformat()
utcoffset()
tzname()
dst()
Properties (readonly):
hour, minute, second, microsecond, tzinfo, fold
"""
__slots__ = '_hour', '_minute', '_second', '_microsecond', '_tzinfo', '_hashcode', '_fold'
def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold=0):
"""Constructor.
Arguments:
hour, minute (required)
second, microsecond (default to zero)
tzinfo (default to None)
fold (keyword only, default to zero)
"""
if (isinstance(hour, (bytes, str)) and len(hour) == 6 and
ord(hour[0:1])&0x7F < 24):
# Pickle support
if isinstance(hour, str):
try:
hour = hour.encode('latin1')
except UnicodeEncodeError:
# More informative error message.
raise ValueError(
"Failed to encode latin1 string when unpickling "
"a time object. "
"pickle.load(data, encoding='latin1') is assumed.")
self = object.__new__(cls)
self.__setstate(hour, minute or None)
self._hashcode = -1
return self
hour, minute, second, microsecond, fold = _check_time_fields(
hour, minute, second, microsecond, fold)
_check_tzinfo_arg(tzinfo)
self = object.__new__(cls)
self._hour = hour
self._minute = minute
self._second = second
self._microsecond = microsecond
self._tzinfo = tzinfo
self._hashcode = -1
self._fold = fold
return self
# Read-only field accessors
@property
def hour(self):
"""hour (0-23)"""
return self._hour
@property
def minute(self):
"""minute (0-59)"""
return self._minute
@property
def second(self):
"""second (0-59)"""
return self._second
@property
def microsecond(self):
"""microsecond (0-999999)"""
return self._microsecond
@property
def tzinfo(self):
"""timezone info object"""
return self._tzinfo
@property
def fold(self):
return self._fold
# Standard conversions, __hash__ (and helpers)
# Comparisons of time objects with other.
def __eq__(self, other):
if isinstance(other, time):
return self._cmp(other, allow_mixed=True) == 0
else:
return NotImplemented
def __le__(self, other):
if isinstance(other, time):
return self._cmp(other) <= 0
else:
return NotImplemented
def __lt__(self, other):
if isinstance(other, time):
return self._cmp(other) < 0
else:
return NotImplemented
def __ge__(self, other):
if isinstance(other, time):
return self._cmp(other) >= 0
else:
return NotImplemented
def __gt__(self, other):
if isinstance(other, time):
return self._cmp(other) > 0
else:
return NotImplemented
def _cmp(self, other, allow_mixed=False):
assert isinstance(other, time)
mytz = self._tzinfo
ottz = other._tzinfo
myoff = otoff = None
if mytz is ottz:
base_compare = True
else:
myoff = self.utcoffset()
otoff = other.utcoffset()
base_compare = myoff == otoff
if base_compare:
return _cmp((self._hour, self._minute, self._second,
self._microsecond),
(other._hour, other._minute, other._second,
other._microsecond))
if myoff is None or otoff is None:
if allow_mixed:
return 2 # arbitrary non-zero value
else:
raise TypeError("cannot compare naive and aware times")
myhhmm = self._hour * 60 + self._minute - myoff//timedelta(minutes=1)
othhmm = other._hour * 60 + other._minute - otoff//timedelta(minutes=1)
return _cmp((myhhmm, self._second, self._microsecond),
(othhmm, other._second, other._microsecond))
def __hash__(self):
"""Hash."""
if self._hashcode == -1:
if self.fold:
t = self.replace(fold=0)
else:
t = self
tzoff = t.utcoffset()
if not tzoff: # zero or None
self._hashcode = hash(t._getstate()[0])
else:
h, m = divmod(timedelta(hours=self.hour, minutes=self.minute) - tzoff,
timedelta(hours=1))
assert not m % timedelta(minutes=1), "whole minute"
m //= timedelta(minutes=1)
if 0 <= h < 24:
self._hashcode = hash(time(h, m, self.second, self.microsecond))
else:
self._hashcode = hash((h, m, self.second, self.microsecond))
return self._hashcode
# Conversion to string
def _tzstr(self):
"""Return formatted timezone offset (+xx:xx) or an empty string."""
off = self.utcoffset()
return _format_offset(off)
def __repr__(self):
"""Convert to formal string, for repr()."""
if self._microsecond != 0:
s = ", %d, %d" % (self._second, self._microsecond)
elif self._second != 0:
s = ", %d" % self._second
else:
s = ""
s= "%s.%s(%d, %d%s)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._hour, self._minute, s)
if self._tzinfo is not None:
assert s[-1:] == ")"
s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
if self._fold:
assert s[-1:] == ")"
s = s[:-1] + ", fold=1)"
return s
def isoformat(self, timespec='auto'):
"""Return the time formatted according to ISO.
The full format is 'HH:MM:SS.mmmmmm+zz:zz'. By default, the fractional
part is omitted if self.microsecond == 0.
The optional argument timespec specifies the number of additional
terms of the time to include. Valid options are 'auto', 'hours',
'minutes', 'seconds', 'milliseconds' and 'microseconds'.
"""
s = _format_time(self._hour, self._minute, self._second,
self._microsecond, timespec)
tz = self._tzstr()
if tz:
s += tz
return s
__str__ = isoformat
@classmethod
def fromisoformat(cls, time_string):
"""Construct a time from the output of isoformat()."""
if not isinstance(time_string, str):
raise TypeError('fromisoformat: argument must be str')
try:
return cls(*_parse_isoformat_time(time_string))
except Exception:
raise ValueError(f'Invalid isoformat string: {time_string!r}')
def strftime(self, fmt):
"""Format using strftime(). The date part of the timestamp passed
to underlying strftime should not be used.
"""
# The year must be >= 1000 else Python's strftime implementation
# can raise a bogus exception.
timetuple = (1900, 1, 1,
self._hour, self._minute, self._second,
0, 1, -1)
return _wrap_strftime(self, fmt, timetuple)
def __format__(self, fmt):
if not isinstance(fmt, str):
raise TypeError("must be str, not %s" % type(fmt).__name__)
if len(fmt) != 0:
return self.strftime(fmt)
return str(self)
# Timezone functions
def utcoffset(self):
"""Return the timezone offset as timedelta, positive east of UTC
(negative west of UTC)."""
if self._tzinfo is None:
return None
offset = self._tzinfo.utcoffset(None)
_check_utc_offset("utcoffset", offset)
return offset
def tzname(self):
"""Return the timezone name.
Note that the name is 100% informational -- there's no requirement that
it mean anything in particular. For example, "GMT", "UTC", "-500",
"-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
"""
if self._tzinfo is None:
return None
name = self._tzinfo.tzname(None)
_check_tzname(name)
return name
def dst(self):
"""Return 0 if DST is not in effect, or the DST offset (as timedelta
positive eastward) if DST is in effect.
This is purely informational; the DST offset has already been added to
the UTC offset returned by utcoffset() if applicable, so there's no
need to consult dst() unless you're interested in displaying the DST
info.
"""
if self._tzinfo is None:
return None
offset = self._tzinfo.dst(None)
_check_utc_offset("dst", offset)
return offset
def replace(self, hour=None, minute=None, second=None, microsecond=None,
tzinfo=True, *, fold=None):
"""Return a new time with new values for the specified fields."""
if hour is None:
hour = self.hour
if minute is None:
minute = self.minute
if second is None:
second = self.second
if microsecond is None:
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
if fold is None:
fold = self._fold
return type(self)(hour, minute, second, microsecond, tzinfo, fold=fold)
# Pickle support.
    def _getstate(self, protocol=3):
        # Split the 24-bit microsecond count into three big-endian bytes.
        us2, us3 = divmod(self._microsecond, 256)
        us1, us2 = divmod(us2, 256)
        h = self._hour
        # Pickle protocol 4+ encodes fold=1 by setting the high bit of the
        # hour byte (hours are < 24, so the bit is otherwise unused).
        if self._fold and protocol > 3:
            h += 128
        basestate = bytes([h, self._minute, self._second,
                           us1, us2, us3])
        if self._tzinfo is None:
            return (basestate,)
        else:
            return (basestate, self._tzinfo)
    def __setstate(self, string, tzinfo):
        # Inverse of _getstate(): unpack the 6-byte state plus optional tzinfo.
        if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class):
            raise TypeError("bad tzinfo state arg")
        h, self._minute, self._second, us1, us2, us3 = string
        # A set high bit on the hour byte means fold=1 (see _getstate).
        if h > 127:
            self._fold = 1
            self._hour = h - 128
        else:
            self._fold = 0
            self._hour = h
        # Reassemble the 24-bit microsecond count from its three bytes.
        self._microsecond = (((us1 << 8) | us2) << 8) | us3
        self._tzinfo = tzinfo
    def __reduce_ex__(self, protocol):
        # Pickle as (class, constructor args); fold is packed into the state.
        return (self.__class__, self._getstate(protocol))

    def __reduce__(self):
        # Legacy entry point: fall back to protocol-2 state encoding.
        return self.__reduce_ex__(2)
| null |
43,545 |
mo_dots
|
to_data
|
WRAP AS Data OBJECT FOR DATA PROCESSING: https://github.com/klahnakoski/mo-dots/tree/dev/docs
:param v: THE VALUE TO WRAP
:return: Data INSTANCE
|
def to_data(v=None) -> object:
    """
    WRAP AS Data OBJECT FOR DATA PROCESSING: https://github.com/klahnakoski/mo-dots/tree/dev/docs
    :param v: THE VALUE TO WRAP
    :return: Data INSTANCE
    """
    kind = _get(v, CLASS)
    if kind in (dict, OrderedDict):
        # Wrap dicts without copying: store the original in the Data slot.
        wrapped = _new(Data)
        _set(wrapped, SLOT, v)
        return wrapped
    if kind is none_type:
        return Null
    if kind is tuple or kind is list:
        return list_to_data(v)
    if kind in generator_types:
        # Generators are materialized (and unwrapped) before wrapping.
        return list_to_data([from_data(item) for item in v])
    return v
|
(v=None) -> object
|
43,546 |
mo_dots
|
tuplewrap
|
INTENDED TO TURN lists INTO tuples FOR USE AS KEYS
|
def tuplewrap(value):
    """
    INTENDED TO TURN lists INTO tuples FOR USE AS KEYS
    """
    if is_null(value):
        return ()
    if is_many(value):
        # Recurse only into nested sequences; leave scalars untouched.
        return tuple((tuplewrap(item) if is_sequence(item) else item) for item in value)
    return (from_data(value),)
|
(value)
|
43,548 |
mo_dots.fields
|
unliteral_field
|
DUE TO PATHOLOGY IN MY CODE WE HAVE A path WITH ESCAPED DOTS BUT WE WANT TO USE IT ON A dict, NOT A Data
a = dict()
b = Data(a)
a[unliteral_field(k)]==b[k] (for all k)
:param field: THE STRING TO DE-literal IZE
:return: SIMPLER STRING
|
def unliteral_field(field):
    """
    DUE TO PATHOLOGY IN MY CODE WE HAVE A path WITH ESCAPED DOTS BUT WE WANT TO USE IT ON A dict, NOT A Data
        a = dict()
        b = Data(a)
        a[unliteral_field(k)]==b[k] (for all k)
    :param field: THE STRING TO DE-literalIZE
    :return: SIMPLER STRING
    """
    # Collapse escaped dots back into literal "." characters.
    return UNESCAPE_DOTS.sub(".", field)
|
(field)
|
43,550 |
mo_dots
|
unwraplist
|
LISTS WITH ZERO AND ONE element MAP TO None AND element RESPECTIVELY
|
def unwraplist(v):
    """
    LISTS WITH ZERO AND ONE element MAP TO None AND element RESPECTIVELY
    """
    if is_list(v):
        count = len(v)
        if count == 0:
            return None
        # Single-element lists collapse to the (unwrapped) element itself.
        return from_data(v[0]) if count == 1 else from_data(v)
    return from_data(v)
|
(v)
|
43,554 |
mo_dots
|
zip
|
CONVERT LIST OF KEY/VALUE PAIRS TO Data
PLEASE `import mo_dots`, AND CALL `mo_dots.zip()`
|
def zip(keys, values):
    """
    CONVERT LIST OF KEY/VALUE PAIRS TO Data
    PLEASE `import mo_dots`, AND CALL `mo_dots.zip()`
    """
    # Deliberately shadows the builtin; callers use the qualified name.
    result = Data()
    for key, value in _dict_zip(keys, values):
        result[key] = value
    return result
|
(keys, values)
|
43,671 |
sphinx_basic_ng
|
setup
|
Entry point for sphinx theming.
|
def setup(app: Sphinx) -> Dict[str, Any]:
    """Entry point for sphinx theming."""
    app.require_sphinx("4.0")
    app.add_html_theme("basic-ng", str(_THEME_PATH))
    # Theme metadata: safe for parallel reads and writes.
    metadata = {
        "parallel_read_safe": True,
        "parallel_write_safe": True,
        "version": __version__,
    }
    return metadata
|
(app: sphinx.application.Sphinx) -> Dict[str, Any]
|
43,672 |
piexif._exif
|
ExifIFD
|
Exif tag number reference - Exif IFD
|
class ExifIFD:
    """Exif tag number reference - Exif IFD.

    Each attribute is the numeric Exif tag ID used as a key in the
    "Exif" IFD dictionary.
    """
    ExposureTime = 33434
    FNumber = 33437
    ExposureProgram = 34850
    SpectralSensitivity = 34852
    ISOSpeedRatings = 34855
    OECF = 34856
    SensitivityType = 34864
    StandardOutputSensitivity = 34865
    RecommendedExposureIndex = 34866
    ISOSpeed = 34867
    ISOSpeedLatitudeyyy = 34868
    ISOSpeedLatitudezzz = 34869
    ExifVersion = 36864
    DateTimeOriginal = 36867
    DateTimeDigitized = 36868
    OffsetTime = 36880
    OffsetTimeOriginal = 36881
    OffsetTimeDigitized = 36882
    ComponentsConfiguration = 37121
    CompressedBitsPerPixel = 37122
    ShutterSpeedValue = 37377
    ApertureValue = 37378
    BrightnessValue = 37379
    ExposureBiasValue = 37380
    MaxApertureValue = 37381
    SubjectDistance = 37382
    MeteringMode = 37383
    LightSource = 37384
    Flash = 37385
    FocalLength = 37386
    Temperature = 37888
    Humidity = 37889
    Pressure = 37890
    WaterDepth = 37891
    Acceleration = 37892
    CameraElevationAngle = 37893
    SubjectArea = 37396
    MakerNote = 37500
    UserComment = 37510
    SubSecTime = 37520
    SubSecTimeOriginal = 37521
    SubSecTimeDigitized = 37522
    FlashpixVersion = 40960
    ColorSpace = 40961
    PixelXDimension = 40962
    PixelYDimension = 40963
    RelatedSoundFile = 40964
    InteroperabilityTag = 40965
    FlashEnergy = 41483
    SpatialFrequencyResponse = 41484
    FocalPlaneXResolution = 41486
    FocalPlaneYResolution = 41487
    FocalPlaneResolutionUnit = 41488
    SubjectLocation = 41492
    ExposureIndex = 41493
    SensingMethod = 41495
    FileSource = 41728
    SceneType = 41729
    CFAPattern = 41730
    CustomRendered = 41985
    ExposureMode = 41986
    WhiteBalance = 41987
    DigitalZoomRatio = 41988
    FocalLengthIn35mmFilm = 41989
    SceneCaptureType = 41990
    GainControl = 41991
    Contrast = 41992
    Saturation = 41993
    Sharpness = 41994
    DeviceSettingDescription = 41995
    SubjectDistanceRange = 41996
    ImageUniqueID = 42016
    CameraOwnerName = 42032
    BodySerialNumber = 42033
    LensSpecification = 42034
    LensMake = 42035
    LensModel = 42036
    LensSerialNumber = 42037
    Gamma = 42240
()
|
43,673 |
piexif._exif
|
GPSIFD
|
Exif tag number reference - GPS IFD
|
class GPSIFD:
    """Exif tag number reference - GPS IFD.

    Each attribute is the numeric tag ID used as a key in the "GPS" IFD
    dictionary.
    """
    GPSVersionID = 0
    GPSLatitudeRef = 1
    GPSLatitude = 2
    GPSLongitudeRef = 3
    GPSLongitude = 4
    GPSAltitudeRef = 5
    GPSAltitude = 6
    GPSTimeStamp = 7
    GPSSatellites = 8
    GPSStatus = 9
    GPSMeasureMode = 10
    GPSDOP = 11
    GPSSpeedRef = 12
    GPSSpeed = 13
    GPSTrackRef = 14
    GPSTrack = 15
    GPSImgDirectionRef = 16
    GPSImgDirection = 17
    GPSMapDatum = 18
    GPSDestLatitudeRef = 19
    GPSDestLatitude = 20
    GPSDestLongitudeRef = 21
    GPSDestLongitude = 22
    GPSDestBearingRef = 23
    GPSDestBearing = 24
    GPSDestDistanceRef = 25
    GPSDestDistance = 26
    GPSProcessingMethod = 27
    GPSAreaInformation = 28
    GPSDateStamp = 29
    GPSDifferential = 30
    GPSHPositioningError = 31
()
|
43,674 |
piexif._exif
|
ImageIFD
|
Exif tag number reference - 0th IFD
|
class ImageIFD:
    """Exif tag number reference - 0th IFD.

    Each attribute is the numeric tag ID used as a key in the "0th" and
    "1st" IFD dictionaries. The ZZZTest* entries at the end are test-only
    tags used by the package's own test suite.
    """
    ProcessingSoftware = 11
    NewSubfileType = 254
    SubfileType = 255
    ImageWidth = 256
    ImageLength = 257
    BitsPerSample = 258
    Compression = 259
    PhotometricInterpretation = 262
    Threshholding = 263
    CellWidth = 264
    CellLength = 265
    FillOrder = 266
    DocumentName = 269
    ImageDescription = 270
    Make = 271
    Model = 272
    StripOffsets = 273
    Orientation = 274
    SamplesPerPixel = 277
    RowsPerStrip = 278
    StripByteCounts = 279
    XResolution = 282
    YResolution = 283
    PlanarConfiguration = 284
    GrayResponseUnit = 290
    GrayResponseCurve = 291
    T4Options = 292
    T6Options = 293
    ResolutionUnit = 296
    TransferFunction = 301
    Software = 305
    DateTime = 306
    Artist = 315
    HostComputer = 316
    Predictor = 317
    WhitePoint = 318
    PrimaryChromaticities = 319
    ColorMap = 320
    HalftoneHints = 321
    TileWidth = 322
    TileLength = 323
    TileOffsets = 324
    TileByteCounts = 325
    SubIFDs = 330
    InkSet = 332
    InkNames = 333
    NumberOfInks = 334
    DotRange = 336
    TargetPrinter = 337
    ExtraSamples = 338
    SampleFormat = 339
    SMinSampleValue = 340
    SMaxSampleValue = 341
    TransferRange = 342
    ClipPath = 343
    XClipPathUnits = 344
    YClipPathUnits = 345
    Indexed = 346
    JPEGTables = 347
    OPIProxy = 351
    JPEGProc = 512
    JPEGInterchangeFormat = 513
    JPEGInterchangeFormatLength = 514
    JPEGRestartInterval = 515
    JPEGLosslessPredictors = 517
    JPEGPointTransforms = 518
    JPEGQTables = 519
    JPEGDCTables = 520
    JPEGACTables = 521
    YCbCrCoefficients = 529
    YCbCrSubSampling = 530
    YCbCrPositioning = 531
    ReferenceBlackWhite = 532
    XMLPacket = 700
    Rating = 18246
    RatingPercent = 18249
    ImageID = 32781
    CFARepeatPatternDim = 33421
    CFAPattern = 33422
    BatteryLevel = 33423
    Copyright = 33432
    ExposureTime = 33434
    ImageResources = 34377
    ExifTag = 34665
    InterColorProfile = 34675
    GPSTag = 34853
    Interlace = 34857
    TimeZoneOffset = 34858
    SelfTimerMode = 34859
    FlashEnergy = 37387
    SpatialFrequencyResponse = 37388
    Noise = 37389
    FocalPlaneXResolution = 37390
    FocalPlaneYResolution = 37391
    FocalPlaneResolutionUnit = 37392
    ImageNumber = 37393
    SecurityClassification = 37394
    ImageHistory = 37395
    ExposureIndex = 37397
    TIFFEPStandardID = 37398
    SensingMethod = 37399
    XPTitle = 40091
    XPComment = 40092
    XPAuthor = 40093
    XPKeywords = 40094
    XPSubject = 40095
    PrintImageMatching = 50341
    DNGVersion = 50706
    DNGBackwardVersion = 50707
    UniqueCameraModel = 50708
    LocalizedCameraModel = 50709
    CFAPlaneColor = 50710
    CFALayout = 50711
    LinearizationTable = 50712
    BlackLevelRepeatDim = 50713
    BlackLevel = 50714
    BlackLevelDeltaH = 50715
    BlackLevelDeltaV = 50716
    WhiteLevel = 50717
    DefaultScale = 50718
    DefaultCropOrigin = 50719
    DefaultCropSize = 50720
    ColorMatrix1 = 50721
    ColorMatrix2 = 50722
    CameraCalibration1 = 50723
    CameraCalibration2 = 50724
    ReductionMatrix1 = 50725
    ReductionMatrix2 = 50726
    AnalogBalance = 50727
    AsShotNeutral = 50728
    AsShotWhiteXY = 50729
    BaselineExposure = 50730
    BaselineNoise = 50731
    BaselineSharpness = 50732
    BayerGreenSplit = 50733
    LinearResponseLimit = 50734
    CameraSerialNumber = 50735
    LensInfo = 50736
    ChromaBlurRadius = 50737
    AntiAliasStrength = 50738
    ShadowScale = 50739
    DNGPrivateData = 50740
    MakerNoteSafety = 50741
    CalibrationIlluminant1 = 50778
    CalibrationIlluminant2 = 50779
    BestQualityScale = 50780
    RawDataUniqueID = 50781
    OriginalRawFileName = 50827
    OriginalRawFileData = 50828
    ActiveArea = 50829
    MaskedAreas = 50830
    AsShotICCProfile = 50831
    AsShotPreProfileMatrix = 50832
    CurrentICCProfile = 50833
    CurrentPreProfileMatrix = 50834
    ColorimetricReference = 50879
    CameraCalibrationSignature = 50931
    ProfileCalibrationSignature = 50932
    AsShotProfileName = 50934
    NoiseReductionApplied = 50935
    ProfileName = 50936
    ProfileHueSatMapDims = 50937
    ProfileHueSatMapData1 = 50938
    ProfileHueSatMapData2 = 50939
    ProfileToneCurve = 50940
    ProfileEmbedPolicy = 50941
    ProfileCopyright = 50942
    ForwardMatrix1 = 50964
    ForwardMatrix2 = 50965
    PreviewApplicationName = 50966
    PreviewApplicationVersion = 50967
    PreviewSettingsName = 50968
    PreviewSettingsDigest = 50969
    PreviewColorSpace = 50970
    PreviewDateTime = 50971
    RawImageDigest = 50972
    OriginalRawFileDigest = 50973
    SubTileBlockSize = 50974
    RowInterleaveFactor = 50975
    ProfileLookTableDims = 50981
    ProfileLookTableData = 50982
    OpcodeList1 = 51008
    OpcodeList2 = 51009
    OpcodeList3 = 51022
    NoiseProfile = 51041
    ZZZTestSlong1 = 60606
    ZZZTestSlong2 = 60607
    ZZZTestSByte = 60608
    ZZZTestSShort = 60609
    ZZZTestDFloat = 60610
()
|
43,675 |
piexif._exif
|
InteropIFD
|
Exif tag number reference - Interoperability IFD
|
class InteropIFD:
    """Exif tag number reference - Interoperability IFD."""
    InteroperabilityIndex = 1
|
()
|
43,676 |
piexif._exceptions
|
InvalidImageDataError
| null |
class InvalidImageDataError(ValueError):
    """Raised when input bytes are not recognized as a supported image format."""
    pass
| null |
43,677 |
piexif._exif
|
TYPES
| null |
class TYPES:
    # Numeric codes for Exif/TIFF field value types, used when packing and
    # unpacking IFD entries.
    Byte = 1
    Ascii = 2
    Short = 3
    Long = 4
    Rational = 5
    SByte = 6
    Undefined = 7
    SShort = 8
    SLong = 9
    SRational = 10
    Float = 11
    DFloat = 12
|
()
|
43,687 |
piexif._dump
|
dump
|
py:function:: piexif.load(data)
Return exif as bytes.
:param dict exif: Exif data({"0th":dict, "Exif":dict, "GPS":dict, "Interop":dict, "1st":dict, "thumbnail":bytes})
:return: Exif
:rtype: bytes
|
def dump(exif_dict_original):
    """
    py:function:: piexif.dump(exif_dict)

    Return exif as bytes.

    :param dict exif: Exif data({"0th":dict, "Exif":dict, "GPS":dict, "Interop":dict, "1st":dict, "thumbnail":bytes})
    :return: Exif
    :rtype: bytes
    """
    # Work on a deep copy so the caller's dict is never mutated.
    exif_dict = copy.deepcopy(exif_dict_original)
    # "Exif\0\0" marker + big-endian TIFF header pointing at offset 8.
    header = b"Exif\x00\x00\x4d\x4d\x00\x2a\x00\x00\x00\x08"
    exif_is = False
    gps_is = False
    interop_is = False
    first_is = False
    if "0th" in exif_dict:
        zeroth_ifd = exif_dict["0th"]
    else:
        zeroth_ifd = {}
    # Pointer tags (ExifTag/GPSTag/InteroperabilityTag) are set to the
    # placeholder 1 here; the real offsets are packed later below.
    if (("Exif" in exif_dict) and len(exif_dict["Exif"]) or
          ("Interop" in exif_dict) and len(exif_dict["Interop"]) ):
        zeroth_ifd[ImageIFD.ExifTag] = 1
        exif_is = True
        exif_ifd = exif_dict["Exif"]
        if ("Interop" in exif_dict) and len(exif_dict["Interop"]):
            exif_ifd[ExifIFD. InteroperabilityTag] = 1
            interop_is = True
            interop_ifd = exif_dict["Interop"]
        elif ExifIFD. InteroperabilityTag in exif_ifd:
            exif_ifd.pop(ExifIFD.InteroperabilityTag)
    elif ImageIFD.ExifTag in zeroth_ifd:
        zeroth_ifd.pop(ImageIFD.ExifTag)
    if ("GPS" in exif_dict) and len(exif_dict["GPS"]):
        zeroth_ifd[ImageIFD.GPSTag] = 1
        gps_is = True
        gps_ifd = exif_dict["GPS"]
    elif ImageIFD.GPSTag in zeroth_ifd:
        zeroth_ifd.pop(ImageIFD.GPSTag)
    if (("1st" in exif_dict) and
            ("thumbnail" in exif_dict) and
            (exif_dict["thumbnail"] is not None)):
        first_is = True
        exif_dict["1st"][ImageIFD.JPEGInterchangeFormat] = 1
        exif_dict["1st"][ImageIFD.JPEGInterchangeFormatLength] = 1
        first_ifd = exif_dict["1st"]
    # Serialize each IFD, accumulating byte lengths so later IFDs know
    # their absolute offsets inside the TIFF block.
    zeroth_set = _dict_to_bytes(zeroth_ifd, "0th", 0)
    zeroth_length = (len(zeroth_set[0]) + exif_is * 12 + gps_is * 12 + 4 +
                     len(zeroth_set[1]))
    if exif_is:
        exif_set = _dict_to_bytes(exif_ifd, "Exif", zeroth_length)
        exif_length = len(exif_set[0]) + interop_is * 12 + len(exif_set[1])
    else:
        exif_bytes = b""
        exif_length = 0
    if gps_is:
        gps_set = _dict_to_bytes(gps_ifd, "GPS", zeroth_length + exif_length)
        gps_bytes = b"".join(gps_set)
        gps_length = len(gps_bytes)
    else:
        gps_bytes = b""
        gps_length = 0
    if interop_is:
        offset = zeroth_length + exif_length + gps_length
        interop_set = _dict_to_bytes(interop_ifd, "Interop", offset)
        interop_bytes = b"".join(interop_set)
        interop_length = len(interop_bytes)
    else:
        interop_bytes = b""
        interop_length = 0
    if first_is:
        offset = zeroth_length + exif_length + gps_length + interop_length
        first_set = _dict_to_bytes(first_ifd, "1st", offset)
        thumbnail = _get_thumbnail(exif_dict["thumbnail"])
        thumbnail_max_size = 64000
        if len(thumbnail) > thumbnail_max_size:
            raise ValueError("Given thumbnail is too large. max 64kB")
    else:
        first_bytes = b""
    # Build the 12-byte pointer entries now that all offsets are known.
    if exif_is:
        pointer_value = TIFF_HEADER_LENGTH + zeroth_length
        pointer_str = struct.pack(">I", pointer_value)
        key = ImageIFD.ExifTag
        key_str = struct.pack(">H", key)
        type_str = struct.pack(">H", TYPES.Long)
        length_str = struct.pack(">I", 1)
        exif_pointer = key_str + type_str + length_str + pointer_str
    else:
        exif_pointer = b""
    if gps_is:
        pointer_value = TIFF_HEADER_LENGTH + zeroth_length + exif_length
        pointer_str = struct.pack(">I", pointer_value)
        key = ImageIFD.GPSTag
        key_str = struct.pack(">H", key)
        type_str = struct.pack(">H", TYPES.Long)
        length_str = struct.pack(">I", 1)
        gps_pointer = key_str + type_str + length_str + pointer_str
    else:
        gps_pointer = b""
    if interop_is:
        pointer_value = (TIFF_HEADER_LENGTH +
                         zeroth_length + exif_length + gps_length)
        pointer_str = struct.pack(">I", pointer_value)
        key = ExifIFD.InteroperabilityTag
        key_str = struct.pack(">H", key)
        type_str = struct.pack(">H", TYPES.Long)
        length_str = struct.pack(">I", 1)
        interop_pointer = key_str + type_str + length_str + pointer_str
    else:
        interop_pointer = b""
    if first_is:
        pointer_value = (TIFF_HEADER_LENGTH + zeroth_length +
                         exif_length + gps_length + interop_length)
        first_ifd_pointer = struct.pack(">L", pointer_value)
        # 24 bytes = the two hard-coded thumbnail entries below (12 each).
        thumbnail_pointer = (pointer_value + len(first_set[0]) + 24 +
                             4 + len(first_set[1]))
        thumbnail_p_bytes = (b"\x02\x01\x00\x04\x00\x00\x00\x01" +
                             struct.pack(">L", thumbnail_pointer))
        thumbnail_length_bytes = (b"\x02\x02\x00\x04\x00\x00\x00\x01" +
                                  struct.pack(">L", len(thumbnail)))
        first_bytes = (first_set[0] + thumbnail_p_bytes +
                       thumbnail_length_bytes + b"\x00\x00\x00\x00" +
                       first_set[1] + thumbnail)
    else:
        first_ifd_pointer = b"\x00\x00\x00\x00"
    zeroth_bytes = (zeroth_set[0] + exif_pointer + gps_pointer +
                    first_ifd_pointer + zeroth_set[1])
    if exif_is:
        exif_bytes = exif_set[0] + interop_pointer + exif_set[1]
    return (header + zeroth_bytes + exif_bytes + gps_bytes +
            interop_bytes + first_bytes)
|
(exif_dict_original)
|
43,688 |
piexif._insert
|
insert
|
py:function:: piexif.insert(exif_bytes, filename)
Insert exif into JPEG.
:param bytes exif_bytes: Exif as bytes
:param str filename: JPEG
|
def insert(exif, image, new_file=None):
    """
    py:function:: piexif.insert(exif_bytes, filename)

    Insert exif into JPEG or WebP.

    :param bytes exif: Exif as bytes (must start with b"Exif\\x00\\x00")
    :param image: JPEG/WebP bytes, or a filename to read from
    :param new_file: optional output: BytesIO or filename; when omitted and
        ``image`` was a filename, the file is rewritten in place
    """
    if exif[0:6] != b"\x45\x78\x69\x66\x00\x00":
        raise ValueError("Given data is not exif data")

    output_file = False
    # Prevents "UnicodeWarning: Unicode equal comparison failed" warnings on Python 2
    maybe_image = sys.version_info >= (3,0,0) or isinstance(image, str)
    if maybe_image and image[0:2] == b"\xff\xd8":
        image_data = image
        file_type = "jpeg"
    elif maybe_image and image[0:4] == b"RIFF" and image[8:12] == b"WEBP":
        image_data = image
        file_type = "webp"
    else:
        # Not recognizable bytes: treat ``image`` as a filename.
        with open(image, 'rb') as f:
            image_data = f.read()
        if image_data[0:2] == b"\xff\xd8":
            file_type = "jpeg"
        elif image_data[0:4] == b"RIFF" and image_data[8:12] == b"WEBP":
            file_type = "webp"
        else:
            raise InvalidImageDataError
        output_file = True
    if file_type == "jpeg":
        # Wrap as an APP1 segment: marker + 2-byte length (includes itself).
        exif = b"\xff\xe1" + struct.pack(">H", len(exif) + 2) + exif
        segments = split_into_segments(image_data)
        new_data = merge_segments(segments, exif)
    elif file_type == "webp":
        # WebP stores the payload without the "Exif\0\0" identifier.
        exif = exif[6:]
        new_data = _webp.insert(image_data, exif)

    if isinstance(new_file, io.BytesIO):
        new_file.write(new_data)
        new_file.seek(0)
    elif new_file:
        with open(new_file, "wb+") as f:
            f.write(new_data)
    elif output_file:
        with open(image, "wb+") as f:
            f.write(new_data)
    else:
        raise ValueError("Give a 3rd argument to 'insert' to output file")
|
(exif, image, new_file=None)
|
43,689 |
piexif._load
|
load
|
py:function:: piexif.load(filename)
Return exif data as dict. The contained keys are "0th", "Exif", "GPS", "Interop", "1st", and "thumbnail". Except for "thumbnail", each value is a dict(tag name/tag value). The "thumbnail" value is JPEG as bytes.
:param str filename: JPEG or TIFF
:return: Exif data({"0th":dict, "Exif":dict, "GPS":dict, "Interop":dict, "1st":dict, "thumbnail":bytes})
:rtype: dict
|
def load(input_data, key_is_name=False):
    """
    py:function:: piexif.load(filename)

    Return exif data as dict. The contained keys are "0th", "Exif", "GPS",
    "Interop", "1st", and "thumbnail". Except for "thumbnail", each value is a
    dict(tag number/tag value). The "thumbnail" value is JPEG as bytes.

    :param str filename: JPEG or TIFF
    :return: Exif data({"0th":dict, "Exif":dict, "GPS":dict, "Interop":dict, "1st":dict, "thumbnail":bytes})
    :rtype: dict
    """
    exif_dict = {"0th":{},
                 "Exif":{},
                 "GPS":{},
                 "Interop":{},
                 "1st":{},
                 "thumbnail":None}
    exifReader = _ExifReader(input_data)
    if exifReader.tiftag is None:
        # No exif segment at all: return the empty skeleton.
        return exif_dict

    # Byte order is taken from the first two bytes of the TIFF header.
    if exifReader.tiftag[0:2] == LITTLE_ENDIAN:
        exifReader.endian_mark = "<"
    else:
        exifReader.endian_mark = ">"

    pointer = struct.unpack(exifReader.endian_mark + "L",
                            exifReader.tiftag[4:8])[0]
    exif_dict["0th"] = exifReader.get_ifd_dict(pointer, "0th")
    first_ifd_pointer = exif_dict["0th"].pop("first_ifd_pointer")
    # Sub-IFDs are reached through pointer tags stored in the 0th/Exif IFDs.
    if ImageIFD.ExifTag in exif_dict["0th"]:
        pointer = exif_dict["0th"][ImageIFD.ExifTag]
        exif_dict["Exif"] = exifReader.get_ifd_dict(pointer, "Exif")
    if ImageIFD.GPSTag in exif_dict["0th"]:
        pointer = exif_dict["0th"][ImageIFD.GPSTag]
        exif_dict["GPS"] = exifReader.get_ifd_dict(pointer, "GPS")
    if ExifIFD.InteroperabilityTag in exif_dict["Exif"]:
        pointer = exif_dict["Exif"][ExifIFD.InteroperabilityTag]
        exif_dict["Interop"] = exifReader.get_ifd_dict(pointer, "Interop")
    if first_ifd_pointer != b"\x00\x00\x00\x00":
        pointer = struct.unpack(exifReader.endian_mark + "L",
                                first_ifd_pointer)[0]
        exif_dict["1st"] = exifReader.get_ifd_dict(pointer, "1st")
        # The thumbnail JPEG lives at the offset/length recorded in the 1st IFD.
        if (ImageIFD.JPEGInterchangeFormat in exif_dict["1st"] and
                ImageIFD.JPEGInterchangeFormatLength in exif_dict["1st"]):
            end = (exif_dict["1st"][ImageIFD.JPEGInterchangeFormat] +
                   exif_dict["1st"][ImageIFD.JPEGInterchangeFormatLength])
            thumb = exifReader.tiftag[exif_dict["1st"][ImageIFD.JPEGInterchangeFormat]:end]
            exif_dict["thumbnail"] = thumb

    if key_is_name:
        exif_dict = _get_key_name_dict(exif_dict)
    return exif_dict
|
(input_data, key_is_name=False)
|
43,690 |
piexif._remove
|
remove
|
py:function:: piexif.remove(filename)
Remove exif from JPEG.
:param str filename: JPEG
|
def remove(src, new_file=None):
    """
    py:function:: piexif.remove(filename)

    Remove exif from JPEG or WebP.

    :param src: JPEG/WebP bytes, or a filename to read from
    :param new_file: optional output: BytesIO or filename; when omitted and
        ``src`` was a filename, the file is rewritten in place
    :raises ValueError: if the data is not a supported format, or when no
        output destination can be determined
    """
    output_is_file = False
    if src[0:2] == b"\xff\xd8":
        src_data = src
        file_type = "jpeg"
    elif src[0:4] == b"RIFF" and src[8:12] == b"WEBP":
        src_data = src
        file_type = "webp"
    else:
        # Not recognizable bytes: treat ``src`` as a filename.
        with open(src, 'rb') as f:
            src_data = f.read()
        output_is_file = True
        if src_data[0:2] == b"\xff\xd8":
            file_type = "jpeg"
        elif src_data[0:4] == b"RIFF" and src_data[8:12] == b"WEBP":
            file_type = "webp"
        else:
            # BUG FIX: previously fell through with ``file_type`` (and later
            # ``new_data``) unbound, producing a confusing NameError.
            raise ValueError("Given data is neither JPEG nor WebP")
    if file_type == "jpeg":
        segments = split_into_segments(src_data)
        exif = get_exif_seg(segments)
        if exif:
            new_data = src_data.replace(exif, b"")
        else:
            # No exif segment present: output is unchanged.
            new_data = src_data
    elif file_type == "webp":
        try:
            new_data = _webp.remove(src_data)
        except ValueError:
            new_data = src_data
        except Exception as e:
            # BUG FIX: was ``except e:``, which raised NameError at runtime
            # instead of handling the exception.
            print(e.args)
            raise ValueError("Error occurred.")

    if isinstance(new_file, io.BytesIO):
        new_file.write(new_data)
        new_file.seek(0)
    elif new_file:
        with open(new_file, "wb+") as f:
            f.write(new_data)
    elif output_is_file:
        with open(src, "wb+") as f:
            f.write(new_data)
    else:
        raise ValueError("Give a second argument to 'remove' to output file")
|
(src, new_file=None)
|
43,691 |
piexif._transplant
|
transplant
|
py:function:: piexif.transplant(filename1, filename2)
Transplant exif from filename1 to filename2.
:param str filename1: JPEG
:param str filename2: JPEG
|
def transplant(exif_src, image, new_file=None):
    """
    py:function:: piexif.transplant(filename1, filename2)

    Transplant exif from filename1 to filename2.

    :param str filename1: JPEG
    :param str filename2: JPEG
    """
    # Source of the exif: raw JPEG bytes or a filename.
    if exif_src[0:2] == b"\xff\xd8":
        src_data = exif_src
    else:
        with open(exif_src, 'rb') as f:
            src_data = f.read()
    segments = split_into_segments(src_data)
    exif = get_exif_seg(segments)
    if exif is None:
        raise ValueError("not found exif in input")

    # Destination image: raw JPEG bytes or a filename (rewritten in place
    # when no ``new_file`` is given).
    output_file = False
    if image[0:2] == b"\xff\xd8":
        image_data = image
    else:
        with open(image, 'rb') as f:
            image_data = f.read()
        output_file = True
    segments = split_into_segments(image_data)
    new_data = merge_segments(segments, exif)

    if isinstance(new_file, io.BytesIO):
        new_file.write(new_data)
        new_file.seek(0)
    elif new_file:
        with open(new_file, "wb+") as f:
            f.write(new_data)
    elif output_file:
        with open(image, "wb+") as f:
            f.write(new_data)
    else:
        raise ValueError("Give a 3rd argument to 'transplant' to output file")
|
(exif_src, image, new_file=None)
|
43,692 |
distutils.version
|
LooseVersion
|
Version numbering for anarchists and software realists.
Implements the standard interface for version number classes as
described above. A version number consists of a series of numbers,
separated by either periods or strings of letters. When comparing
version numbers, the numeric components will be compared
numerically, and the alphabetic components lexically. The following
are all valid version numbers, in no particular order:
1.5.1
1.5.2b2
161
3.10a
8.02
3.4j
1996.07.12
3.2.pl0
3.1.1.6
2g6
11g
0.960923
2.2beta29
1.13++
5.5.kw
2.0b1pl0
In fact, there is no such thing as an invalid version number under
this scheme; the rules for comparison are simple and predictable,
but may not always give the results you want (for some definition
of "want").
|
class LooseVersion(Version):
    """Version numbering for anarchists and software realists.
    Implements the standard interface for version number classes as
    described above.  A version number consists of a series of numbers,
    separated by either periods or strings of letters.  When comparing
    version numbers, the numeric components will be compared
    numerically, and the alphabetic components lexically.  The following
    are all valid version numbers, in no particular order:

        1.5.1
        1.5.2b2
        161
        3.10a
        8.02
        3.4j
        1996.07.12
        3.2.pl0
        3.1.1.6
        2g6
        11g
        0.960923
        2.2beta29
        1.13++
        5.5.kw
        2.0b1pl0

    In fact, there is no such thing as an invalid version number under
    this scheme; the rules for comparison are simple and predictable,
    but may not always give the results you want (for some definition
    of "want").
    """

    # Splits a version string into runs of digits, runs of letters, and dots.
    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def parse(self, vstring):
        # I've given up on thinking I can reconstruct the version string
        # from the parsed tuple -- so I just store the string here for
        # use by __str__
        self.vstring = vstring
        components = [x for x in self.component_re.split(vstring) if x and x != '.']
        for i, obj in enumerate(components):
            try:
                components[i] = int(obj)
            except ValueError:
                pass

        self.version = components

    def __str__(self):
        return self.vstring

    def __repr__(self):
        return "LooseVersion ('%s')" % str(self)

    def _cmp(self, other):
        # Returns -1/0/1 like the old cmp(); NotImplemented for foreign types.
        if isinstance(other, str):
            other = LooseVersion(other)
        elif not isinstance(other, LooseVersion):
            return NotImplemented

        if self.version == other.version:
            return 0
        if self.version < other.version:
            return -1
        if self.version > other.version:
            return 1
|
(vstring=None)
|
43,693 |
distutils.version
|
__eq__
| null |
def __eq__(self, other):
    # Delegate to _cmp(); propagate NotImplemented for foreign types.
    outcome = self._cmp(other)
    return outcome if outcome is NotImplemented else outcome == 0
|
(self, other)
|
43,694 |
distutils.version
|
__ge__
| null |
def __ge__(self, other):
    # Delegate to _cmp(); propagate NotImplemented for foreign types.
    outcome = self._cmp(other)
    return outcome if outcome is NotImplemented else outcome >= 0
|
(self, other)
|
43,695 |
distutils.version
|
__gt__
| null |
def __gt__(self, other):
    # Delegate to _cmp(); propagate NotImplemented for foreign types.
    outcome = self._cmp(other)
    return outcome if outcome is NotImplemented else outcome > 0
|
(self, other)
|
43,696 |
distutils.version
|
__init__
| null |
def __init__(self, vstring=None):
    # Parse immediately when a version string is supplied; otherwise the
    # caller is expected to invoke parse() before comparing.
    if vstring:
        self.parse(vstring)
    # Every construction emits the deprecation warning, pointing at the
    # caller's frame (stacklevel=2).
    warnings.warn(
        "distutils Version classes are deprecated. "
        "Use packaging.version instead.",
        DeprecationWarning,
        stacklevel=2,
    )
|
(self, vstring=None)
|
43,697 |
distutils.version
|
__le__
| null |
def __le__(self, other):
    # Delegate to _cmp(); propagate NotImplemented for foreign types.
    outcome = self._cmp(other)
    return outcome if outcome is NotImplemented else outcome <= 0
|
(self, other)
|
43,698 |
distutils.version
|
__lt__
| null |
def __lt__(self, other):
    # Delegate to _cmp(); propagate NotImplemented for foreign types.
    outcome = self._cmp(other)
    return outcome if outcome is NotImplemented else outcome < 0
|
(self, other)
|
43,699 |
distutils.version
|
__repr__
| null |
def __repr__(self):
    # Same text the old %-format produced: LooseVersion ('<vstring>')
    return "LooseVersion ('{}')".format(self)
|
(self)
|
43,700 |
distutils.version
|
__str__
| null |
def __str__(self):
    # The original, unparsed version string (stored by parse()).
    return self.vstring
|
(self)
|
43,701 |
distutils.version
|
_cmp
| null |
def _cmp(self, other):
    # Coerce raw strings; refuse anything that is not a LooseVersion.
    if isinstance(other, str):
        other = LooseVersion(other)
    elif not isinstance(other, LooseVersion):
        return NotImplemented

    lhs, rhs = self.version, other.version
    if lhs == rhs:
        return 0
    if lhs < rhs:
        return -1
    if lhs > rhs:
        return 1
|
(self, other)
|
43,702 |
distutils.version
|
parse
| null |
def parse(self, vstring):
    # Keep the raw string around: __str__ reproduces it verbatim.
    self.vstring = vstring
    pieces = [piece
              for piece in self.component_re.split(vstring)
              if piece and piece != '.']
    # Numeric runs become ints so they compare numerically; letter runs
    # stay as strings and compare lexically.
    for idx, piece in enumerate(pieces):
        try:
            pieces[idx] = int(piece)
        except ValueError:
            pass

    self.version = pieces
|
(self, vstring)
|
43,703 |
freezegun.api
|
freeze_time
| null |
def freeze_time(time_to_freeze: Optional[_Freezable]=None, tz_offset: Union[int, datetime.timedelta]=0, ignore: Optional[List[str]]=None, tick: bool=False, as_arg: bool=False, as_kwarg: str='',
                auto_tick_seconds: float=0, real_asyncio: bool=False) -> _freeze_time:
    """Return a _freeze_time object freezing time at *time_to_freeze*.

    Accepts None, str, date/datetime, timedelta, MayaDT (when installed),
    a function, or a generator; callables/generators/MayaDT are resolved
    recursively to a concrete value before freezing.
    """
    acceptable_times: Any = (type(None), str, datetime.date, datetime.timedelta,
                             types.FunctionType, types.GeneratorType)

    # MayaDT is an optional dependency; only accept it when importable.
    if MayaDT is not None:
        acceptable_times += MayaDT,

    if not isinstance(time_to_freeze, acceptable_times):
        raise TypeError(('freeze_time() expected None, a string, date instance, datetime '
                         'instance, MayaDT, timedelta instance, function or a generator, but got '
                         'type {}.').format(type(time_to_freeze)))
    if tick and not _is_cpython:
        raise SystemError('Calling freeze_time with tick=True is only compatible with CPython')

    # Resolve indirect time sources recursively to a concrete value.
    if isinstance(time_to_freeze, types.FunctionType):
        return freeze_time(time_to_freeze(), tz_offset, ignore, tick, as_arg, as_kwarg, auto_tick_seconds, real_asyncio=real_asyncio)

    if isinstance(time_to_freeze, types.GeneratorType):
        return freeze_time(next(time_to_freeze), tz_offset, ignore, tick, as_arg, as_kwarg, auto_tick_seconds, real_asyncio=real_asyncio)

    if MayaDT is not None and isinstance(time_to_freeze, MayaDT):
        return freeze_time(time_to_freeze.datetime(), tz_offset, ignore,
                           tick, as_arg, as_kwarg, auto_tick_seconds, real_asyncio=real_asyncio)

    # Copy before extending so the caller's list is never mutated.
    if ignore is None:
        ignore = []
    ignore = ignore[:]
    if config.settings.default_ignore_list:
        ignore.extend(config.settings.default_ignore_list)

    return _freeze_time(
        time_to_freeze_str=time_to_freeze,
        tz_offset=tz_offset,
        ignore=ignore,
        tick=tick,
        as_arg=as_arg,
        as_kwarg=as_kwarg,
        auto_tick_seconds=auto_tick_seconds,
        real_asyncio=real_asyncio,
    )
|
(time_to_freeze: Union[str, datetime.datetime, datetime.date, datetime.timedelta, function, Callable[[], Union[str, datetime.datetime, datetime.date, datetime.timedelta]], Iterator[datetime.datetime], NoneType] = None, tz_offset: Union[int, datetime.timedelta] = 0, ignore: Optional[List[str]] = None, tick: bool = False, as_arg: bool = False, as_kwarg: str = '', auto_tick_seconds: float = 0, real_asyncio: bool = False) -> freezegun.api._freeze_time
|
43,704 |
pytest_freezegun
|
freezer_fixture
|
Freeze time and make it available to the test
| null |
(request)
|
43,705 |
pytest_freezegun
|
get_closest_marker
|
Get our marker, regardless of pytest version
|
def get_closest_marker(node, name):
    """
    Get the marker named *name* from ``node``, regardless of pytest version.

    pytest < 3.6 only provides ``get_marker``; later versions renamed it to
    ``get_closest_marker``.
    """
    # BUG FIX: the marker name was hard-coded as 'freeze_time' and the
    # ``name`` parameter was ignored; use the parameter so other marker
    # names work too (existing callers pass MARKER_NAME == 'freeze_time').
    if LooseVersion(pytest.__version__) < LooseVersion('3.6.0'):
        return node.get_marker(name)
    else:
        return node.get_closest_marker(name)
|
(node, name)
|
43,707 |
pytest_freezegun
|
pytest_collection_modifyitems
|
Inject our fixture into any tests with our marker
|
def pytest_collection_modifyitems(items):
    """
    Inject our fixture into any tests with our marker
    """
    marked_items = (item for item in items
                    if get_closest_marker(item, MARKER_NAME))
    for item in marked_items:
        # Prepend so the freezer fixture is set up before other fixtures.
        item.fixturenames.insert(0, FIXTURE_NAME)
|
(items)
|
43,708 |
pytest_freezegun
|
pytest_configure
|
Register our marker
|
def pytest_configure(config):
    """
    Register the freeze-time marker with pytest so it appears in
    ``pytest --markers`` and passes strict-marker validation.
    """
    description = "{}(...): use freezegun to freeze time".format(MARKER_NAME)
    config.addinivalue_line("markers", description)
|
(config)
|
43,710 |
prov
|
Error
|
Base class for all errors in this package.
|
class Error(Exception):
    """Root of the package's exception hierarchy; all errors raised here
    derive from this class so callers can catch them with one clause."""
| null |
43,711 |
prov
|
read
|
Convenience function returning a ProvDocument instance.
It does a lazy format detection by simply using try/except for all known
formats. The deserializers should fail fairly early when data of the
wrong type is passed to them thus the try/except is likely cheap. One
could of course also do some more advanced format auto-detection but I am
not sure that is necessary.
The downside is that no proper error messages will be produced, use the
format parameter to get the actual traceback.
|
def read(source, format=None):
    """
    Convenience function returning a ProvDocument instance.

    It does a lazy format detection by simply using try/except for all known
    formats. The deserializers should fail fairly early when data of the
    wrong type is passed to them thus the try/except is likely cheap. One
    could of course also do some more advanced format auto-detection but I am
    not sure that is necessary.
    The downside is that no proper error messages will be produced, use the
    format parameter to get the actual traceback.

    :param source: the input to deserialize (path, file-like, or string,
        as accepted by ``ProvDocument.deserialize``).
    :param format: optional serializer name; when given, detection is
        skipped and only that format is tried.
    :return: a ``ProvDocument``.
    :raises TypeError: if no registered serializer can read the source.
    """
    # Lazy imports to not globber the namespace.
    from prov.model import ProvDocument
    from prov.serializers import Registry
    Registry.load_serializers()
    serializers = Registry.serializers.keys()
    if format:
        return ProvDocument.deserialize(source=source, format=format.lower())
    # NOTE(review): if ``source`` is a file-like object, a failed attempt may
    # consume the stream before the next format is tried — TODO confirm that
    # deserializers rewind or that callers pass paths/strings.
    for candidate in serializers:  # don't shadow the ``format`` parameter
        try:
            return ProvDocument.deserialize(source=source, format=candidate)
        except Exception:
            # Catch Exception, not a bare except: the original also swallowed
            # SystemExit/KeyboardInterrupt.  Deserializers are expected to
            # fail fast on wrong input; move on to the next format.
            pass
    # No break exists, so the original ``for/else`` always fell through to
    # this raise when every format failed; a plain post-loop raise is clearer.
    raise TypeError(
        "Could not read from the source. To get a proper "
        "error message, specify the format with the 'format' "
        "parameter."
    )
|
(source, format=None)
|
43,712 |
pyqt5_tools
|
_import_it
| null |
def _import_it(*segments):
    """
    Import and return the Qt-major-version-suffixed module named by *segments*.

    The first segment is translated through an alias map that injects the
    major version number (e.g. ``"pyqt_tools"`` becomes ``"pyqt5_tools"``
    when ``major`` is 5); any remaining segments are appended verbatim to
    form the dotted module path.
    """
    import importlib
    alias_map = {
        "pyqt_tools": "pyqt{major}_tools".format(major=major),
        "pyqt_plugins": "pyqt{major}_plugins".format(major=major),
        "qt_tools": "qt{major}_tools".format(major=major),
        "qt_applications": "qt{major}_applications".format(major=major),
        "PyQt": "PyQt{major}".format(major=major),
    }
    head, *rest = segments
    dotted_path = ".".join([alias_map[head], *rest])
    return importlib.import_module(dotted_path)
|
(*segments)
|
43,717 |
sapero_math.lcm
|
LCM
| null |
def LCM(x, y):
    """
    Return the least common multiple of two positive integers.

    Replaces the original brute-force search (increment from max(x, y)
    until a common multiple is found — O(lcm) iterations) with the
    identity lcm(x, y) = x * y // gcd(x, y), which runs in
    O(log min(x, y)) via Euclid's algorithm.

    :param x: a positive integer (the original also assumed positivity;
        it looped forever or raised ZeroDivisionError otherwise).
    :param y: a positive integer.
    :return: the smallest positive integer divisible by both x and y.
    """
    # Local import so the surrounding file needs no new top-level imports.
    # math.lcm itself only exists on Python 3.9+, hence gcd is used.
    from math import gcd
    # // keeps the result an int; x * y is always divisible by gcd(x, y).
    return x * y // gcd(x, y)
|
(x, y)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.