code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
root = ET.fromstring(content) if anyconfig.compat.IS_PYTHON_3: stream = BytesIO(content) else: stream = anyconfig.compat.StringIO(content) nspaces = _namespaces_from_file(stream) return root_to_container(root, container=container, nspaces=nspaces, **opts)
def load_from_string(self, content, container, **opts)
Load config from XML snippet (a string 'content'). :param content: XML snippet string of str (python 2) or bytes (python 3) type :param container: callable to make a container object :param opts: optional keyword parameters :return: Dict-like object holding config parameters
5.292384
5.372616
0.985066
root = ET.parse(filepath).getroot() nspaces = _namespaces_from_file(filepath) return root_to_container(root, container=container, nspaces=nspaces, **opts)
def load_from_path(self, filepath, container, **opts)
:param filepath: XML file path :param container: callable to make a container object :param opts: optional keyword parameters to be sanitized :return: Dict-like object holding config parameters
4.453027
5.098033
0.873479
root = ET.parse(stream).getroot() path = anyconfig.utils.get_path_from_stream(stream) nspaces = _namespaces_from_file(path) return root_to_container(root, container=container, nspaces=nspaces, **opts)
def load_from_stream(self, stream, container, **opts)
:param stream: XML file or file-like object :param container: callable to make a container object :param opts: optional keyword parameters to be sanitized :return: Dict-like object holding config parameters
5.123688
5.355992
0.956627
tree = container_to_etree(cnf, **opts) buf = BytesIO() etree_write(tree, buf) return buf.getvalue()
def dump_to_string(self, cnf, **opts)
:param cnf: Configuration data to dump :param opts: optional keyword parameters :return: string representation of the configuration
5.358605
7.21691
0.742507
tree = container_to_etree(cnf, **opts) etree_write(tree, stream)
def dump_to_stream(self, cnf, stream, **opts)
:param cnf: Configuration data to dump :param stream: Config file or file-like object to write to :param opts: optional keyword parameters
8.684388
12.109083
0.71718
return anyconfig.utils.filter_options([k for k in options.keys() if k != key], options)
def filter_from_options(key, options)
:param key: Key str in options :param options: Mapping object :return: New mapping object from 'options' in which the item with 'key' is filtered out >>> filter_from_options('a', dict(a=1, b=2)) {'b': 2}
10.095901
14.142682
0.71386
def construct_mapping(loader, node, deep=False): loader.flatten_mapping(node) if not isinstance(node, yaml.MappingNode): msg = "expected a mapping node, but found %s" % node.id raise yaml.constructor.ConstructorError(None, None, msg, node.start_mark) mapping = container() for key_node, value_node in node.value: key = loader.construct_object(key_node, deep=deep) try: hash(key) except TypeError as exc: eargs = ("while constructing a mapping", node.start_mark, "found unacceptable key (%s)" % exc, key_node.start_mark) raise yaml.constructor.ConstructorError(*eargs) value = loader.construct_object(value_node, deep=deep) mapping[key] = value return mapping tag = "tag:yaml.org,2002:python/unicode" def construct_ustr(loader, node): return loader.construct_scalar(node) try: loader.add_constructor(tag, construct_ustr) except NameError: pass if type(container) != dict: loader.add_constructor(mapping_tag, construct_mapping) return loader
def _customized_loader(container, loader=Loader, mapping_tag=_MAPPING_TAG)
Create or update a loader, making the given callable 'container' be used to construct mapping objects such as dict and OrderedDict from yaml mapping nodes internally. :param container: Container callable used internally
2.027966
2.078822
0.975536
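The following is a minimal sketch of the idea behind _customized_loader, written against plain PyYAML (assuming a recent PyYAML whose add_constructor accepts the Loader keyword; OrderedDict stands in for a custom 'container'):
    from collections import OrderedDict
    import yaml

    def odict_constructor(loader, node):
        loader.flatten_mapping(node)  # resolve merge keys first
        return OrderedDict(loader.construct_pairs(node))

    yaml.add_constructor("tag:yaml.org,2002:map", odict_constructor,
                         Loader=yaml.SafeLoader)
    data = yaml.load("b: 1\na: 2", Loader=yaml.SafeLoader)
    assert list(data) == ["b", "a"]  # item order preserved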
def container_representer(dumper, data, mapping_tag=_MAPPING_TAG): return dumper.represent_mapping(mapping_tag, data.items()) def ustr_representer(dumper, data): tag = "tag:yaml.org,2002:python/unicode" return dumper.represent_scalar(tag, data) try: dumper.add_representer(unicode, ustr_representer) except NameError: pass if type(container) != dict: dumper.add_representer(container, container_representer) return dumper
def _customized_dumper(container, dumper=Dumper)
Counterpart of :func:`_customized_loader` for dumpers.
2.928876
2.9417
0.99564
key = "ac_safe" fnc = getattr(yaml, r"safe_" + fname if options.get(key) else fname) return fnc(*args, **common.filter_from_options(key, options))
def yml_fnc(fname, *args, **options)
A wrapper of yaml.safe_load, yaml.load, yaml.safe_dump and yaml.dump. :param fname: "load" or "dump", not checked but it should be OK. see also :func:`yml_load` and :func:`yml_dump` :param args: [stream] for load or [cnf, stream] for dump :param options: keyword args which may contain "ac_safe" to load/dump safely
10.738453
6.903293
1.555555
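A brief usage sketch of the dispatch above: with "ac_safe" set, the wrapper resolves to the safe_ variants of the yaml functions (values here are illustrative).
    assert yml_fnc("load", "a: 1", ac_safe=True) == {"a": 1}   # -> yaml.safe_load
    s = yml_fnc("dump", {"a": 1}, None, ac_safe=True)          # -> yaml.safe_dump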
if options.get("ac_safe", False): options = {} # yaml.safe_load does not process Loader opts. elif not options.get("Loader"): maybe_container = options.get("ac_dict", False) if maybe_container and callable(maybe_container): container = maybe_container options["Loader"] = _customized_loader(container) ret = yml_fnc("load", stream, **common.filter_from_options("ac_dict", options)) if ret is None: return container() return ret
def yml_load(stream, container, yml_fnc=yml_fnc, **options)
A wrapper of yaml.safe_load and yaml.load. :param stream: a file or file-like object to load YAML content :param container: callable to make a container object :return: Mapping object
7.051744
6.849753
1.029489
_is_dict = anyconfig.utils.is_dict_like(data) if options.get("ac_safe", False): options = {} elif not options.get("Dumper", False) and _is_dict: # TODO: Any other way to get its constructor? maybe_container = options.get("ac_dict", type(data)) options["Dumper"] = _customized_dumper(maybe_container) if _is_dict: # Type information and the order of items are lost on dump currently. data = anyconfig.dicts.convert_to(data, ac_dict=dict) options = common.filter_from_options("ac_dict", options) return yml_fnc("dump", data, stream, **options)
def yml_dump(data, stream, yml_fnc=yml_fnc, **options)
A wrapper of yaml.safe_dump and yaml.dump. :param data: Some data to dump :param stream: a file or file-like object to dump YAML data
7.32132
7.997176
0.915488
if not path: return [] for sep in seps: if sep in path: if path == sep: # Special case, '/' or '.' only. return [''] return [x for x in path.split(sep) if x] return [path]
def _split_path(path, seps=PATH_SEPS)
Parse path expression and return list of path items. :param path: Path expression may contain separator chars. :param seps: Separator char candidates. :return: A list of keys to fetch object[s] later. >>> assert _split_path('') == [] >>> assert _split_path('/') == [''] # JSON Pointer spec expects this. >>> for p in ('/a', '.a', 'a', 'a.'): ... assert _split_path(p) == ['a'], p >>> assert _split_path('/a/b/c') == _split_path('a.b.c') == ['a', 'b', 'c'] >>> assert _split_path('abc') == ['abc']
3.883903
4.576641
0.848636
ret = None for key in reversed(_split_path(path, seps)): ret = {key: val if ret is None else ret.copy()} return ret
def mk_nested_dic(path, val, seps=PATH_SEPS)
Make a nested dict iteratively. :param path: Path expression to make a nested dict :param val: Value to set :param seps: Separator char candidates >>> mk_nested_dic("a.b.c", 1) {'a': {'b': {'c': 1}}} >>> mk_nested_dic("/a/b/c", 1) {'a': {'b': {'c': 1}}}
5.149137
8.498191
0.60591
items = [_jsnp_unescape(p) for p in _split_path(path, seps)] if not items: return (dic, '') try: if len(items) == 1: return (dic[items[0]], '') prnt = functools.reduce(operator.getitem, items[:-1], dic) arr = anyconfig.utils.is_list_like(prnt) and idx_reg.match(items[-1]) return (prnt[int(items[-1])], '') if arr else (prnt[items[-1]], '') except (TypeError, KeyError, IndexError) as exc: return (None, str(exc))
def get(dic, path, seps=PATH_SEPS, idx_reg=_JSNP_GET_ARRAY_IDX_REG)
getter for nested dicts. :param dic: a dict[-like] object :param path: Path expression to point object wanted :param seps: Separator char candidates :return: A tuple of (result_object, error_message) >>> d = {'a': {'b': {'c': 0, 'd': [1, 2]}}, '': 3} >>> assert get(d, '/') == (3, '') # key becomes '' (empty string). >>> assert get(d, "/a/b/c") == (0, '') >>> sorted(get(d, "a.b")[0].items()) [('c', 0), ('d', [1, 2])] >>> (get(d, "a.b.d"), get(d, "/a/b/d/1")) (([1, 2], ''), (2, '')) >>> get(d, "a.b.key_not_exist") # doctest: +ELLIPSIS (None, "'...'") >>> get(d, "/a/b/d/2") (None, 'list index out of range') >>> get(d, "/a/b/d/-") # doctest: +ELLIPSIS (None, 'list indices must be integers...')
3.01755
3.392284
0.889533
merge(dic, mk_nested_dic(path, val, seps), ac_merge=MS_DICTS)
def set_(dic, path, val, seps=PATH_SEPS)
setter for nested dicts. :param dic: a dict[-like] object support recursive merge operations :param path: Path expression to point object wanted :param seps: Separator char candidates >>> d = dict(a=1, b=dict(c=2, )) >>> set_(d, 'a.b.d', 3) >>> d['a']['b']['d'] 3
22.736368
42.519005
0.534734
self[key] = other[key] if val is None else val
def _update_with_replace(self, other, key, val=None, **options)
Replace the value in the mapping object 'self' with the one 'other' has if both have the same key on update. Otherwise, just keep the value of 'self'. :param self: mapping object to update with 'other' :param other: mapping object to update 'self' :param key: key of mapping object to update :param val: value to update self[key] with alternatively :return: None but 'self' will be updated
6.24052
7.051966
0.884933
self[key] += [x for x in lst if x not in self[key]]
def _merge_list(self, key, lst)
:param key: self[key] will be updated :param lst: Other list to merge
5.24784
4.176446
1.256532
if val is None: val = other[key] if key in self: val0 = self[key] # Original value if anyconfig.utils.is_dict_like(val0): # It needs recursive updates. merge(self[key], val, merge_lists=merge_lists, **options) elif merge_lists and _are_list_like(val, val0): _merge_list(self, key, val) else: _merge_other(self, key, val) else: self[key] = val
def _update_with_merge(self, other, key, val=None, merge_lists=False, **options)
Merge the value of self with other's recursively. Merge behavior varies depending on the types of the original and new values. - mapping vs. mapping -> merge recursively - list vs. list -> varies depending on 'merge_lists'; see its description. :param other: a dict[-like] object or a list of (key, value) tuples :param key: key of mapping object to update :param val: value to update self[key] :param merge_lists: Merge not only dicts but also lists. For example, [1, 2, 3], [3, 4] ==> [1, 2, 3, 4] [1, 2, 2], [2, 4] ==> [1, 2, 2, 4] :return: None but 'self' will be updated
3.996053
4.022808
0.993349
_update_with_merge(self, other, key, val=val, merge_lists=True, **options)
def _update_with_merge_lists(self, other, key, val=None, **options)
Similar to _update_with_merge but always merges lists. :param self: mapping object to update with 'other' :param other: mapping object to update 'self' :param key: key of mapping object to update :param val: value to update self[key] with alternatively :return: None but 'self' will be updated
4.830817
3.969266
1.217055
if strategy is None: strategy = MS_DICTS try: return _MERGE_FNS[strategy] except KeyError: if callable(strategy): return strategy raise ValueError("Wrong merge strategy: %r" % strategy)
def _get_update_fn(strategy)
Select the update function based on the given merge strategy. :param strategy: Merge strategy, one of MERGE_STRATEGIES or a callable, specifying how to merge dicts. :return: Callable to update objects
7.609179
7.672966
0.991687
_update_fn = _get_update_fn(ac_merge) if hasattr(other, "keys"): for key in other: _update_fn(self, other, key, **options) else: try: for key, val in other: _update_fn(self, other, key, val=val, **options) except (ValueError, TypeError) as exc: # Re-raise w/ info. raise type(exc)("%s other=%r" % (str(exc), other))
def merge(self, other, ac_merge=MS_DICTS, **options)
Update (merge) a mapping object 'self' with another mapping object or an iterable yielding (key, value) tuples, based on merge strategy 'ac_merge'. :param other: a dict[-like] object or an iterable of (key, value) tuples :param ac_merge: Merge strategy to choose :param options: optional keyword arguments passed to the update function
3.638455
3.912412
0.929977
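A short sketch of the strategies described above (the strategy names MS_DICTS and MS_DICTS_AND_LISTS are assumed members of MERGE_STRATEGIES):
    cnf = {"a": 1, "b": {"c": [1, 2]}}
    merge(cnf, {"b": {"c": [2, 3], "d": 4}}, ac_merge=MS_DICTS)
    assert cnf == {"a": 1, "b": {"c": [2, 3], "d": 4}}   # dicts merged, lists replaced

    cnf = {"b": {"c": [1, 2]}}
    merge(cnf, {"b": {"c": [2, 3]}}, ac_merge=MS_DICTS_AND_LISTS)
    assert cnf == {"b": {"c": [1, 2, 3]}}                # lists merged, duplicates skipped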
if ac_dict is None: ac_dict = anyconfig.compat.OrderedDict if ac_ordered else dict return ac_dict((k, None if v is None else make_fn(v, **options)) for k, v in obj.items())
def _make_recur(obj, make_fn, ac_ordered=False, ac_dict=None, **options)
:param obj: A mapping object or other primitive object :param make_fn: Function to make/convert to :param ac_ordered: Use OrderedDict instead of dict to keep order of items :param ac_dict: Callable to convert 'obj' to mapping object :param options: Optional keyword arguments. :return: Mapping object
3.126314
3.662921
0.853503
return type(obj)(make_fn(v, **options) for v in obj)
def _make_iter(obj, make_fn, **options)
:param obj: A list-like object :param make_fn: Function to make/convert to :param options: Optional keyword arguments. :return: Iterable object of the same type as 'obj'
4.088606
9.734611
0.420007
options.update(ac_ordered=ac_ordered, ac_dict=ac_dict) if anyconfig.utils.is_dict_like(obj): return _make_recur(obj, convert_to, **options) if anyconfig.utils.is_list_like(obj): return _make_iter(obj, convert_to, **options) return obj
def convert_to(obj, ac_ordered=False, ac_dict=None, **options)
Convert mapping objects to dict or 'ac_dict' objects recursively. Borrowed basic idea and implementation from bunch.unbunchify. (bunch is distributed under MIT license same as this.) :param obj: A mapping object or other primitive object :param ac_ordered: Use OrderedDict instead of dict to keep order of items :param ac_dict: Callable to convert 'obj' to mapping object :param options: Optional keyword arguments. :return: A dict or OrderedDict or object made by 'ac_dict' >>> OD = anyconfig.compat.OrderedDict >>> convert_to(OD((('a', 1) ,)), cls=dict) {'a': 1} >>> convert_to(OD((('a', OD((('b', OD((('c', 1), ))), ))), )), cls=dict) {'a': {'b': {'c': 1}}}
2.901579
3.654885
0.793891
pair = re.split(r"(?:\s+)?(?:(?<!\\)[=:])", line.strip(), 1) key = pair[0].rstrip() if len(pair) < 2: LOGGER.warning("Invalid line found: %s", line) return (key or None, '') return (key, pair[1].strip())
def _parseline(line)
Parse a line of Java properties file. :param line: A string to parse, must not start with ' ', '#' or '!' (comment) :return: A tuple of (key, value), both key and value may be None >>> _parseline(" ") (None, '') >>> _parseline("aaa:") ('aaa', '') >>> _parseline(" aaa:") ('aaa', '') >>> _parseline("aaa") ('aaa', '') >>> _parseline("url = http://localhost") ('url', 'http://localhost') >>> _parseline("calendar.japanese.type: LocalGregorianCalendar") ('calendar.japanese.type', 'LocalGregorianCalendar')
5.073298
5.824672
0.871001
if not line: return None if any(c in line for c in comment_markers): if line.startswith(comment_markers): return None return line
def _pre_process_line(line, comment_markers=_COMMENT_MARKERS)
Preprocess a line in properties; strip comments, etc. :param line: A string not starting w/ any white spaces and ending w/ line breaks. It may be empty. see also: :func:`load`. :param comment_markers: Comment markers, e.g. '#' (hash) >>> _pre_process_line('') is None True >>> s0 = "calendar.japanese.type: LocalGregorianCalendar" >>> _pre_process_line("# " + s0) is None True >>> _pre_process_line("! " + s0) is None True >>> _pre_process_line(s0 + "# comment") 'calendar.japanese.type: LocalGregorianCalendar# comment'
3.475222
4.684774
0.741812
ret = container() prev = "" for line in stream.readlines(): line = _pre_process_line(prev + line.strip().rstrip(), comment_markers) # I don't think the latter case may happen but just in case. if line is None or not line: continue prev = "" # re-initialize for later use. if line.endswith("\\"): prev += line.rstrip(" \\") continue (key, val) = _parseline(line) if key is None: LOGGER.warning("Failed to parse the line: %s", line) continue ret[key] = unescape(val) return ret
def load(stream, container=dict, comment_markers=_COMMENT_MARKERS)
Load and parse Java properties file given as a file or file-like object 'stream'. :param stream: A file or file-like object of Java properties files :param container: Factory function to create a dict-like object to store properties :param comment_markers: Comment markers, e.g. '#' (hash) :return: Dict-like object holding properties >>> to_strm = anyconfig.compat.StringIO >>> s0 = "calendar.japanese.type: LocalGregorianCalendar" >>> load(to_strm('')) {} >>> load(to_strm("# " + s0)) {} >>> load(to_strm("! " + s0)) {} >>> load(to_strm("calendar.japanese.type:")) {'calendar.japanese.type': ''} >>> load(to_strm(s0)) {'calendar.japanese.type': 'LocalGregorianCalendar'} >>> load(to_strm(s0 + "# ...")) {'calendar.japanese.type': 'LocalGregorianCalendar# ...'} >>> s1 = r"key=a\\:b" >>> load(to_strm(s1)) {'key': 'a:b'} >>> s2 = '''application/postscript: \\ ... x=Postscript File;y=.eps,.ps ... ''' >>> load(to_strm(s2)) {'application/postscript': 'x=Postscript File;y=.eps,.ps'}
5.032166
5.662945
0.888613
if encoding is None: encoding = locale.getdefaultlocale()[1] return codecs.open(filepath, flag, encoding)
def copen(filepath, flag='r', encoding=None)
FIXME: How to test this ? >>> c = copen(__file__) >>> c is not None True
2.584073
3.197935
0.808044
tmpldir = os.path.abspath(os.path.dirname(template_file)) return [tmpldir] if paths is None else paths + [tmpldir]
def make_template_paths(template_file, paths=None)
Make up a list of template search paths from given 'template_file' (absolute or relative path to the template file) and/or 'paths' (a list of template search paths given by user). NOTE: User-given 'paths' will take higher priority over a dir of template_file. :param template_file: Absolute or relative path to the template file :param paths: A list of template search paths :return: List of template paths ([str]) >>> make_template_paths("/path/to/a/template") ['/path/to/a'] >>> make_template_paths("/path/to/a/template", ["/tmp"]) ['/tmp', '/path/to/a'] >>> os.chdir("/tmp") >>> make_template_paths("./path/to/a/template") ['/tmp/path/to/a'] >>> make_template_paths("./path/to/a/template", ["/tmp"]) ['/tmp', '/tmp/path/to/a']
3.098998
4.540938
0.682458
if paths is None: paths = [os.curdir] env = tmpl_env(paths) if env is None: return tmpl_s if filters is not None: env.filters.update(filters) if ctx is None: ctx = {} return env.from_string(tmpl_s).render(**ctx)
def render_s(tmpl_s, ctx=None, paths=None, filters=None)
Compile and render given template string 'tmpl_s' with context 'ctx'. :param tmpl_s: Template string :param ctx: Context dict needed to instantiate templates :param paths: Template search paths :param filters: Custom filters to add into template engine :return: Compiled result (str) >>> render_s("aaa") == "aaa" True >>> s = render_s('a = {{ a }}, b = "{{ b }}"', {'a': 1, 'b': 'bbb'}) >>> if SUPPORTED: ... assert s == 'a = 1, b = "bbb"'
2.686325
3.980246
0.674914
env = tmpl_env(make_template_paths(template_file, paths)) if env is None: return copen(template_file).read() if filters is not None: env.filters.update(filters) if ctx is None: ctx = {} return env.get_template(os.path.basename(template_file)).render(**ctx)
def render_impl(template_file, ctx=None, paths=None, filters=None)
:param template_file: Absolute or relative path to the template file :param ctx: Context dict needed to instantiate templates :param filters: Custom filters to add into template engine :return: Compiled result (str)
3.081078
3.957344
0.778572
try: return render_impl(filepath, ctx, paths, filters) except TemplateNotFound as mtmpl: if not ask: raise usr_tmpl = anyconfig.compat.raw_input(os.linesep + "" "*** Missing template " "'%s'. Please enter absolute " "or relative path starting from " "'.' to the template file: " % mtmpl) usr_tmpl = os.path.normpath(usr_tmpl.strip()) paths = make_template_paths(usr_tmpl, paths) return render_impl(usr_tmpl, ctx, paths, filters)
def render(filepath, ctx=None, paths=None, ask=False, filters=None)
Compile and render template and return the result as a string. :param filepath: Absolute or relative path to the template file :param ctx: Context dict needed to instantiate templates :param paths: Template search paths :param ask: Ask user for missing template location if True :param filters: Custom filters to add into template engine :return: Compiled result (str)
5.034038
5.049458
0.996946
if filepath is None and content is None: raise ValueError("Either 'filepath' or 'content' must be some value!") tmpl_s = filepath or content[:10] + " ..." LOGGER.debug("Compiling: %s", tmpl_s) try: if content is None: render_opts = anyconfig.utils.filter_options(RENDER_OPTS, options) return render(filepath, **render_opts) render_s_opts = anyconfig.utils.filter_options(RENDER_S_OPTS, options) return render_s(content, **render_s_opts) except Exception as exc: LOGGER.warning("Failed to compile '%s'. It may not be a template.%s" "exc=%r", tmpl_s, os.linesep, exc) return None
def try_render(filepath=None, content=None, **options)
Compile and render template and return the result as a string. :param filepath: Absolute or relative path to the template file :param content: Template content (str) :param options: Keyword options passed to :func:`render` defined above. :return: Compiled result (str) or None
4.215547
4.137407
1.018886
if (val_s.startswith('"') and val_s.endswith('"')) or \ (val_s.startswith("'") and val_s.endswith("'")): return val_s[1:-1] if sep in val_s: return [P.parse(x) for x in P.parse_list(val_s)] return P.parse(val_s)
def _parse(val_s, sep=_SEP)
FIXME: May be too naive implementation. :param val_s: A string represents some value to parse :param sep: separator between values >>> _parse(r'"foo string"') 'foo string' >>> _parse("a, b, c") ['a', 'b', 'c'] >>> _parse("aaa") 'aaa'
2.505124
2.741596
0.913747
if anyconfig.utils.is_iterable(val): return sep.join(str(x) for x in val) return str(val)
def _to_s(val, sep=", ")
Convert any to string. :param val: An object :param sep: separator between values >>> _to_s([1, 2, 3]) '1, 2, 3' >>> _to_s("aaa") 'aaa'
4.202553
6.852732
0.613267
parse = _parse if options.get("ac_parse_value") else anyconfig.utils.noop for key, val in items: yield (key, parse(val, sep))
def _parsed_items(items, sep=_SEP, **options)
:param items: List of pairs, [(key, value)], or a generator yielding pairs :param sep: Separator string :return: Generator yielding (key, value) pairs
11.846108
12.290353
0.963854
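A small sketch of the toggle above: "ac_parse_value" turns value parsing on, otherwise values pass through via a no-op (the default separator _SEP is assumed to be ','):
    items = [("a", "1"), ("b", "x, y")]
    assert dict(_parsed_items(items, ac_parse_value=True)) == {"a": 1, "b": ["x", "y"]}
    assert dict(_parsed_items(items)) == {"a": "1", "b": "x, y"}  # pass-through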
# Optional arguments for configparser.SafeConfigParser{,readfp} kwargs_0 = filter_options(("defaults", "dict_type", "allow_no_value"), kwargs) kwargs_1 = filter_options(("filename", ), kwargs) try: parser = configparser.SafeConfigParser(**kwargs_0) except TypeError: # .. note:: # It seems ConfigParser.*ConfigParser in python 2.6 does not support # 'allow_no_value' option parameter, and TypeError will be thrown. kwargs_0 = filter_options(("defaults", "dict_type"), kwargs) parser = configparser.SafeConfigParser(**kwargs_0) return (kwargs_1, parser)
def _make_parser(**kwargs)
:return: (keyword args to be used, parser object)
5.808303
5.330718
1.089591
(kwargs_1, psr) = _make_parser(**kwargs) if IS_PYTHON_3: psr.read_file(stream, **kwargs_1) else: psr.readfp(stream, **kwargs_1) cnf = container() kwargs["sep"] = sep defaults = psr.defaults() if defaults: cnf[dkey] = container(_parsed_items(iteritems(defaults), **kwargs)) for sect in psr.sections(): cnf[sect] = container(_parsed_items(psr.items(sect), **kwargs)) return cnf
def _load(stream, container, sep=_SEP, dkey=DEFAULTSECT, **kwargs)
:param stream: File or file-like object providing ini-style conf :param container: any callable to make container :param sep: Separator string :param dkey: Default section name :return: Dict or dict-like object representing config values
4.093264
4.540836
0.901434
for sect, params in iteritems(cnf): yield "[%s]" % sect for key, val in iteritems(params): if sect != dkey and dkey in cnf and cnf[dkey].get(key) == val: continue # It should be in [DEFAULT] section. yield "%s = %s" % (key, _to_s(val)) yield ''
def _dumps_itr(cnf, dkey=DEFAULTSECT)
:param cnf: Configuration data to dump
4.634859
4.388885
1.056045
return os.linesep.join(_dumps_itr(cnf))
def _dumps(cnf, **kwargs)
:param cnf: Configuration data to dump :param kwargs: optional keyword parameters to be sanitized :: dict :return: String representation of 'cnf' object in INI format
9.704336
15.170405
0.639689
expression = options.get("ac_query", None) if expression is None or not expression: return data try: pexp = jmespath.compile(expression) return pexp.search(data) except ValueError as exc: # jmespath.exceptions.*Error inherit from it. LOGGER.warning("Failed to compile or search: exp=%s, exc=%r", expression, exc) except (NameError, AttributeError): LOGGER.warning("Filter module (jmespath) is not available. " "Do nothing.") return data
def query(data, **options)
Filter data with given JMESPath expression. See also: https://github.com/jmespath/jmespath.py and http://jmespath.org. :param data: Target object (a dict or a dict-like object) to query :param options: Keyword option may include 'ac_query' which is a string represents JMESPath expression. :return: Maybe queried result data, primitive (int, str, ...) or dict
7.132015
5.924632
1.20379
return itertools.groupby(sorted(itr, key=key_fn), key=key_fn)
def groupby(itr, key_fn=None)
A wrapper function around itertools.groupby which sorts 'itr' first. :param itr: Iterable object, a list/tuple/generator, etc. :param key_fn: Key function to sort 'itr'. >>> import operator >>> itr = [("a", 1), ("b", -1), ("c", 1)] >>> res = groupby(itr, operator.itemgetter(1)) >>> [(key, tuple(grp)) for key, grp in res] [(-1, (('b', -1),)), (1, (('a', 1), ('c', 1)))]
2.946923
4.852078
0.607353
_ext = os.path.splitext(file_path)[-1] if _ext: return _ext[1:] if _ext.startswith('.') else _ext return ""
def get_file_extension(file_path)
>>> get_file_extension("/a/b/c") '' >>> get_file_extension("/a/b.txt") 'txt' >>> get_file_extension("/a/b/c.tar.xz") 'xz'
3.329692
3.855441
0.863635
return isinstance(obj, (list, tuple, types.GeneratorType)) or \ (not isinstance(obj, (int, str, dict)) and bool(getattr(obj, "next", False)))
def is_iterable(obj)
>>> is_iterable([]) True >>> is_iterable(()) True >>> is_iterable([x for x in range(10)]) True >>> is_iterable((1, 2, 3)) True >>> g = (x for x in range(10)) >>> is_iterable(g) True >>> is_iterable("abc") False >>> is_iterable(0) False >>> is_iterable({}) False
4.213009
5.422601
0.776935
if keys is None: keys = anyconfig.globals.IOI_KEYS if isinstance(obj, tuple) and getattr(obj, "_asdict", False): return all(k in obj._asdict() for k in keys) return False
def is_ioinfo(obj, keys=None)
:return: True if given 'obj' is a 'IOInfo' namedtuple object. >>> assert not is_ioinfo(1) >>> assert not is_ioinfo("aaa") >>> assert not is_ioinfo({}) >>> assert not is_ioinfo(('a', 1, {})) >>> inp = anyconfig.globals.IOInfo("/etc/hosts", "path", "/etc/hosts", ... None, open) >>> assert is_ioinfo(inp)
5.159624
5.398166
0.955811
return ((is_path(obj) and marker not in obj) or (is_path_obj(obj) and marker not in obj.as_posix()) or is_file_stream(obj) or is_ioinfo(obj))
def is_path_like_object(obj, marker='*')
Is given object 'obj' a path string, a pathlib.Path, a file / file-like (stream) or IOInfo namedtuple object? :param obj: a path string, pathlib.Path object, a file / file-like or 'IOInfo' object :return: True if 'obj' is a path string or a pathlib.Path object or a file (stream) object >>> assert is_path_like_object(__file__) >>> assert not is_path_like_object("/a/b/c/*.json", '*') >>> from anyconfig.compat import pathlib >>> if pathlib is not None: ... assert is_path_like_object(pathlib.Path("a.ini")) ... assert not is_path_like_object(pathlib.Path("x.ini"), 'x') >>> assert is_path_like_object(open(__file__))
4.711252
5.374665
0.876567
return ((is_path(maybe_paths) and marker in maybe_paths) or # Path str (is_path_obj(maybe_paths) and marker in maybe_paths.as_posix()) or (is_iterable(maybe_paths) and all(is_path(p) or is_ioinfo(p) for p in maybe_paths)))
def is_paths(maybe_paths, marker='*')
Does given object 'maybe_paths' consist of path or path pattern strings?
3.299178
3.176947
1.038474
if not is_file_stream(strm): raise ValueError("Given object does not look like a file/file-like " "object: %r" % strm) path = getattr(strm, "name", None) if path is not None: try: return normpath(path) except (TypeError, ValueError): pass return None
def get_path_from_stream(strm)
Try to get file path from given file or file-like object 'strm'. :param strm: A file or file-like object :return: Path of given file or file-like object or None :raises: ValueError >>> assert __file__ == get_path_from_stream(open(__file__, 'r')) >>> assert get_path_from_stream(anyconfig.compat.StringIO()) is None >>> get_path_from_stream(__file__) # doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: ...
3.899582
4.15224
0.939151
if is_path(obj): path = obj elif is_path_obj(obj): return obj.suffix[1:] elif is_file_stream(obj): try: path = get_path_from_stream(obj) except ValueError: return None elif is_ioinfo(obj): path = obj.path else: return None if path: return get_file_extension(path) return None
def _try_to_get_extension(obj)
Try to get file extension from given path or file object. :param obj: a file, file-like object or something :return: File extension or None >>> _try_to_get_extension("a.py") 'py'
3.189211
3.466094
0.920117
if not objs: return False ext = _try_to_get_extension(objs[0]) if ext is None: return False return all(_try_to_get_extension(p) == ext for p in objs[1:])
def are_same_file_types(objs)
Are the given (maybe) file objects all of the same type (extension)? :param objs: A list of file path or file(-like) objects >>> are_same_file_types([]) False >>> are_same_file_types(["a.conf"]) True >>> are_same_file_types(["a.conf", "b.conf"]) True >>> are_same_file_types(["a.yml", "b.yml"]) True >>> are_same_file_types(["a.yml", "b.json"]) False >>> strm = anyconfig.compat.StringIO() >>> are_same_file_types(["a.yml", "b.yml", strm]) False
3.185437
4.248878
0.749713
for path in paths: if is_path(path): if marker in path: # glob path pattern for ppath in sglob(path): yield ppath else: yield path # a simple file path elif is_path_obj(path): if marker in path.as_posix(): for ppath in sglob(path.as_posix()): yield normpath(ppath) else: yield normpath(path.as_posix()) elif is_ioinfo(path): yield path.path else: # A file or file-like object yield path
def _expand_paths_itr(paths, marker='*')
Iterator version of :func:`expand_paths`.
3.420606
3.369488
1.015171
if is_path(paths) and marker in paths: return sglob(paths) if is_path_obj(paths) and marker in paths.as_posix(): # TBD: Is it better to return [p :: pathlib.Path] instead? return [normpath(p) for p in sglob(paths.as_posix())] return list(_expand_paths_itr(paths, marker=marker))
def expand_paths(paths, marker='*')
:param paths: A glob path pattern string or pathlib.Path object holding such path, or a list consists of path strings or glob path pattern strings or pathlib.Path object holding such ones, or file objects :param marker: Glob marker character or string, e.g. '*' :return: List of path strings >>> expand_paths([]) [] >>> expand_paths("/usr/lib/a/b.conf /etc/a/b.conf /run/a/b.conf".split()) ['/usr/lib/a/b.conf', '/etc/a/b.conf', '/run/a/b.conf'] >>> paths_s = os.path.join(os.path.dirname(__file__), "u*.py") >>> ref = sglob(paths_s) >>> assert expand_paths(paths_s) == ref >>> ref = ["/etc/a.conf"] + ref >>> assert expand_paths(["/etc/a.conf", paths_s]) == ref >>> strm = anyconfig.compat.StringIO() >>> assert expand_paths(["/etc/a.conf", strm]) == ["/etc/a.conf", strm]
5.742288
5.921287
0.96977
return isinstance(obj, _LIST_LIKE_TYPES) and \ not (isinstance(obj, anyconfig.compat.STR_TYPES) or is_dict_like(obj))
def is_list_like(obj)
>>> is_list_like([]) True >>> is_list_like(()) True >>> is_list_like([x for x in range(10)]) True >>> is_list_like((1, 2, 3)) True >>> g = (x for x in range(10)) >>> is_list_like(g) True >>> is_list_like("abc") False >>> is_list_like(0) False >>> is_list_like({}) False
6.579438
7.62247
0.863163
return dict((k, options[k]) for k in keys if k in options)
def filter_options(keys, options)
Filter 'options' with given 'keys'. :param keys: key names of optional keyword arguments :param options: optional keyword arguments to filter with 'keys' >>> filter_options(("aaa", ), dict(aaa=1, bbb=2)) {'aaa': 1} >>> filter_options(("aaa", ), dict(bbb=2)) {}
3.147402
5.151615
0.610955
cache = dict() @functools.wraps(fnc) def wrapped(*args, **kwargs): key = repr(args) + repr(kwargs) if key not in cache: cache[key] = fnc(*args, **kwargs) return cache[key] return wrapped
def memoize(fnc)
Memoization decorator. >>> import random >>> imax = 100 >>> def fnc1(arg=True): ... return arg and random.choice((True, False)) >>> fnc2 = memoize(fnc1) >>> (ret1, ret2) = (fnc1(), fnc2()) >>> assert any(fnc1() != ret1 for i in range(imax)) >>> assert all(fnc2() == ret2 for i in range(imax))
1.891209
2.66049
0.71085
valid = True if schema: (valid, msg) = validate(cnf, schema, **options) if msg: LOGGER.warning(msg) if valid: return cnf return None
def _try_validate(cnf, schema, **options)
:param cnf: Mapping object represents configuration data :param schema: JSON schema object :param options: Keyword options passed to :func:`jsonschema.validate` :return: Given 'cnf' as it is if validation succeeds else None
4.053784
4.721679
0.858547
ac_schema = options.get("ac_schema", None) if ac_schema is not None: # Try to detect the appropriate parser to load the schema data as it # may be different from the original config file's format, perhaps. options["ac_parser"] = None options["ac_schema"] = None # Avoid infinite loop. LOGGER.info("Loading schema: %s", ac_schema) return load(ac_schema, **options) return None
def _maybe_schema(**options)
:param options: Optional keyword arguments such as - ac_template: Assume configuration file may be a template file and try to compile it AAR if True - ac_context: Mapping object presenting context to instantiate template - ac_schema: JSON schema file path to validate configuration files :return: Mapping object or None on errors
6.498748
6.873446
0.945486
psr = find(path, forced_type=ac_parser) if mode is not None and mode.startswith('w'): return psr.wopen(path, **options) return psr.ropen(path, **options)
def open(path, mode=None, ac_parser=None, **options)
Open given configuration file with appropriate open flag. :param path: Configuration file path :param mode: Can be 'r' and 'rb' for reading (default) or 'w', 'wb' for writing. Please note that even if you specify 'r' or 'w', it will be changed to 'rb' or 'wb' if the selected backend, xml and configobj for example, prefers that for the given config file. :param options: Optional keyword arguments passed to the internal file opening APIs of each backend, such as the 'buffering' optional parameter passed to the builtin 'open' function. :return: A file object or None on any errors :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError
5.477201
7.953749
0.688631
ioi = anyconfig.ioinfo.make(input_) psr = find(ioi, forced_type=ac_parser) filepath = ioi.path # .. note:: # This will be kept for backward compatibility until 'ignore_missing' # option is deprecated and removed completely. if "ignore_missing" in options: warnings.warn("keyword option 'ignore_missing' is deprecated, use " "'ac_ignore_missing' instead", DeprecationWarning) options["ac_ignore_missing"] = options["ignore_missing"] LOGGER.info("Loading: %s", filepath) if ac_template and filepath is not None: content = anyconfig.template.try_render(filepath=filepath, ctx=ac_context, **options) if content is not None: return psr.loads(content, **options) return psr.load(ioi, **options)
def _single_load(input_, ac_parser=None, ac_template=False, ac_context=None, **options)
:param input_: File path or file or file-like object or pathlib.Path object representing the file, or a namedtuple 'anyconfig.globals.IOInfo' object representing some input to load some data from :param ac_parser: Forced parser type or parser object itself :param ac_template: Assume configuration file may be a template file and try to compile it AAR if True :param ac_context: A dict presenting context to instantiate template :param options: Optional keyword arguments :func:`single_load` supports except for ac_schema and ac_query :return: Mapping object :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError
4.496057
4.264569
1.054282
cnf = _single_load(input_, ac_parser=ac_parser, ac_template=ac_template, ac_context=ac_context, **options) schema = _maybe_schema(ac_template=ac_template, ac_context=ac_context, **options) cnf = _try_validate(cnf, schema, **options) return anyconfig.query.query(cnf, **options)
def single_load(input_, ac_parser=None, ac_template=False, ac_context=None, **options)
r""" Load single configuration file. .. note:: :func:`load` is a preferable alternative and this API should be used only if there is a need to emphasize given input 'input\_' is single one. :param input\_: File path or file or file-like object or pathlib.Path object represents the file or a namedtuple 'anyconfig.globals.IOInfo' object represents some input to load some data from :param ac_parser: Forced parser type or parser object itself :param ac_template: Assume configuration file may be a template file and try to compile it AAR if True :param ac_context: A dict presents context to instantiate template :param options: Optional keyword arguments such as: - Options common in :func:`single_load`, :func:`multi_load`, :func:`load` and :func:`loads`: - ac_dict: callable (function or class) to make mapping objects from loaded data if the selected backend can customize that such as JSON which supports that with 'object_pairs_hook' option, or None. If this option was not given or None, dict or :class:`collections.OrderedDict` will be used to make result as mapping object depends on if ac_ordered (see below) is True and selected backend can keep the order of items loaded. See also :meth:`_container_factory` of :class:`anyconfig.backend.base.Parser` for more implementation details. - ac_ordered: True if you want to keep resuls ordered. Please note that order of items may be lost depends on the selected backend. - ac_schema: JSON schema file path to validate given config file - ac_query: JMESPath expression to query data - Common backend options: - ac_ignore_missing: Ignore and just return empty result if given file 'input\_' does not exist actually. - Backend specific options such as {"indent": 2} for JSON backend :return: Mapping object :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError
4.025952
4.252645
0.946694
marker = options.setdefault("ac_marker", options.get("marker", '*')) schema = _maybe_schema(ac_template=ac_template, ac_context=ac_context, **options) options["ac_schema"] = None # Avoid loading the schema more than twice. paths = anyconfig.utils.expand_paths(inputs, marker=marker) if anyconfig.utils.are_same_file_types(paths): ac_parser = find(paths[0], forced_type=ac_parser) cnf = ac_context for path in paths: opts = options.copy() cups = _single_load(path, ac_parser=ac_parser, ac_template=ac_template, ac_context=cnf, **opts) if cups: if cnf is None: cnf = cups else: merge(cnf, cups, **options) if cnf is None: return anyconfig.dicts.convert_to({}, **options) cnf = _try_validate(cnf, schema, **options) return anyconfig.query.query(cnf, **options)
def multi_load(inputs, ac_parser=None, ac_template=False, ac_context=None, **options)
r""" Load multiple config files. .. note:: :func:`load` is a preferable alternative and this API should be used only if there is a need to emphasize given inputs are multiple ones. The first argument 'inputs' may be a list of a file paths or a glob pattern specifying them or a pathlib.Path object represents file[s] or a namedtuple 'anyconfig.globals.IOInfo' object represents some inputs to load some data from. About glob patterns, for example, is, if a.yml, b.yml and c.yml are in the dir /etc/foo/conf.d/, the followings give same results:: multi_load(["/etc/foo/conf.d/a.yml", "/etc/foo/conf.d/b.yml", "/etc/foo/conf.d/c.yml", ]) multi_load("/etc/foo/conf.d/*.yml") :param inputs: A list of file path or a glob pattern such as r'/a/b/\*.json'to list of files, file or file-like object or pathlib.Path object represents the file or a namedtuple 'anyconfig.globals.IOInfo' object represents some inputs to load some data from :param ac_parser: Forced parser type or parser object :param ac_template: Assume configuration file may be a template file and try to compile it AAR if True :param ac_context: Mapping object presents context to instantiate template :param options: Optional keyword arguments: - ac_dict, ac_ordered, ac_schema and ac_query are the options common in :func:`single_load`, :func:`multi_load`, :func:`load`: and :func:`loads`. See the descriptions of them in :func:`single_load`. - Options specific to this function and :func:`load`: - ac_merge (merge): Specify strategy of how to merge results loaded from multiple configuration files. See the doc of :mod:`anyconfig.dicts` for more details of strategies. The default is anyconfig.dicts.MS_DICTS. - ac_marker (marker): Globbing marker to detect paths patterns. - Common backend options: - ignore_missing: Ignore and just return empty result if given file 'path' does not exist. - Backend specific options such as {"indent": 2} for JSON backend :return: Mapping object or any query result might be primitive objects :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError
4.814737
4.478627
1.075048
marker = options.setdefault("ac_marker", options.get("marker", '*')) if anyconfig.utils.is_path_like_object(path_specs, marker): return single_load(path_specs, ac_parser=ac_parser, ac_dict=ac_dict, ac_template=ac_template, ac_context=ac_context, **options) if not anyconfig.utils.is_paths(path_specs, marker): raise ValueError("Possible invalid input %r" % path_specs) return multi_load(path_specs, ac_parser=ac_parser, ac_dict=ac_dict, ac_template=ac_template, ac_context=ac_context, **options)
def load(path_specs, ac_parser=None, ac_dict=None, ac_template=False, ac_context=None, **options)
r""" Load single or multiple config files or multiple config files specified in given paths pattern or pathlib.Path object represents config files or a namedtuple 'anyconfig.globals.IOInfo' object represents some inputs. :param path_specs: A list of file path or a glob pattern such as r'/a/b/\*.json'to list of files, file or file-like object or pathlib.Path object represents the file or a namedtuple 'anyconfig.globals.IOInfo' object represents some inputs to load some data from. :param ac_parser: Forced parser type or parser object :param ac_dict: callable (function or class) to make mapping object will be returned as a result or None. If not given or ac_dict is None, default mapping object used to store resutls is dict or :class:`collections.OrderedDict` if ac_ordered is True and selected backend can keep the order of items in mapping objects. :param ac_template: Assume configuration file may be a template file and try to compile it AAR if True :param ac_context: A dict presents context to instantiate template :param options: Optional keyword arguments. See also the description of 'options' in :func:`single_load` and :func:`multi_load` :return: Mapping object or any query result might be primitive objects :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError
2.783025
2.591292
1.073991
if ac_parser is None: LOGGER.warning("ac_parser was not given but it is required to find the " "correct parser to load configurations from string.") return None psr = find(None, forced_type=ac_parser) schema = None ac_schema = options.get("ac_schema", None) if ac_schema is not None: options["ac_schema"] = None schema = loads(ac_schema, ac_parser=psr, ac_dict=ac_dict, ac_template=ac_template, ac_context=ac_context, **options) if ac_template: compiled = anyconfig.template.try_render(content=content, ctx=ac_context, **options) if compiled is not None: content = compiled cnf = psr.loads(content, ac_dict=ac_dict, **options) cnf = _try_validate(cnf, schema, **options) return anyconfig.query.query(cnf, **options)
def loads(content, ac_parser=None, ac_dict=None, ac_template=False, ac_context=None, **options)
:param content: Configuration file's content (a string) :param ac_parser: Forced parser type or ID or parser object :param ac_dict: callable (function or class) to make the mapping object to be returned as a result, or None. If not given or ac_dict is None, the default mapping object used to store results is dict or :class:`collections.OrderedDict` if ac_ordered is True and the selected backend can keep the order of items in mapping objects. :param ac_template: Assume configuration file may be a template file and try to compile it AAR if True :param ac_context: Context dict to instantiate template :param options: Optional keyword arguments. See also the description of 'options' in :func:`single_load` function. :return: Mapping object or any query result which might be a primitive object :raises: ValueError, UnknownProcessorTypeError
3.906476
3.892424
1.00361
ioi = anyconfig.ioinfo.make(out) psr = find(ioi, forced_type=ac_parser) LOGGER.info("Dumping: %s", ioi.path) psr.dump(data, ioi, **options)
def dump(data, out, ac_parser=None, **options)
Save 'data' to 'out'. :param data: A mapping object holding configuration data to dump :param out: An output file path, a file, a file-like object, :class:`pathlib.Path` object representing the file, or a namedtuple 'anyconfig.globals.IOInfo' object representing output to dump some data to. :param ac_parser: Forced parser type or parser object :param options: Backend specific optional arguments, e.g. {"indent": 2} for JSON loader/dumper backend :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError
8.279087
7.032505
1.17726
psr = find(None, forced_type=ac_parser) return psr.dumps(data, **options)
def dumps(data, ac_parser=None, **options)
Return string representation of 'data' in forced type format. :param data: Config data object to dump :param ac_parser: Forced parser type or ID or parser object :param options: see :func:`dump` :return: Backend-specific string representation for the given data :raises: ValueError, UnknownProcessorTypeError
10.655027
16.392685
0.649987
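A hedged sketch of the two dump entry points above (paths are made up; "indent" is a JSON-backend option):
    data = {"a": 1, "b": {"c": 2}}
    dump(data, "/tmp/out.json")                    # backend chosen by extension
    s = dumps(data, ac_parser="json", indent=2)    # explicit backend + its option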
return anyconfig.query.query(data, ac_query=expression)
def query(data, expression, **options)
API just wraps :func:`anyconfig.query.query`. :param data: Config data object to query :param expression: JMESPath expression to query data :param options: Ignored in current implementation :return: Query result object may be a primitive (int, str, etc.) or dict.
44.45163
19.265392
2.307331
logger = logging.getLogger(name) logger.addHandler(anyconfig.compat.NullHandler()) return logger
def getLogger(name="anyconfig")
See: "Configuring Logging for a Library" in python standard logging howto, e.g. https://docs.python.org/2/howto/logging.html#library-config.
3.998263
4.030547
0.99199
if str_ is None: return '' str_ = str_.strip() if not str_: return '' if BOOL_PATTERN.match(str_) is not None: return bool(str_) if INT_PATTERN.match(str_) is not None: return int(str_) if STR_PATTERN.match(str_) is not None: return str_[1:-1] return str_
def parse_single(str_)
Very simple parser to parse expressions represent some single values. :param str_: a string to parse :return: Int | Bool | String >>> parse_single(None) '' >>> parse_single("0") 0 >>> parse_single("123") 123 >>> parse_single("True") True >>> parse_single("a string") 'a string' >>> parse_single('"a string"') 'a string' >>> parse_single("'a string'") 'a string' >>> parse_single("0.1") '0.1' >>> parse_single(" a string contains extra whitespaces ") 'a string contains extra whitespaces'
2.220208
2.262966
0.981105
return [parse_single(x) for x in str_.split(sep) if x]
def parse_list(str_, sep=",")
Simple parser to parse expressions representing some list values. :param str_: a string to parse :param sep: Char to separate items of list :return: [Int | Bool | String] >>> parse_list("") [] >>> parse_list("1") [1] >>> parse_list("a,b") ['a', 'b'] >>> parse_list("1,2") [1, 2] >>> parse_list("a,b,") ['a', 'b']
4.062583
6.781742
0.599047
for rel in parse_list(str_, as_sep): if avs_sep not in rel or rel.endswith(avs_sep): continue (_attr, _values) = parse_list(rel, avs_sep) if vs_sep in str(_values): _values = parse_list(_values, vs_sep) if _values: yield (_attr, _values)
def attr_val_itr(str_, avs_sep=":", vs_sep=",", as_sep=";")
Attribute and value pair parser. :param str_: String represents a list of pairs of attribute and value :param avs_sep: char to separate attribute and values :param vs_sep: char to separate values :param as_sep: char to separate attributes
3.737509
4.329186
0.863328
return [(a, vs) for a, vs in attr_val_itr(str_, avs_sep, vs_sep, as_sep)]
def parse_attrlist_0(str_, avs_sep=":", vs_sep=",", as_sep=";")
Simple parser to parse expressions in the form of [ATTR1:VAL0,VAL1,...;ATTR2:VAL0,VAL2,..]. :param str_: input string :param avs_sep: char to separate attribute and values :param vs_sep: char to separate values :param as_sep: char to separate attributes :return: a list of tuples of (key, value | [value]) where key = (Int | String | ...), value = (Int | Bool | String | ...) | [Int | Bool | String | ...] >>> parse_attrlist_0("a:1") [('a', 1)] >>> parse_attrlist_0("a:1;b:xyz") [('a', 1), ('b', 'xyz')] >>> parse_attrlist_0("requires:bash,zsh") [('requires', ['bash', 'zsh'])] >>> parse_attrlist_0("obsoletes:sysdata;conflicts:sysdata-old") [('obsoletes', 'sysdata'), ('conflicts', 'sysdata-old')]
4.603652
6.459274
0.71272
return dict(parse_attrlist_0(str_, avs_sep, vs_sep, as_sep))
def parse_attrlist(str_, avs_sep=":", vs_sep=",", as_sep=";")
Simple parser to parse expressions in the form of [ATTR1:VAL0,VAL1,...;ATTR2:VAL0,VAL2,..]. :param str_: input string :param avs_sep: char to separate attribute and values :param vs_sep: char to separate values :param as_sep: char to separate attributes >>> parse_attrlist("requires:bash,zsh") {'requires': ['bash', 'zsh']}
3.851478
7.804648
0.493485
if avsep in str_: return parse_attrlist(str_, avsep, vssep, avssep) if lsep in str_: return parse_list(str_, lsep) return parse_single(str_)
def parse(str_, lsep=",", avsep=":", vssep=",", avssep=";")
Generic parser
2.709161
2.731463
0.991835
if level < 0 or level >= 3: raise ValueError("wrong log level passed: " + str(level)) return [logging.WARN, logging.INFO, logging.DEBUG][level]
def to_log_level(level)
:param level: Logging level in int = 0 .. 2 >>> to_log_level(0) == logging.WARN True >>> to_log_level(5) # doctest: +IGNORE_EXCEPTION_DETAIL, +ELLIPSIS Traceback (most recent call last): ... ValueError: wrong log level passed: 5 >>>
4.190121
3.631558
1.153808
if defaults is None: defaults = DEFAULTS ctypes = API.list_types() ctypes_s = ", ".join(ctypes) type_help = "Select type of %s config files from " + ctypes_s + " [Automatically detected by file ext]" mts = API.MERGE_STRATEGIES mts_s = ", ".join(mts) mt_help = "Select strategy to merge multiple configs from " + mts_s + " [%(merge)s]" % defaults parser = argparse.ArgumentParser(usage=USAGE) parser.set_defaults(**defaults) parser.add_argument("inputs", type=str, nargs='*', help="Input files") parser.add_argument("--version", action="version", version="%%(prog)s %s" % anyconfig.globals.VERSION) lpog = parser.add_argument_group("List specific options") lpog.add_argument("-L", "--list", action="store_true", help="List supported config types") spog = parser.add_argument_group("Schema specific options") spog.add_argument("--validate", action="store_true", help="Only validate input files and do not output. " "You must specify schema file with -S/--schema " "option.") spog.add_argument("--gen-schema", action="store_true", help="Generate JSON schema for given config file[s] " "and output it instead of (merged) configuration.") gspog = parser.add_argument_group("Query/Get/set options") gspog.add_argument("-Q", "--query", help=_QUERY_HELP) gspog.add_argument("--get", help=_GET_HELP) gspog.add_argument("--set", help=_SET_HELP) parser.add_argument("-o", "--output", help="Output file path") parser.add_argument("-I", "--itype", choices=ctypes, metavar="ITYPE", help=(type_help % "Input")) parser.add_argument("-O", "--otype", choices=ctypes, metavar="OTYPE", help=(type_help % "Output")) parser.add_argument("-M", "--merge", choices=mts, metavar="MERGE", help=mt_help) parser.add_argument("-A", "--args", help="Argument configs to override") parser.add_argument("--atype", choices=ctypes, metavar="ATYPE", help=_ATYPE_HELP_FMT % ctypes_s) cpog = parser.add_argument_group("Common options") cpog.add_argument("-x", "--ignore-missing", action="store_true", help="Ignore missing input files") cpog.add_argument("-T", "--template", action="store_true", help="Enable template config support") cpog.add_argument("-E", "--env", action="store_true", help="Load configuration defaults from " "environment values") cpog.add_argument("-S", "--schema", help="Specify Schema file[s] path") cpog.add_argument("-e", "--extra-opts", help="Extra options given to the API call, " "--extra-opts indent:2 (specify the " "indent for pretty-printing of JSON outputs) " "for example") cpog.add_argument("-v", "--verbose", action="count", dest="loglevel", help="Verbose mode; -v or -vv (more verbose)") return parser
def make_parser(defaults=None)
:param defaults: Default option values
3.598359
3.586344
1.00335
(sys.stdout if exit_code == 0 else sys.stderr).write(content + os.linesep) sys.exit(exit_code)
def _exit_with_output(content, exit_code=0)
Exit the program with printing out messages. :param content: content to print out :param exit_code: Exit code
3.115045
5.508171
0.565532
sep = os.linesep types = "Supported types: " + ", ".join(API.list_types()) cids = "IDs: " + ", ".join(c for c, _ps in API.list_by_cid()) x_vs_ps = [" %s: %s" % (x, ", ".join(p.cid() for p in ps)) for x, ps in API.list_by_extension()] exts = "File extensions:" + sep + sep.join(x_vs_ps) _exit_with_output(sep.join([types, exts, cids]))
def _show_psrs()
Show info about the available parsers
5.941962
5.645003
1.052606
parser = make_parser() args = parser.parse_args(argv) LOGGER.setLevel(to_log_level(args.loglevel)) if args.inputs: if '-' in args.inputs: args.inputs = sys.stdin else: if args.list: _show_psrs() elif args.env: cnf = os.environ.copy() _output_result(cnf, args.output, args.otype or "json", None, None) sys.exit(0) else: parser.print_usage() sys.exit(1) if args.validate and args.schema is None: _exit_with_output("--validate option requires --schema option", 1) return args
def _parse_args(argv)
Parse command line arguments, showing supported config format types or usage and exiting as needed. :param argv: Argument list to parse or None (sys.argv will be used). :return: argparse.Namespace object or None (exit before return)
4.917933
5.154986
0.954015
(cnf, err) = API.get(cnf, get_path) if cnf is None: # Failed to get the result. _exit_with_output("Failed to get result: err=%s" % err, 1) return cnf
def _do_get(cnf, get_path)
:param cnf: Configuration object to print out :param get_path: key path given in --get option :return: updated Configuration object if no error
7.111256
7.274333
0.977582
msg = ("Specify inpath and/or outpath type[s] with -I/--itype " "or -O/--otype option explicitly") if itype is None: try: otype = API.find(inpaths[0]).type() except API.UnknownFileTypeError: _exit_with_output((fmsg % inpaths[0]) + msg, 1) except (ValueError, IndexError): _exit_with_output(msg, 1) else: otype = itype return otype
def _output_type_by_input_path(inpaths, itype, fmsg)
:param inpaths: List of input file paths :param itype: Input type or None :param fmsg: message if it cannot detect otype by 'inpath' :return: Output type :: str
5.627272
5.587003
1.007208
if extra_opts is None: extra_opts = {} try: API.dump(cnf, outpath, otype, **extra_opts) except API.UnknownFileTypeError: _exit_with_output(fmsg % outpath, 1) except API.UnknownProcessorTypeError: _exit_with_output("Invalid output type '%s'" % otype, 1)
def _try_dump(cnf, outpath, otype, fmsg, extra_opts=None)
:param cnf: Configuration object to print out :param outpath: Output file path or None :param otype: Output type or None :param fmsg: message if it cannot detect otype by 'inpath' :param extra_opts: Map object will be given to API.dump as extra options
3.48401
3.355266
1.038371
fmsg = ("Uknown file type and cannot detect appropriate backend " "from its extension, '%s'") if not anyconfig.utils.is_dict_like(cnf): _exit_with_output(str(cnf)) # Print primitive types as it is. if not outpath or outpath == "-": outpath = sys.stdout if otype is None: otype = _output_type_by_input_path(inpaths, itype, fmsg) _try_dump(cnf, outpath, otype, fmsg, extra_opts=extra_opts)
def _output_result(cnf, outpath, otype, inpaths, itype, extra_opts=None)
:param cnf: Configuration object to print out :param outpath: Output file path or None :param otype: Output type or None :param inpaths: List of input file paths :param itype: Input type or None :param extra_opts: Mapping object passed to API.dump as extra options
7.821344
7.47591
1.046206
try:
    diff = API.load(args.inputs, args.itype,
                    ac_ignore_missing=args.ignore_missing,
                    ac_merge=args.merge,
                    ac_template=args.template,
                    ac_schema=args.schema,
                    **extra_opts)
except API.UnknownProcessorTypeError:
    _exit_with_output("Wrong input type '%s'" % args.itype, 1)
except API.UnknownFileTypeError:
    _exit_with_output("No appropriate backend was found for given "
                      "input[s] '%s'" % ", ".join(args.inputs), 1)

_exit_if_load_failure(diff,
                      "Failed to load: args=%s" % ", ".join(args.inputs))

return diff
def _load_diff(args, extra_opts)
:param args: :class:`argparse.Namespace` object :param extra_opts: Mapping object given to API.load as extra options :return: Mapping object loaded from 'args.inputs'
4.973752
4.812267
1.033557
if args.query:
    cnf = API.query(cnf, args.query)
elif args.get:
    cnf = _do_get(cnf, args.get)
elif args.set:
    (key, val) = args.set.split('=', 1)  # Split on the first '=' only.
    API.set_(cnf, key, anyconfig.parser.parse(val))

return cnf
def _do_filter(cnf, args)
:param cnf: Mapping object represents configuration data :param args: :class:`argparse.Namespace` object :return: 'cnf' may be updated
4.411635
4.580132
0.963212
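A sketch of the --set branch, assuming anyconfig.parser.parse coerces the string "10" to the integer 10:

cnf = {"a": {"b": 1}}
key, val = "a.b=10".split('=', 1)
# key == "a.b" and val == "10"; API.set_(cnf, "a.b", 10) then updates
# the nested value in place, so _do_filter returns {"a": {"b": 10}}.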
args = _parse_args((argv if argv else sys.argv)[1:])
cnf = os.environ.copy() if args.env else {}

extra_opts = dict()
if args.extra_opts:
    extra_opts = anyconfig.parser.parse(args.extra_opts)

diff = _load_diff(args, extra_opts)

if cnf:
    API.merge(cnf, diff)
else:
    cnf = diff

if args.args:
    diff = anyconfig.parser.parse(args.args)
    API.merge(cnf, diff)

if args.validate:
    _exit_with_output("Validation succeeds")

cnf = API.gen_schema(cnf) if args.gen_schema else _do_filter(cnf, args)
_output_result(cnf, args.output, args.otype, args.inputs, args.itype,
               extra_opts=extra_opts)
def main(argv=None)
:param argv: Argument list to parse or None (sys.argv will be used).
5.055624
4.861135
1.040009
vldtr = jsonschema.Draft4Validator(schema) # :raises: SchemaError, ... errors = list(vldtr.iter_errors(data)) return (not errors, [err.message for err in errors])
def _validate_all(data, schema, **options)
See the description of :func:`validate` for more details of parameters and return value. :seealso: https://python-jsonschema.readthedocs.io/en/latest/validate/, especially the section on 'iter_errors'
5.947872
6.478167
0.918141
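A hedged illustration of the iter_errors contract used above; it requires the jsonschema package, and the schema is a made-up example:

import jsonschema

schema = {"type": "object", "properties": {"a": {"type": "integer"}}}
vldtr = jsonschema.Draft4Validator(schema)
errors = list(vldtr.iter_errors({"a": "x"}))
# (not errors, [e.message for e in errors])
# -> (False, ["'x' is not of type 'integer'"])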
try:
    jsonschema.validate(data, schema, **options)
except Exception as exc:  # Covers jsonschema.ValidationError, jsonschema.SchemaError and others.
    if ac_schema_safe:
        return (False, str(exc))  # Validation failed.
    raise

return (True, '')
def _validate(data, schema, ac_schema_safe=True, **options)
Validate the target object 'data' with the given schema object. See the description of :func:`validate` for more details of parameters and return value.
3.980037
3.962849
1.004337
if not JSONSCHEMA_IS_AVAIL: return (True, _NA_MSG) options = anyconfig.utils.filter_options(("cls", ), options) if ac_schema_errors: return _validate_all(data, schema, **options) return _validate(data, schema, ac_schema_safe, **options)
def validate(data, schema, ac_schema_safe=True, ac_schema_errors=False, **options)
Validate target object with given schema object, loaded from JSON schema. See also: https://python-jsonschema.readthedocs.org/en/latest/validate/ :param data: Target object (a dict or a dict-like object) to validate :param schema: Schema object (a dict or a dict-like object) instantiated from schema JSON file or schema JSON string :param options: Other keyword options such as: - ac_schema_safe: Exceptions (jsonschema.ValidationError, jsonschema.SchemaError or others) raised during the validation process are caught by default; they will be re-raised instead if 'ac_schema_safe' is False. - ac_schema_errors: Lazily yield each of the validation errors and return all of them if validation fails. :return: (True if validation succeeded else False, error message[s])
7.105512
8.147958
0.87206
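A usage sketch, assuming jsonschema is installed so JSONSCHEMA_IS_AVAIL is True; the schema is a made-up example:

schema = {"type": "object", "properties": {"a": {"type": "integer"}}}
validate({"a": 1}, schema)                           # -> (True, '')
validate({"a": "x"}, schema)                         # -> (False, '<error message>')
validate({"a": "x"}, schema, ac_schema_errors=True)  # -> (False, ['<message>', ...])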
(typemap, strict) = _process_options(**options) arr = list(arr) scm = dict(type=typemap[list], items=gen_schema(arr[0] if arr else "str", **options)) if strict: nitems = len(arr) scm["minItems"] = nitems scm["uniqueItems"] = len(set(arr)) == nitems return scm
def array_to_schema(arr, **options)
Generate a JSON schema object with type annotations added for the given object. :param arr: Array of mapping objects like dicts :param options: Other keyword options such as: - ac_schema_strict: True if a more strict (precise) schema is needed - ac_schema_typemap: Type to JSON schema type mappings :return: A mapping object representing the JSON schema of the items
4.616898
6.196883
0.745036
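Expected shape of the result, as a sketch assuming the default typemap renders list as 'array' and int as 'integer':

array_to_schema([1, 2, 3])
# -> {'type': 'array', 'items': {'type': 'integer'}}
array_to_schema([1, 2, 3], ac_schema_strict=True)
# -> additionally has 'minItems': 3 and 'uniqueItems': True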
(typemap, strict) = _process_options(**options) props = dict((k, gen_schema(v, **options)) for k, v in obj.items()) scm = dict(type=typemap[dict], properties=props) if strict: scm["required"] = sorted(props.keys()) return scm
def object_to_schema(obj, **options)
Generate a node representing a JSON schema object with type annotations added for the given object node. :param obj: Mapping object such as a dict :param options: Other keyword options such as: - ac_schema_strict: True if a more strict (precise) schema is needed - ac_schema_typemap: Type to JSON schema type mappings :return: A mapping object representing the JSON schema of the object
4.682556
5.971895
0.784099
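Expected shape of the result under the same default-typemap assumption:

object_to_schema({"a": 1})
# -> {'type': 'object', 'properties': {'a': {'type': 'integer'}}}
object_to_schema({"a": 1}, ac_schema_strict=True)
# -> additionally has 'required': ['a']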
if data is None:
    return dict(type="null")

_type = type(data)

if _type in _SIMPLE_TYPES:
    typemap = options.get("ac_schema_typemap", _SIMPLETYPE_MAP)
    scm = dict(type=typemap[_type])
elif anyconfig.utils.is_dict_like(data):
    scm = object_to_schema(data, **options)
elif anyconfig.utils.is_list_like(data):
    scm = array_to_schema(data, **options)
else:
    # Without this branch, 'scm' would be unbound for unsupported types.
    raise ValueError("Unsupported data type: %r" % _type)

return scm
def gen_schema(data, **options)
Generate a node representing a JSON schema object with type annotations added for the given object node. :param data: Configuration data object (dict[-like] or namedtuple) :param options: Other keyword options such as: - ac_schema_strict: True if a more strict (precise) schema is needed - ac_schema_typemap: Type to JSON schema type mappings :return: A dict representing the JSON schema of this node
3.670727
3.322093
1.104944
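Putting the pieces together, again assuming the default typemap (int -> 'integer', bool -> 'boolean'):

gen_schema(None)
# -> {'type': 'null'}
gen_schema({"a": 1, "b": [True]})
# -> {'type': 'object',
#     'properties': {'a': {'type': 'integer'},
#                    'b': {'type': 'array', 'items': {'type': 'boolean'}}}}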
outdir = os.path.dirname(filepath) if outdir and not os.path.exists(outdir): LOGGER.debug("Making output dir: %s", outdir) os.makedirs(outdir)
def ensure_outdir_exists(filepath)
Make dir to dump 'filepath' if that dir does not exist. :param filepath: path of file to dump
2.238849
3.134167
0.714336
@functools.wraps(func) def wrapper(*args, **kwargs): return func(*args[1:], **kwargs) return wrapper
def to_method(func)
Lift :func:`func` to a method; it will be called with the first argument 'self' ignored. :param func: Any callable object
2.798992
3.618137
0.7736
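A small sketch of the lifting trick with a made-up function: the wrapper drops args[0] ('self') before delegating:

def _parse(content, **opts):
    return content.strip()

class Parser(object):
    parse = to_method(_parse)

# Parser().parse(" x ") calls _parse(" x ") and returns 'x'.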
ret = load_fn(content_or_strm, **options) if anyconfig.utils.is_dict_like(ret): return container() if (ret is None or not ret) else container(ret) return ret if allow_primitives else container(ret)
def load_with_fn(load_fn, content_or_strm, container, allow_primitives=False, **options)
Load data from the given string or stream 'content_or_strm'. :param load_fn: Callable to load data :param content_or_strm: Data content or a stream which provides it :param container: Callable to make a container object :param allow_primitives: True if the parser's load* functions may return objects of primitive data types other than mapping types, as the JSON parser does :param options: Keyword options passed to 'load_fn' :return: Container object holding data
4.289332
5.71088
0.751081
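A sketch using the stdlib json loader as a stand-in for a backend that may return primitives:

import json

load_with_fn(json.loads, '{"a": 1}', dict)
# -> {'a': 1} (dict-like result is wrapped in the container)
load_with_fn(json.loads, '[1, 2]', dict, allow_primitives=True)
# -> [1, 2] (primitive result is passed through)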
if stream is None: return dump_fn(data, **options) return dump_fn(data, stream, **options)
def dump_with_fn(dump_fn, data, stream, **options)
Dump 'data' to a string if 'stream' is None, or dump 'data' to a file or file-like object 'stream'. :param dump_fn: Callable to dump data :param data: Data to dump :param stream: File or file like object or None :param options: optional keyword parameters :return: String represents data if stream is None or None
2.86299
3.731079
0.767336
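A sketch with the stdlib json functions; note that the dump_fn passed for the string case must be able to render without a stream:

import io
import json

dump_with_fn(json.dumps, {"a": 1}, None)   # -> '{"a": 1}'
buf = io.StringIO()
dump_with_fn(json.dump, {"a": 1}, buf)     # writes to buf, returns None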
ac_dict = options.get("ac_dict", False) _dicts = [x for x in (options.get(o) for o in self.dict_options()) if x] if self.dict_options() and ac_dict and callable(ac_dict): return ac_dict # Higher priority than ac_ordered. if _dicts and callable(_dicts[0]): return _dicts[0] if self.ordered() and options.get("ac_ordered", False): return anyconfig.compat.OrderedDict return dict
def _container_factory(self, **options)
The order of priority is: ac_dict, backend specific dict class option, ac_ordered. :param options: Keyword options which may contain 'ac_ordered'. :return: Factory (class or function) to make a container.
5.445553
4.621786
1.178236
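A comment-only sketch of the resolution order, assuming a hypothetical backend whose dict_options() includes 'object_hook' and whose ordered() returns True:

# parser._container_factory(ac_dict=MyDict)           -> MyDict (highest priority)
# parser._container_factory(object_hook=OrderedDict)  -> OrderedDict (backend option)
# parser._container_factory(ac_ordered=True)          -> OrderedDict (via ac_ordered)
# parser._container_factory()                         -> dict (fallback)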
# Force set dict option if available in backend. For example, # options["object_hook"] will be OrderedDict if 'container' was # OrderedDict in JSON backend. for opt in self.dict_options(): options.setdefault(opt, container) return anyconfig.utils.filter_options(self._load_opts, options)
def _load_options(self, container, **options)
Select backend specific loading options.
15.96205
14.164444
1.12691
_not_implemented(self, content, container, **kwargs)
def load_from_string(self, content, container, **kwargs)
Load config from the given string 'content'. :param content: Config content string :param container: Callable to make a container object later :param kwargs: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters
12.230797
17.505264
0.698692
_not_implemented(self, filepath, container, **kwargs)
def load_from_path(self, filepath, container, **kwargs)
Load config from the given file path 'filepath'. :param filepath: Config file path :param container: Callable to make a container object later :param kwargs: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters
12.824657
19.275105
0.665348
_not_implemented(self, stream, container, **kwargs)
def load_from_stream(self, stream, container, **kwargs)
Load config from the given file-like object 'stream'. :param stream: Config file or file-like object :param container: Callable to make a container object later :param kwargs: optional keyword parameters to be sanitized :: dict :return: Dict-like object holding config parameters
12.46202
18.327133
0.679977