diff --git a/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__init__.py b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f707416286b22ddbdcf84f60b6ad38ded604bdfc --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,132 @@ +import sys +import os +import re +import importlib +import warnings + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +warnings.filterwarnings('ignore', + r'.+ distutils\b.+ deprecated', + DeprecationWarning) + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + warnings.warn("Setuptools is replacing distutils.") + mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + return which == 'local' + + +def ensure_local_distutils(): + clear_distutils() + + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. + add_shim() + importlib.import_module('distutils') + remove_shim() + + # check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. + """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + import importlib.abc + import importlib.util + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + return importlib.import_module('setuptools._distutils') + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. + """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @staticmethod + def pip_imported_during_build(): + """ + Detect if pip is being imported in a build script. Ref #2355. 
+ """ + import traceback + return any( + frame.f_globals['__file__'].endswith('setup.py') + for frame, line in traceback.walk_stack(None) + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..824dcddebb685558f1a4f0e096f01ece877af5ef Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8260943c0e088c2af5be1bd60a2e10c597b8f565 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/_distutils_hack/override.py b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/override.py new file mode 100644 index 0000000000000000000000000000000000000000..2cc433a4a55e3b41fa31089918fb62096092f89f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/env-llmeval/lib/python3.10/site-packages/absl/__init__.py b/env-llmeval/lib/python3.10/site-packages/absl/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a3bd1cd51810385ca0e5e9fed3fb9a804febf27e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..921480838658ac882b79b64786975d39c0ce4be0 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/app.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/app.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8cff4b3a22d3011889591db2c7e13f64bca373e6 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/app.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/command_name.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/command_name.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db1794ecfbcdeeb006ad0a65d3341d543bf83aa1 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/__pycache__/command_name.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/app.py b/env-llmeval/lib/python3.10/site-packages/absl/app.py new file mode 100644 index 0000000000000000000000000000000000000000..d12397b31a9093dd0f9a8622c72a16d31fcb4fa9 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/app.py @@ -0,0 +1,480 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generic entry point for Abseil Python applications. + +To use this module, define a ``main`` function with a single ``argv`` argument +and call ``app.run(main)``. For example:: + + def main(argv): + if len(argv) > 1: + raise app.UsageError('Too many command-line arguments.') + + if __name__ == '__main__': + app.run(main) +""" + +import collections +import errno +import os +import pdb +import sys +import textwrap +import traceback + +from absl import command_name +from absl import flags +from absl import logging + +try: + import faulthandler +except ImportError: + faulthandler = None + +FLAGS = flags.FLAGS + +flags.DEFINE_boolean('run_with_pdb', False, 'Set to true for PDB debug mode') +flags.DEFINE_boolean('pdb_post_mortem', False, + 'Set to true to handle uncaught exceptions with PDB ' + 'post mortem.') +flags.DEFINE_alias('pdb', 'pdb_post_mortem') +flags.DEFINE_boolean('run_with_profiling', False, + 'Set to true for profiling the script. ' + 'Execution will be slower, and the output format might ' + 'change over time.') +flags.DEFINE_string('profile_file', None, + 'Dump profile information to a file (for python -m ' + 'pstats). Implies --run_with_profiling.') +flags.DEFINE_boolean('use_cprofile_for_profiling', True, + 'Use cProfile instead of the profile module for ' + 'profiling. 
This has no effect unless ' + '--run_with_profiling is set.') +flags.DEFINE_boolean('only_check_args', False, + 'Set to true to validate args and exit.', + allow_hide_cpp=True) + + +# If main() exits via an abnormal exception, call into these +# handlers before exiting. +EXCEPTION_HANDLERS = [] + + +class Error(Exception): + pass + + +class UsageError(Error): + """Exception raised when the arguments supplied by the user are invalid. + + Raise this when the arguments supplied are invalid from the point of + view of the application. For example when two mutually exclusive + flags have been supplied or when there are not enough non-flag + arguments. It is distinct from flags.Error which covers the lower + level of parsing and validating individual flags. + """ + + def __init__(self, message, exitcode=1): + super(UsageError, self).__init__(message) + self.exitcode = exitcode + + +class HelpFlag(flags.BooleanFlag): + """Special boolean flag that displays usage and raises SystemExit.""" + NAME = 'help' + SHORT_NAME = '?' + + def __init__(self): + super(HelpFlag, self).__init__( + self.NAME, False, 'show this help', + short_name=self.SHORT_NAME, allow_hide_cpp=True) + + def parse(self, arg): + if self._parse(arg): + usage(shorthelp=True, writeto_stdout=True) + # Advertise --helpfull on stdout, since usage() was on stdout. + print() + print('Try --helpfull to get a list of all flags.') + sys.exit(1) + + +class HelpshortFlag(HelpFlag): + """--helpshort is an alias for --help.""" + NAME = 'helpshort' + SHORT_NAME = None + + +class HelpfullFlag(flags.BooleanFlag): + """Display help for flags in the main module and all dependent modules.""" + + def __init__(self): + super(HelpfullFlag, self).__init__( + 'helpfull', False, 'show full help', allow_hide_cpp=True) + + def parse(self, arg): + if self._parse(arg): + usage(writeto_stdout=True) + sys.exit(1) + + +class HelpXMLFlag(flags.BooleanFlag): + """Similar to HelpfullFlag, but generates output in XML format.""" + + def __init__(self): + super(HelpXMLFlag, self).__init__( + 'helpxml', False, 'like --helpfull, but generates XML output', + allow_hide_cpp=True) + + def parse(self, arg): + if self._parse(arg): + flags.FLAGS.write_help_in_xml_format(sys.stdout) + sys.exit(1) + + +def parse_flags_with_usage(args): + """Tries to parse the flags, print usage, and exit if unparsable. + + Args: + args: [str], a non-empty list of the command line arguments including + program name. + + Returns: + [str], a non-empty list of remaining command line arguments after parsing + flags, including program name. + """ + try: + return FLAGS(args) + except flags.Error as error: + message = str(error) + if '\n' in message: + final_message = 'FATAL Flags parsing error:\n%s\n' % textwrap.indent( + message, ' ') + else: + final_message = 'FATAL Flags parsing error: %s\n' % message + sys.stderr.write(final_message) + sys.stderr.write('Pass --helpshort or --helpfull to see help on flags.\n') + sys.exit(1) + + +_define_help_flags_called = False + + +def define_help_flags(): + """Registers help flags. Idempotent.""" + # Use a global to ensure idempotence. 
+ global _define_help_flags_called + + if not _define_help_flags_called: + flags.DEFINE_flag(HelpFlag()) + flags.DEFINE_flag(HelpshortFlag()) # alias for --help + flags.DEFINE_flag(HelpfullFlag()) + flags.DEFINE_flag(HelpXMLFlag()) + _define_help_flags_called = True + + +def _register_and_parse_flags_with_usage( + argv=None, + flags_parser=parse_flags_with_usage, +): + """Registers help flags, parses arguments and shows usage if appropriate. + + This also calls sys.exit(0) if flag --only_check_args is True. + + Args: + argv: [str], a non-empty list of the command line arguments including + program name, sys.argv is used if None. + flags_parser: Callable[[List[Text]], Any], the function used to parse flags. + The return value of this function is passed to `main` untouched. + It must guarantee FLAGS is parsed after this function is called. + + Returns: + The return value of `flags_parser`. When using the default `flags_parser`, + it returns the following: + [str], a non-empty list of remaining command line arguments after parsing + flags, including program name. + + Raises: + Error: Raised when flags_parser is called, but FLAGS is not parsed. + SystemError: Raised when it's called more than once. + """ + if _register_and_parse_flags_with_usage.done: + raise SystemError('Flag registration can be done only once.') + + define_help_flags() + + original_argv = sys.argv if argv is None else argv + args_to_main = flags_parser(original_argv) + if not FLAGS.is_parsed(): + raise Error('FLAGS must be parsed after flags_parser is called.') + + # Exit when told so. + if FLAGS.only_check_args: + sys.exit(0) + # Immediately after flags are parsed, bump verbosity to INFO if the flag has + # not been set. + if FLAGS['verbosity'].using_default_value: + FLAGS.verbosity = 0 + _register_and_parse_flags_with_usage.done = True + + return args_to_main + +_register_and_parse_flags_with_usage.done = False + + +def _run_main(main, argv): + """Calls main, optionally with pdb or profiler.""" + if FLAGS.run_with_pdb: + sys.exit(pdb.runcall(main, argv)) + elif FLAGS.run_with_profiling or FLAGS.profile_file: + # Avoid import overhead since most apps (including performance-sensitive + # ones) won't be run with profiling. + # pylint: disable=g-import-not-at-top + import atexit + if FLAGS.use_cprofile_for_profiling: + import cProfile as profile + else: + import profile + profiler = profile.Profile() + if FLAGS.profile_file: + atexit.register(profiler.dump_stats, FLAGS.profile_file) + else: + atexit.register(profiler.print_stats) + sys.exit(profiler.runcall(main, argv)) + else: + sys.exit(main(argv)) + + +def _call_exception_handlers(exception): + """Calls any installed exception handlers.""" + for handler in EXCEPTION_HANDLERS: + try: + if handler.wants(exception): + handler.handle(exception) + except: # pylint: disable=bare-except + try: + # We don't want to stop for exceptions in the exception handlers but + # we shouldn't hide them either. + logging.error(traceback.format_exc()) + except: # pylint: disable=bare-except + # In case even the logging statement fails, ignore. + pass + + +def run( + main, + argv=None, + flags_parser=parse_flags_with_usage, +): + """Begins executing the program. + + Args: + main: The main function to execute. It takes an single argument "argv", + which is a list of command line arguments with parsed flags removed. 
+ The return value is passed to `sys.exit`, and so for example + a return value of 0 or None results in a successful termination, whereas + a return value of 1 results in abnormal termination. + For more details, see https://docs.python.org/3/library/sys#sys.exit + argv: A non-empty list of the command line arguments including program name, + sys.argv is used if None. + flags_parser: Callable[[List[Text]], Any], the function used to parse flags. + The return value of this function is passed to `main` untouched. + It must guarantee FLAGS is parsed after this function is called. + Should be passed as a keyword-only arg which will become mandatory in a + future release. + - Parses command line flags with the flag module. + - If there are any errors, prints usage(). + - Calls main() with the remaining arguments. + - If main() raises a UsageError, prints usage and the error message. + """ + try: + args = _run_init( + sys.argv if argv is None else argv, + flags_parser, + ) + while _init_callbacks: + callback = _init_callbacks.popleft() + callback() + try: + _run_main(main, args) + except UsageError as error: + usage(shorthelp=True, detailed_error=error, exitcode=error.exitcode) + except: + exc = sys.exc_info()[1] + # Don't try to post-mortem debug successful SystemExits, since those + # mean there wasn't actually an error. In particular, the test framework + # raises SystemExit(False) even if all tests passed. + if isinstance(exc, SystemExit) and not exc.code: + raise + + # Check the tty so that we don't hang waiting for input in an + # non-interactive scenario. + if FLAGS.pdb_post_mortem and sys.stdout.isatty(): + traceback.print_exc() + print() + print(' *** Entering post-mortem debugging ***') + print() + pdb.post_mortem() + raise + except Exception as e: + _call_exception_handlers(e) + raise + +# Callbacks which have been deferred until after _run_init has been called. +_init_callbacks = collections.deque() + + +def call_after_init(callback): + """Calls the given callback only once ABSL has finished initialization. + + If ABSL has already finished initialization when ``call_after_init`` is + called then the callback is executed immediately, otherwise `callback` is + stored to be executed after ``app.run`` has finished initializing (aka. just + before the main function is called). + + If called after ``app.run``, this is equivalent to calling ``callback()`` in + the caller thread. If called before ``app.run``, callbacks are run + sequentially (in an undefined order) in the same thread as ``app.run``. + + Args: + callback: a callable to be called once ABSL has finished initialization. + This may be immediate if initialization has already finished. It + takes no arguments and returns nothing. + """ + if _run_init.done: + callback() + else: + _init_callbacks.append(callback) + + +def _run_init( + argv, + flags_parser, +): + """Does one-time initialization and re-parses flags on rerun.""" + if _run_init.done: + return flags_parser(argv) + command_name.make_process_name_useful() + # Set up absl logging handler. + logging.use_absl_handler() + args = _register_and_parse_flags_with_usage( + argv=argv, + flags_parser=flags_parser, + ) + if faulthandler: + try: + faulthandler.enable() + except Exception: # pylint: disable=broad-except + # Some tests verify stderr output very closely, so don't print anything. + # Disabled faulthandler is a low-impact error. 
+ pass + _run_init.done = True + return args + + +_run_init.done = False + + +def usage(shorthelp=False, writeto_stdout=False, detailed_error=None, + exitcode=None): + """Writes __main__'s docstring to stderr with some help text. + + Args: + shorthelp: bool, if True, prints only flags from the main module, + rather than all flags. + writeto_stdout: bool, if True, writes help message to stdout, + rather than to stderr. + detailed_error: str, additional detail about why usage info was presented. + exitcode: optional integer, if set, exits with this status code after + writing help. + """ + if writeto_stdout: + stdfile = sys.stdout + else: + stdfile = sys.stderr + + doc = sys.modules['__main__'].__doc__ + if not doc: + doc = '\nUSAGE: %s [flags]\n' % sys.argv[0] + doc = flags.text_wrap(doc, indent=' ', firstline_indent='') + else: + # Replace all '%s' with sys.argv[0], and all '%%' with '%'. + num_specifiers = doc.count('%') - 2 * doc.count('%%') + try: + doc %= (sys.argv[0],) * num_specifiers + except (OverflowError, TypeError, ValueError): + # Just display the docstring as-is. + pass + if shorthelp: + flag_str = FLAGS.main_module_help() + else: + flag_str = FLAGS.get_help() + try: + stdfile.write(doc) + if flag_str: + stdfile.write('\nflags:\n') + stdfile.write(flag_str) + stdfile.write('\n') + if detailed_error is not None: + stdfile.write('\n%s\n' % detailed_error) + except IOError as e: + # We avoid printing a huge backtrace if we get EPIPE, because + # "foo.par --help | less" is a frequent use case. + if e.errno != errno.EPIPE: + raise + if exitcode is not None: + sys.exit(exitcode) + + +class ExceptionHandler(object): + """Base exception handler from which other may inherit.""" + + def wants(self, exc): + """Returns whether this handler wants to handle the exception or not. + + This base class returns True for all exceptions by default. Override in + subclass if it wants to be more selective. + + Args: + exc: Exception, the current exception. + """ + del exc # Unused. + return True + + def handle(self, exc): + """Do something with the current exception. + + Args: + exc: Exception, the current exception + + This method must be overridden. + """ + raise NotImplementedError() + + +def install_exception_handler(handler): + """Installs an exception handler. + + Args: + handler: ExceptionHandler, the exception handler to install. + + Raises: + TypeError: Raised when the handler was not of the correct type. + + All installed exception handlers will be called if main() exits via + an abnormal exception, i.e. not one of SystemExit, KeyboardInterrupt, + FlagsError or UsageError. + """ + if not isinstance(handler, ExceptionHandler): + raise TypeError('handler of type %s does not inherit from ExceptionHandler' + % type(handler)) + EXCEPTION_HANDLERS.append(handler) diff --git a/env-llmeval/lib/python3.10/site-packages/absl/app.pyi b/env-llmeval/lib/python3.10/site-packages/absl/app.pyi new file mode 100644 index 0000000000000000000000000000000000000000..fe5e44809915f3dbd56b23207781a2219d86f842 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/app.pyi @@ -0,0 +1,99 @@ + +from typing import Any, Callable, Collection, Iterable, List, NoReturn, Optional, Text, TypeVar, Union, overload + +from absl.flags import _flag + + +_MainArgs = TypeVar('_MainArgs') +_Exc = TypeVar('_Exc', bound=Exception) + + +class ExceptionHandler(): + + def wants(self, exc: _Exc) -> bool: + ... + + def handle(self, exc: _Exc): + ... + + +EXCEPTION_HANDLERS: List[ExceptionHandler] = ... 
+ + +class HelpFlag(_flag.BooleanFlag): + def __init__(self): + ... + + +class HelpshortFlag(HelpFlag): + ... + + +class HelpfullFlag(_flag.BooleanFlag): + def __init__(self): + ... + + +class HelpXMLFlag(_flag.BooleanFlag): + def __init__(self): + ... + + +def define_help_flags() -> None: + ... + + +@overload +def usage(shorthelp: Union[bool, int] = ..., + writeto_stdout: Union[bool, int] = ..., + detailed_error: Optional[Any] = ..., + exitcode: None = ...) -> None: + ... + + +@overload +def usage(shorthelp: Union[bool, int] = ..., + writeto_stdout: Union[bool, int] = ..., + detailed_error: Optional[Any] = ..., + exitcode: int = ...) -> NoReturn: + ... + + +def install_exception_handler(handler: ExceptionHandler) -> None: + ... + + +class Error(Exception): + ... + + +class UsageError(Error): + exitcode: int + + +def parse_flags_with_usage(args: List[Text]) -> List[Text]: + ... + + +def call_after_init(callback: Callable[[], Any]) -> None: + ... + + +# Without the flag_parser argument, `main` should require a List[Text]. +@overload +def run( + main: Callable[[List[Text]], Any], + argv: Optional[List[Text]] = ..., + *, +) -> NoReturn: + ... + + +@overload +def run( + main: Callable[[_MainArgs], Any], + argv: Optional[List[Text]] = ..., + *, + flags_parser: Callable[[List[Text]], _MainArgs], +) -> NoReturn: + ... diff --git a/env-llmeval/lib/python3.10/site-packages/absl/command_name.py b/env-llmeval/lib/python3.10/site-packages/absl/command_name.py new file mode 100644 index 0000000000000000000000000000000000000000..9260fee9bd853ba33b2139b3d47b73e59c127f36 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/command_name.py @@ -0,0 +1,63 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A tiny stand alone library to change the kernel process name on Linux.""" + +import os +import sys + +# This library must be kept small and stand alone. It is used by small things +# that require no extension modules. + + +def make_process_name_useful(): + """Sets the process name to something better than 'python' if possible.""" + set_kernel_process_name(os.path.basename(sys.argv[0])) + + +def set_kernel_process_name(name): + """Changes the Kernel's /proc/self/status process name on Linux. + + The kernel name is NOT what will be shown by the ps or top command. + It is a 15 character string stored in the kernel's process table that + is included in the kernel log when a process is OOM killed. + The first 15 bytes of name are used. Non-ASCII unicode is replaced with '?'. + + Does nothing if /proc/self/comm cannot be written or prctl() fails. + + Args: + name: bytes|unicode, the Linux kernel's command name to set. + """ + if not isinstance(name, bytes): + name = name.encode('ascii', 'replace') + try: + # This is preferred to using ctypes to try and call prctl() when possible. 
+ with open('/proc/self/comm', 'wb') as proc_comm: + proc_comm.write(name[:15]) + except EnvironmentError: + try: + import ctypes # pylint: disable=g-import-not-at-top + except ImportError: + return # No ctypes. + try: + libc = ctypes.CDLL('libc.so.6') + except EnvironmentError: + return # No libc.so.6. + pr_set_name = ctypes.c_ulong(15) # linux/prctl.h PR_SET_NAME value. + zero = ctypes.c_ulong(0) + try: + libc.prctl(pr_set_name, name, zero, zero, zero) + # Ignore the prctl return value. Nothing we can do if it errored. + except AttributeError: + return # No prctl. diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/__init__.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..21e05c477ee3a983c37805c6671c9a8894d49d4b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/__init__.py @@ -0,0 +1,225 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""This package is used to define and parse command line flags. + +This package defines a *distributed* flag-definition policy: rather than +an application having to define all flags in or near main(), each Python +module defines flags that are useful to it. When one Python module +imports another, it gains access to the other's flags. (This is +implemented by having all modules share a common, global registry object +containing all the flag information.) + +Flags are defined through the use of one of the DEFINE_xxx functions. +The specific function used determines how the flag is parsed, checked, +and optionally type-converted, when it's seen on the command line. +""" + +import getopt +import os +import re +import sys +import types +import warnings + +from absl.flags import _argument_parser +from absl.flags import _defines +from absl.flags import _exceptions +from absl.flags import _flag +from absl.flags import _flagvalues +from absl.flags import _helpers +from absl.flags import _validators + +__all__ = ( + 'DEFINE', + 'DEFINE_flag', + 'DEFINE_string', + 'DEFINE_boolean', + 'DEFINE_bool', + 'DEFINE_float', + 'DEFINE_integer', + 'DEFINE_enum', + 'DEFINE_enum_class', + 'DEFINE_list', + 'DEFINE_spaceseplist', + 'DEFINE_multi', + 'DEFINE_multi_string', + 'DEFINE_multi_integer', + 'DEFINE_multi_float', + 'DEFINE_multi_enum', + 'DEFINE_multi_enum_class', + 'DEFINE_alias', + # Flag validators. + 'register_validator', + 'validator', + 'register_multi_flags_validator', + 'multi_flags_validator', + 'mark_flag_as_required', + 'mark_flags_as_required', + 'mark_flags_as_mutual_exclusive', + 'mark_bool_flags_as_mutual_exclusive', + # Flag modifiers. + 'set_default', + 'override_value', + # Key flag related functions. + 'declare_key_flag', + 'adopt_module_key_flags', + 'disclaim_key_flags', + # Module exceptions. 
+ 'Error', + 'CantOpenFlagFileError', + 'DuplicateFlagError', + 'IllegalFlagValueError', + 'UnrecognizedFlagError', + 'UnparsedFlagAccessError', + 'ValidationError', + 'FlagNameConflictsWithMethodError', + # Public classes. + 'Flag', + 'BooleanFlag', + 'EnumFlag', + 'EnumClassFlag', + 'MultiFlag', + 'MultiEnumClassFlag', + 'FlagHolder', + 'FlagValues', + 'ArgumentParser', + 'BooleanParser', + 'EnumParser', + 'EnumClassParser', + 'ArgumentSerializer', + 'FloatParser', + 'IntegerParser', + 'BaseListParser', + 'ListParser', + 'ListSerializer', + 'EnumClassListSerializer', + 'CsvListSerializer', + 'WhitespaceSeparatedListParser', + 'EnumClassSerializer', + # Helper functions. + 'get_help_width', + 'text_wrap', + 'flag_dict_to_args', + 'doc_to_help', + # The global FlagValues instance. + 'FLAGS', +) + +# Initialize the FLAGS_MODULE as early as possible. +# It's only used by adopt_module_key_flags to take SPECIAL_FLAGS into account. +_helpers.FLAGS_MODULE = sys.modules[__name__] + +# Add current module to disclaimed module ids. +_helpers.disclaim_module_ids.add(id(sys.modules[__name__])) + +# DEFINE functions. They are explained in more details in the module doc string. +# pylint: disable=invalid-name +DEFINE = _defines.DEFINE +DEFINE_flag = _defines.DEFINE_flag +DEFINE_string = _defines.DEFINE_string +DEFINE_boolean = _defines.DEFINE_boolean +DEFINE_bool = DEFINE_boolean # Match C++ API. +DEFINE_float = _defines.DEFINE_float +DEFINE_integer = _defines.DEFINE_integer +DEFINE_enum = _defines.DEFINE_enum +DEFINE_enum_class = _defines.DEFINE_enum_class +DEFINE_list = _defines.DEFINE_list +DEFINE_spaceseplist = _defines.DEFINE_spaceseplist +DEFINE_multi = _defines.DEFINE_multi +DEFINE_multi_string = _defines.DEFINE_multi_string +DEFINE_multi_integer = _defines.DEFINE_multi_integer +DEFINE_multi_float = _defines.DEFINE_multi_float +DEFINE_multi_enum = _defines.DEFINE_multi_enum +DEFINE_multi_enum_class = _defines.DEFINE_multi_enum_class +DEFINE_alias = _defines.DEFINE_alias +# pylint: enable=invalid-name + +# Flag validators. +register_validator = _validators.register_validator +validator = _validators.validator +register_multi_flags_validator = _validators.register_multi_flags_validator +multi_flags_validator = _validators.multi_flags_validator +mark_flag_as_required = _validators.mark_flag_as_required +mark_flags_as_required = _validators.mark_flags_as_required +mark_flags_as_mutual_exclusive = _validators.mark_flags_as_mutual_exclusive +mark_bool_flags_as_mutual_exclusive = _validators.mark_bool_flags_as_mutual_exclusive + +# Flag modifiers. +set_default = _defines.set_default +override_value = _defines.override_value + +# Key flag related functions. +declare_key_flag = _defines.declare_key_flag +adopt_module_key_flags = _defines.adopt_module_key_flags +disclaim_key_flags = _defines.disclaim_key_flags + +# Module exceptions. +# pylint: disable=invalid-name +Error = _exceptions.Error +CantOpenFlagFileError = _exceptions.CantOpenFlagFileError +DuplicateFlagError = _exceptions.DuplicateFlagError +IllegalFlagValueError = _exceptions.IllegalFlagValueError +UnrecognizedFlagError = _exceptions.UnrecognizedFlagError +UnparsedFlagAccessError = _exceptions.UnparsedFlagAccessError +ValidationError = _exceptions.ValidationError +FlagNameConflictsWithMethodError = _exceptions.FlagNameConflictsWithMethodError + +# Public classes. 
+Flag = _flag.Flag +BooleanFlag = _flag.BooleanFlag +EnumFlag = _flag.EnumFlag +EnumClassFlag = _flag.EnumClassFlag +MultiFlag = _flag.MultiFlag +MultiEnumClassFlag = _flag.MultiEnumClassFlag +FlagHolder = _flagvalues.FlagHolder +FlagValues = _flagvalues.FlagValues +ArgumentParser = _argument_parser.ArgumentParser +BooleanParser = _argument_parser.BooleanParser +EnumParser = _argument_parser.EnumParser +EnumClassParser = _argument_parser.EnumClassParser +ArgumentSerializer = _argument_parser.ArgumentSerializer +FloatParser = _argument_parser.FloatParser +IntegerParser = _argument_parser.IntegerParser +BaseListParser = _argument_parser.BaseListParser +ListParser = _argument_parser.ListParser +ListSerializer = _argument_parser.ListSerializer +EnumClassListSerializer = _argument_parser.EnumClassListSerializer +CsvListSerializer = _argument_parser.CsvListSerializer +WhitespaceSeparatedListParser = _argument_parser.WhitespaceSeparatedListParser +EnumClassSerializer = _argument_parser.EnumClassSerializer +# pylint: enable=invalid-name + +# Helper functions. +get_help_width = _helpers.get_help_width +text_wrap = _helpers.text_wrap +flag_dict_to_args = _helpers.flag_dict_to_args +doc_to_help = _helpers.doc_to_help + +# Special flags. +_helpers.SPECIAL_FLAGS = FlagValues() + +DEFINE_string( + 'flagfile', '', + 'Insert flag definitions from the given file into the command line.', + _helpers.SPECIAL_FLAGS) # pytype: disable=wrong-arg-types + +DEFINE_string('undefok', '', + 'comma-separated list of flag names that it is okay to specify ' + 'on the command line even if the program does not define a flag ' + 'with that name. IMPORTANT: flags in this list that have ' + 'arguments MUST use the --flag=value format.', + _helpers.SPECIAL_FLAGS) # pytype: disable=wrong-arg-types + +#: The global FlagValues instance. 
+FLAGS = _flagvalues.FLAGS diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..702f99e158853e2dc2ab546e1e8b73bffa772388 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_flag.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_flag.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..658a66bc393dbbc495d0327c8f9675621a3e1122 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_flag.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_helpers.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91a6f890ee9e93bef5f37b6ca880c4ad0bf34d8d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_helpers.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_validators.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_validators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6ca78ae270c90ed6acc408254c44b57883cff3f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/_validators.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/argparse_flags.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/argparse_flags.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7530b77f5b252a6417b53f53195b6eb231d452eb Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/flags/__pycache__/argparse_flags.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_argument_parser.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_argument_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..13dc640d025972baf4b395f575feeae539329c10 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_argument_parser.py @@ -0,0 +1,638 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Contains base classes used to parse and convert arguments. + +Do NOT import this module directly. Import the flags package and use the +aliases defined at the package level instead. 
+""" + +import collections +import csv +import enum +import io +import string +from typing import Generic, List, Iterable, Optional, Sequence, Text, Type, TypeVar, Union +from xml.dom import minidom + +from absl.flags import _helpers + +_T = TypeVar('_T') +_ET = TypeVar('_ET', bound=enum.Enum) +_N = TypeVar('_N', int, float) + + +def _is_integer_type(instance): + """Returns True if instance is an integer, and not a bool.""" + return (isinstance(instance, int) and + not isinstance(instance, bool)) + + +class _ArgumentParserCache(type): + """Metaclass used to cache and share argument parsers among flags.""" + + _instances = {} + + def __call__(cls, *args, **kwargs): + """Returns an instance of the argument parser cls. + + This method overrides behavior of the __new__ methods in + all subclasses of ArgumentParser (inclusive). If an instance + for cls with the same set of arguments exists, this instance is + returned, otherwise a new instance is created. + + If any keyword arguments are defined, or the values in args + are not hashable, this method always returns a new instance of + cls. + + Args: + *args: Positional initializer arguments. + **kwargs: Initializer keyword arguments. + + Returns: + An instance of cls, shared or new. + """ + if kwargs: + return type.__call__(cls, *args, **kwargs) + else: + instances = cls._instances + key = (cls,) + tuple(args) + try: + return instances[key] + except KeyError: + # No cache entry for key exists, create a new one. + return instances.setdefault(key, type.__call__(cls, *args)) + except TypeError: + # An object in args cannot be hashed, always return + # a new instance. + return type.__call__(cls, *args) + + +class ArgumentParser(Generic[_T], metaclass=_ArgumentParserCache): + """Base class used to parse and convert arguments. + + The :meth:`parse` method checks to make sure that the string argument is a + legal value and convert it to a native type. If the value cannot be + converted, it should throw a ``ValueError`` exception with a human + readable explanation of why the value is illegal. + + Subclasses should also define a syntactic_help string which may be + presented to the user to describe the form of the legal values. + + Argument parser classes must be stateless, since instances are cached + and shared between flags. Initializer arguments are allowed, but all + member variables must be derived from initializer arguments only. + """ + + syntactic_help: Text = '' + + def parse(self, argument: Text) -> Optional[_T]: + """Parses the string argument and returns the native value. + + By default it returns its argument unmodified. + + Args: + argument: string argument passed in the commandline. + + Raises: + ValueError: Raised when it fails to parse the argument. + TypeError: Raised when the argument has the wrong type. + + Returns: + The parsed value in native type. + """ + if not isinstance(argument, str): + raise TypeError('flag value must be a string, found "{}"'.format( + type(argument))) + return argument + + def flag_type(self) -> Text: + """Returns a string representing the type of the flag.""" + return 'string' + + def _custom_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + """Returns a list of minidom.Element to add additional flag information. + + Args: + doc: minidom.Document, the DOM document it should create nodes from. + """ + del doc # Unused. 
+ return [] + + +class ArgumentSerializer(Generic[_T]): + """Base class for generating string representations of a flag value.""" + + def serialize(self, value: _T) -> Text: + """Returns a serialized string of the value.""" + return str(value) + + +class NumericParser(ArgumentParser[_N]): + """Parser of numeric values. + + Parsed value may be bounded to a given upper and lower bound. + """ + + lower_bound: Optional[_N] + upper_bound: Optional[_N] + + def is_outside_bounds(self, val: _N) -> bool: + """Returns whether the value is outside the bounds or not.""" + return ((self.lower_bound is not None and val < self.lower_bound) or + (self.upper_bound is not None and val > self.upper_bound)) + + def parse(self, argument: Text) -> _N: + """See base class.""" + val = self.convert(argument) + if self.is_outside_bounds(val): + raise ValueError('%s is not %s' % (val, self.syntactic_help)) + return val + + def _custom_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + elements = [] + if self.lower_bound is not None: + elements.append(_helpers.create_xml_dom_element( + doc, 'lower_bound', self.lower_bound)) + if self.upper_bound is not None: + elements.append(_helpers.create_xml_dom_element( + doc, 'upper_bound', self.upper_bound)) + return elements + + def convert(self, argument: Text) -> _N: + """Returns the correct numeric value of argument. + + Subclass must implement this method, and raise TypeError if argument is not + string or has the right numeric type. + + Args: + argument: string argument passed in the commandline, or the numeric type. + + Raises: + TypeError: Raised when argument is not a string or the right numeric type. + ValueError: Raised when failed to convert argument to the numeric value. + """ + raise NotImplementedError + + +class FloatParser(NumericParser[float]): + """Parser of floating point values. + + Parsed value may be bounded to a given upper and lower bound. + """ + number_article = 'a' + number_name = 'number' + syntactic_help = ' '.join((number_article, number_name)) + + def __init__( + self, + lower_bound: Optional[float] = None, + upper_bound: Optional[float] = None, + ) -> None: + super(FloatParser, self).__init__() + self.lower_bound = lower_bound + self.upper_bound = upper_bound + sh = self.syntactic_help + if lower_bound is not None and upper_bound is not None: + sh = ('%s in the range [%s, %s]' % (sh, lower_bound, upper_bound)) + elif lower_bound == 0: + sh = 'a non-negative %s' % self.number_name + elif upper_bound == 0: + sh = 'a non-positive %s' % self.number_name + elif upper_bound is not None: + sh = '%s <= %s' % (self.number_name, upper_bound) + elif lower_bound is not None: + sh = '%s >= %s' % (self.number_name, lower_bound) + self.syntactic_help = sh + + def convert(self, argument: Union[int, float, str]) -> float: + """Returns the float value of argument.""" + if (_is_integer_type(argument) or isinstance(argument, float) or + isinstance(argument, str)): + return float(argument) + else: + raise TypeError( + 'Expect argument to be a string, int, or float, found {}'.format( + type(argument))) + + def flag_type(self) -> Text: + """See base class.""" + return 'float' + + +class IntegerParser(NumericParser[int]): + """Parser of an integer value. + + Parsed value may be bounded to a given upper and lower bound. 
+ """ + number_article = 'an' + number_name = 'integer' + syntactic_help = ' '.join((number_article, number_name)) + + def __init__( + self, lower_bound: Optional[int] = None, upper_bound: Optional[int] = None + ) -> None: + super(IntegerParser, self).__init__() + self.lower_bound = lower_bound + self.upper_bound = upper_bound + sh = self.syntactic_help + if lower_bound is not None and upper_bound is not None: + sh = ('%s in the range [%s, %s]' % (sh, lower_bound, upper_bound)) + elif lower_bound == 1: + sh = 'a positive %s' % self.number_name + elif upper_bound == -1: + sh = 'a negative %s' % self.number_name + elif lower_bound == 0: + sh = 'a non-negative %s' % self.number_name + elif upper_bound == 0: + sh = 'a non-positive %s' % self.number_name + elif upper_bound is not None: + sh = '%s <= %s' % (self.number_name, upper_bound) + elif lower_bound is not None: + sh = '%s >= %s' % (self.number_name, lower_bound) + self.syntactic_help = sh + + def convert(self, argument: Union[int, Text]) -> int: + """Returns the int value of argument.""" + if _is_integer_type(argument): + return argument + elif isinstance(argument, str): + base = 10 + if len(argument) > 2 and argument[0] == '0': + if argument[1] == 'o': + base = 8 + elif argument[1] == 'x': + base = 16 + return int(argument, base) + else: + raise TypeError('Expect argument to be a string or int, found {}'.format( + type(argument))) + + def flag_type(self) -> Text: + """See base class.""" + return 'int' + + +class BooleanParser(ArgumentParser[bool]): + """Parser of boolean values.""" + + def parse(self, argument: Union[Text, int]) -> bool: + """See base class.""" + if isinstance(argument, str): + if argument.lower() in ('true', 't', '1'): + return True + elif argument.lower() in ('false', 'f', '0'): + return False + else: + raise ValueError('Non-boolean argument to boolean flag', argument) + elif isinstance(argument, int): + # Only allow bool or integer 0, 1. + # Note that float 1.0 == True, 0.0 == False. + bool_value = bool(argument) + if argument == bool_value: + return bool_value + else: + raise ValueError('Non-boolean argument to boolean flag', argument) + + raise TypeError('Non-boolean argument to boolean flag', argument) + + def flag_type(self) -> Text: + """See base class.""" + return 'bool' + + +class EnumParser(ArgumentParser[Text]): + """Parser of a string enum value (a string value from a given set).""" + + def __init__( + self, enum_values: Iterable[Text], case_sensitive: bool = True + ) -> None: + """Initializes EnumParser. + + Args: + enum_values: [str], a non-empty list of string values in the enum. + case_sensitive: bool, whether or not the enum is to be case-sensitive. + + Raises: + ValueError: When enum_values is empty. + """ + if not enum_values: + raise ValueError( + 'enum_values cannot be empty, found "{}"'.format(enum_values)) + if isinstance(enum_values, str): + raise ValueError( + 'enum_values cannot be a str, found "{}"'.format(enum_values) + ) + super(EnumParser, self).__init__() + self.enum_values = list(enum_values) + self.case_sensitive = case_sensitive + + def parse(self, argument: Text) -> Text: + """Determines validity of argument and returns the correct element of enum. + + Args: + argument: str, the supplied flag value. + + Returns: + The first matching element from enum_values. + + Raises: + ValueError: Raised when argument didn't match anything in enum. 
+ """ + if self.case_sensitive: + if argument not in self.enum_values: + raise ValueError('value should be one of <%s>' % + '|'.join(self.enum_values)) + else: + return argument + else: + if argument.upper() not in [value.upper() for value in self.enum_values]: + raise ValueError('value should be one of <%s>' % + '|'.join(self.enum_values)) + else: + return [value for value in self.enum_values + if value.upper() == argument.upper()][0] + + def flag_type(self) -> Text: + """See base class.""" + return 'string enum' + + +class EnumClassParser(ArgumentParser[_ET]): + """Parser of an Enum class member.""" + + def __init__( + self, enum_class: Type[_ET], case_sensitive: bool = True + ) -> None: + """Initializes EnumParser. + + Args: + enum_class: class, the Enum class with all possible flag values. + case_sensitive: bool, whether or not the enum is to be case-sensitive. If + False, all member names must be unique when case is ignored. + + Raises: + TypeError: When enum_class is not a subclass of Enum. + ValueError: When enum_class is empty. + """ + if not issubclass(enum_class, enum.Enum): + raise TypeError('{} is not a subclass of Enum.'.format(enum_class)) + if not enum_class.__members__: + raise ValueError('enum_class cannot be empty, but "{}" is empty.' + .format(enum_class)) + if not case_sensitive: + members = collections.Counter( + name.lower() for name in enum_class.__members__) + duplicate_keys = { + member for member, count in members.items() if count > 1 + } + if duplicate_keys: + raise ValueError( + 'Duplicate enum values for {} using case_sensitive=False'.format( + duplicate_keys)) + + super(EnumClassParser, self).__init__() + self.enum_class = enum_class + self._case_sensitive = case_sensitive + if case_sensitive: + self._member_names = tuple(enum_class.__members__) + else: + self._member_names = tuple( + name.lower() for name in enum_class.__members__) + + @property + def member_names(self) -> Sequence[Text]: + """The accepted enum names, in lowercase if not case sensitive.""" + return self._member_names + + def parse(self, argument: Union[_ET, Text]) -> _ET: + """Determines validity of argument and returns the correct element of enum. + + Args: + argument: str or Enum class member, the supplied flag value. + + Returns: + The first matching Enum class member in Enum class. + + Raises: + ValueError: Raised when argument didn't match anything in enum. + """ + if isinstance(argument, self.enum_class): + return argument # pytype: disable=bad-return-type + elif not isinstance(argument, str): + raise ValueError( + '{} is not an enum member or a name of a member in {}'.format( + argument, self.enum_class)) + key = EnumParser( + self._member_names, case_sensitive=self._case_sensitive).parse(argument) + if self._case_sensitive: + return self.enum_class[key] + else: + # If EnumParser.parse() return a value, we're guaranteed to find it + # as a member of the class + return next(value for name, value in self.enum_class.__members__.items() + if name.lower() == key.lower()) + + def flag_type(self) -> Text: + """See base class.""" + return 'enum class' + + +class ListSerializer(Generic[_T], ArgumentSerializer[List[_T]]): + + def __init__(self, list_sep: Text) -> None: + self.list_sep = list_sep + + def serialize(self, value: List[_T]) -> Text: + """See base class.""" + return self.list_sep.join([str(x) for x in value]) + + +class EnumClassListSerializer(ListSerializer[_ET]): + """A serializer for :class:`MultiEnumClass` flags. 
+ + This serializer simply joins the output of `EnumClassSerializer` using a + provided separator. + """ + + def __init__(self, list_sep: Text, **kwargs) -> None: + """Initializes EnumClassListSerializer. + + Args: + list_sep: String to be used as a separator when serializing + **kwargs: Keyword arguments to the `EnumClassSerializer` used to serialize + individual values. + """ + super(EnumClassListSerializer, self).__init__(list_sep) + self._element_serializer = EnumClassSerializer(**kwargs) + + def serialize(self, value: Union[_ET, List[_ET]]) -> Text: + """See base class.""" + if isinstance(value, list): + return self.list_sep.join( + self._element_serializer.serialize(x) for x in value) + else: + return self._element_serializer.serialize(value) + + +class CsvListSerializer(ListSerializer[Text]): + + def serialize(self, value: List[Text]) -> Text: + """Serializes a list as a CSV string or unicode.""" + output = io.StringIO() + writer = csv.writer(output, delimiter=self.list_sep) + writer.writerow([str(x) for x in value]) + serialized_value = output.getvalue().strip() + + # We need the returned value to be pure ascii or Unicodes so that + # when the xml help is generated they are usefully encodable. + return str(serialized_value) + + +class EnumClassSerializer(ArgumentSerializer[_ET]): + """Class for generating string representations of an enum class flag value.""" + + def __init__(self, lowercase: bool) -> None: + """Initializes EnumClassSerializer. + + Args: + lowercase: If True, enum member names are lowercased during serialization. + """ + self._lowercase = lowercase + + def serialize(self, value: _ET) -> Text: + """Returns a serialized string of the Enum class value.""" + as_string = str(value.name) + return as_string.lower() if self._lowercase else as_string + + +class BaseListParser(ArgumentParser): + """Base class for a parser of lists of strings. + + To extend, inherit from this class; from the subclass ``__init__``, call:: + + super().__init__(token, name) + + where token is a character used to tokenize, and name is a description + of the separator. + """ + + def __init__( + self, token: Optional[Text] = None, name: Optional[Text] = None + ) -> None: + assert name + super(BaseListParser, self).__init__() + self._token = token + self._name = name + self.syntactic_help = 'a %s separated list' % self._name + + def parse(self, argument: Text) -> List[Text]: + """See base class.""" + if isinstance(argument, list): + return argument + elif not argument: + return [] + else: + return [s.strip() for s in argument.split(self._token)] + + def flag_type(self) -> Text: + """See base class.""" + return '%s separated list of strings' % self._name + + +class ListParser(BaseListParser): + """Parser for a comma-separated list of strings.""" + + def __init__(self) -> None: + super(ListParser, self).__init__(',', 'comma') + + def parse(self, argument: Union[Text, List[Text]]) -> List[Text]: + """Parses argument as comma-separated list of strings.""" + if isinstance(argument, list): + return argument + elif not argument: + return [] + else: + try: + return [s.strip() for s in list(csv.reader([argument], strict=True))[0]] + except csv.Error as e: + # Provide a helpful report for case like + # --listflag="$(printf 'hello,\nworld')" + # IOW, list flag values containing naked newlines. This error + # was previously "reported" by allowing csv.Error to + # propagate. 
+ raise ValueError('Unable to parse the value %r as a %s: %s' + % (argument, self.flag_type(), e)) + + def _custom_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + elements = super(ListParser, self)._custom_xml_dom_elements(doc) + elements.append(_helpers.create_xml_dom_element( + doc, 'list_separator', repr(','))) + return elements + + +class WhitespaceSeparatedListParser(BaseListParser): + """Parser for a whitespace-separated list of strings.""" + + def __init__(self, comma_compat: bool = False) -> None: + """Initializer. + + Args: + comma_compat: bool, whether to support comma as an additional separator. + If False then only whitespace is supported. This is intended only for + backwards compatibility with flags that used to be comma-separated. + """ + self._comma_compat = comma_compat + name = 'whitespace or comma' if self._comma_compat else 'whitespace' + super(WhitespaceSeparatedListParser, self).__init__(None, name) + + def parse(self, argument: Union[Text, List[Text]]) -> List[Text]: + """Parses argument as whitespace-separated list of strings. + + It also parses argument as comma-separated list of strings if requested. + + Args: + argument: string argument passed in the commandline. + + Returns: + [str], the parsed flag value. + """ + if isinstance(argument, list): + return argument + elif not argument: + return [] + else: + if self._comma_compat: + argument = argument.replace(',', ' ') + return argument.split() + + def _custom_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + elements = super(WhitespaceSeparatedListParser, self + )._custom_xml_dom_elements(doc) + separators = list(string.whitespace) + if self._comma_compat: + separators.append(',') + separators.sort() + for sep_char in separators: + elements.append(_helpers.create_xml_dom_element( + doc, 'list_separator', repr(sep_char))) + return elements diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_defines.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_defines.py new file mode 100644 index 0000000000000000000000000000000000000000..c7b102f21aec6533d6960c04aa63765687e1af7a --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_defines.py @@ -0,0 +1,1686 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""This modules contains flags DEFINE functions. + +Do NOT import this module directly. Import the flags package and use the +aliases defined at the package level instead. 
+""" + +import enum +import sys +import types +import typing +from typing import Text, List, Any, TypeVar, Optional, Union, Type, Iterable, overload + +from absl.flags import _argument_parser +from absl.flags import _exceptions +from absl.flags import _flag +from absl.flags import _flagvalues +from absl.flags import _helpers +from absl.flags import _validators + +_helpers.disclaim_module_ids.add(id(sys.modules[__name__])) + +_T = TypeVar('_T') +_ET = TypeVar('_ET', bound=enum.Enum) + + +def _register_bounds_validator_if_needed(parser, name, flag_values): + """Enforces lower and upper bounds for numeric flags. + + Args: + parser: NumericParser (either FloatParser or IntegerParser), provides lower + and upper bounds, and help text to display. + name: str, name of the flag + flag_values: FlagValues. + """ + if parser.lower_bound is not None or parser.upper_bound is not None: + + def checker(value): + if value is not None and parser.is_outside_bounds(value): + message = '%s is not %s' % (value, parser.syntactic_help) + raise _exceptions.ValidationError(message) + return True + + _validators.register_validator(name, checker, flag_values=flag_values) + + +@overload +def DEFINE( # pylint: disable=invalid-name + parser: _argument_parser.ArgumentParser[_T], + name: Text, + default: Any, + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + serializer: Optional[_argument_parser.ArgumentSerializer[_T]] = ..., + module_name: Optional[Text] = ..., + required: 'typing.Literal[True]' = ..., + **args: Any +) -> _flagvalues.FlagHolder[_T]: + ... + + +@overload +def DEFINE( # pylint: disable=invalid-name + parser: _argument_parser.ArgumentParser[_T], + name: Text, + default: Optional[Any], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + serializer: Optional[_argument_parser.ArgumentSerializer[_T]] = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[_T]]: + ... + + +def DEFINE( # pylint: disable=invalid-name + parser, + name, + default, + help, # pylint: disable=redefined-builtin + flag_values=_flagvalues.FLAGS, + serializer=None, + module_name=None, + required=False, + **args): + """Registers a generic Flag object. + + NOTE: in the docstrings of all DEFINE* functions, "registers" is short + for "creates a new flag and registers it". + + Auxiliary function: clients should use the specialized ``DEFINE_`` + function instead. + + Args: + parser: :class:`ArgumentParser`, used to parse the flag arguments. + name: str, the flag name. + default: The default value of the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + serializer: :class:`ArgumentSerializer`, the flag serializer instance. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. + + Returns: + a handle to defined flag. 
+ """ + return DEFINE_flag( + _flag.Flag(parser, serializer, name, default, help, **args), + flag_values, + module_name, + required=True if required else False, + ) + + +@overload +def DEFINE_flag( # pylint: disable=invalid-name + flag: _flag.Flag[_T], + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: 'typing.Literal[True]' = ..., +) -> _flagvalues.FlagHolder[_T]: + ... + + +@overload +def DEFINE_flag( # pylint: disable=invalid-name + flag: _flag.Flag[_T], + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., +) -> _flagvalues.FlagHolder[Optional[_T]]: + ... + + +def DEFINE_flag( # pylint: disable=invalid-name + flag, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False): + """Registers a :class:`Flag` object with a :class:`FlagValues` object. + + By default, the global :const:`FLAGS` ``FlagValue`` object is used. + + Typical users will use one of the more specialized DEFINE_xxx + functions, such as :func:`DEFINE_string` or :func:`DEFINE_integer`. But + developers who need to create :class:`Flag` objects themselves should use + this function to register their flags. + + Args: + flag: :class:`Flag`, a flag that is key to the module. + flag_values: :class:`FlagValues`, the ``FlagValues`` instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + + Returns: + a handle to defined flag. + """ + if required and flag.default is not None: + raise ValueError('Required flag --%s cannot have a non-None default' % + flag.name) + # Copying the reference to flag_values prevents pychecker warnings. + fv = flag_values + fv[flag.name] = flag + # Tell flag_values who's defining the flag. + if module_name: + module = sys.modules.get(module_name) + else: + module, module_name = _helpers.get_calling_module_object_and_name() + flag_values.register_flag_by_module(module_name, flag) + flag_values.register_flag_by_module_id(id(module), flag) + if required: + _validators.mark_flag_as_required(flag.name, fv) + ensure_non_none_value = (flag.default is not None) or required + return _flagvalues.FlagHolder( + fv, flag, ensure_non_none_value=ensure_non_none_value) + + +def set_default(flag_holder: _flagvalues.FlagHolder[_T], value: _T) -> None: + """Changes the default value of the provided flag object. + + The flag's current value is also updated if the flag is currently using + the default value, i.e. not specified in the command line, and not set + by FLAGS.name = value. + + Args: + flag_holder: FlagHolder, the flag to modify. + value: The new default value. + + Raises: + IllegalFlagValueError: Raised when value is not valid. + """ + flag_holder._flagvalues.set_default(flag_holder.name, value) # pylint: disable=protected-access + + +def override_value(flag_holder: _flagvalues.FlagHolder[_T], value: _T) -> None: + """Overrides the value of the provided flag. + + This value takes precedent over the default value and, when called after flag + parsing, any value provided at the command line. + + Args: + flag_holder: FlagHolder, the flag to modify. + value: The new value. + + Raises: + IllegalFlagValueError: The value did not pass the flag parser or validators. 
+ """ + fv = flag_holder._flagvalues # pylint: disable=protected-access + # Ensure the new value satisfies the flag's parser while avoiding side + # effects of calling parse(). + parsed = fv[flag_holder.name]._parse(value) # pylint: disable=protected-access + if parsed != value: + raise _exceptions.IllegalFlagValueError( + 'flag %s: parsed value %r not equal to original %r' + % (flag_holder.name, parsed, value) + ) + setattr(fv, flag_holder.name, value) + + +def _internal_declare_key_flags( + flag_names: List[str], + flag_values: _flagvalues.FlagValues = _flagvalues.FLAGS, + key_flag_values: Optional[_flagvalues.FlagValues] = None, +) -> None: + """Declares a flag as key for the calling module. + + Internal function. User code should call declare_key_flag or + adopt_module_key_flags instead. + + Args: + flag_names: [str], a list of names of already-registered Flag objects. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flags listed in flag_names have registered (the value of the flag_values + argument from the ``DEFINE_*`` calls that defined those flags). This + should almost never need to be overridden. + key_flag_values: :class:`FlagValues`, the FlagValues instance that (among + possibly many other things) keeps track of the key flags for each module. + Default ``None`` means "same as flag_values". This should almost never + need to be overridden. + + Raises: + UnrecognizedFlagError: Raised when the flag is not defined. + """ + key_flag_values = key_flag_values or flag_values + + module = _helpers.get_calling_module() + + for flag_name in flag_names: + key_flag_values.register_key_flag_for_module(module, flag_values[flag_name]) + + +def declare_key_flag( + flag_name: Union[Text, _flagvalues.FlagHolder], + flag_values: _flagvalues.FlagValues = _flagvalues.FLAGS, +) -> None: + """Declares one flag as key to the current module. + + Key flags are flags that are deemed really important for a module. + They are important when listing help messages; e.g., if the + --helpshort command-line flag is used, then only the key flags of the + main module are listed (instead of all flags, as in the case of + --helpfull). + + Sample usage:: + + flags.declare_key_flag('flag_1') + + Args: + flag_name: str | :class:`FlagHolder`, the name or holder of an already + declared flag. (Redeclaring flags as key, including flags implicitly key + because they were declared in this module, is a no-op.) + Positional-only parameter. + flag_values: :class:`FlagValues`, the FlagValues instance in which the + flag will be declared as a key flag. This should almost never need to be + overridden. + + Raises: + ValueError: Raised if flag_name not defined as a Python flag. + """ + flag_name, flag_values = _flagvalues.resolve_flag_ref(flag_name, flag_values) + if flag_name in _helpers.SPECIAL_FLAGS: + # Take care of the special flags, e.g., --flagfile, --undefok. + # These flags are defined in SPECIAL_FLAGS, and are treated + # specially during flag parsing, taking precedence over the + # user-defined flags. + _internal_declare_key_flags([flag_name], + flag_values=_helpers.SPECIAL_FLAGS, + key_flag_values=flag_values) + return + try: + _internal_declare_key_flags([flag_name], flag_values=flag_values) + except KeyError: + raise ValueError('Flag --%s is undefined. To set a flag as a key flag ' + 'first define it in Python.' 
% flag_name) + + +def adopt_module_key_flags( + module: Any, flag_values: _flagvalues.FlagValues = _flagvalues.FLAGS +) -> None: + """Declares that all flags key to a module are key to the current module. + + Args: + module: module, the module object from which all key flags will be declared + as key flags to the current module. + flag_values: :class:`FlagValues`, the FlagValues instance in which the + flags will be declared as key flags. This should almost never need to be + overridden. + + Raises: + Error: Raised when given an argument that is a module name (a string), + instead of a module object. + """ + if not isinstance(module, types.ModuleType): + raise _exceptions.Error('Expected a module object, not %r.' % (module,)) + _internal_declare_key_flags( + [f.name for f in flag_values.get_key_flags_for_module(module.__name__)], + flag_values=flag_values) + # If module is this flag module, take _helpers.SPECIAL_FLAGS into account. + if module == _helpers.FLAGS_MODULE: + _internal_declare_key_flags( + # As we associate flags with get_calling_module_object_and_name(), the + # special flags defined in this module are incorrectly registered with + # a different module. So, we can't use get_key_flags_for_module. + # Instead, we take all flags from _helpers.SPECIAL_FLAGS (a private + # FlagValues, where no other module should register flags). + [_helpers.SPECIAL_FLAGS[name].name for name in _helpers.SPECIAL_FLAGS], + flag_values=_helpers.SPECIAL_FLAGS, + key_flag_values=flag_values) + + +def disclaim_key_flags() -> None: + """Declares that the current module will not define any more key flags. + + Normally, the module that calls the DEFINE_xxx functions claims the + flag to be its key flag. This is undesirable for modules that + define additional DEFINE_yyy functions with its own flag parsers and + serializers, since that module will accidentally claim flags defined + by DEFINE_yyy as its key flags. After calling this function, the + module disclaims flag definitions thereafter, so the key flags will + be correctly attributed to the caller of DEFINE_yyy. + + After calling this function, the module will not be able to define + any more flags. This function will affect all FlagValues objects. + """ + globals_for_caller = sys._getframe(1).f_globals # pylint: disable=protected-access + module, _ = _helpers.get_module_object_and_name(globals_for_caller) + _helpers.disclaim_module_ids.add(id(module)) + + +@overload +def DEFINE_string( # pylint: disable=invalid-name + name: Text, + default: Optional[Text], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[Text]: + ... + + +@overload +def DEFINE_string( # pylint: disable=invalid-name + name: Text, + default: None, + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[Text]]: + ... + + +@overload +def DEFINE_string( # pylint: disable=invalid-name + name: Text, + default: Text, + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Text]: + ... 
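# ---------------------------------------------------------------------------
# Illustrative usage sketch (editor's annotation, not part of the diffed
# absl/flags/_defines.py file): how the DEFINE_string / required-flag API
# declared above is typically consumed from application code, via the
# package-level aliases the module docstring points to. The flag names
# 'name' and 'model_path' are hypothetical examples.
from absl import app
from absl import flags

_NAME = flags.DEFINE_string('name', 'world', 'Who to greet.')
# required=True is only legal with a None default; DEFINE_flag above raises
# ValueError otherwise. A required flag must be supplied on the command line
# or flag parsing fails at startup.
_MODEL_PATH = flags.DEFINE_string(
    'model_path', None, 'Path to the model.', required=True)


def main(argv):
  del argv  # Unused.
  # FlagHolder.value yields the parsed value once app.run() has parsed argv.
  print('Hello, %s! Using model at %s.' % (_NAME.value, _MODEL_PATH.value))


if __name__ == '__main__':
  app.run(main)
# ---------------------------------------------------------------------------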
+ + +def DEFINE_string( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value can be any string.""" + parser = _argument_parser.ArgumentParser[str]() + serializer = _argument_parser.ArgumentSerializer[str]() + return DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=True if required else False, + **args, + ) + + +@overload +def DEFINE_boolean( # pylint: disable=invalid-name + name: Text, + default: Union[None, Text, bool, int], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[bool]: + ... + + +@overload +def DEFINE_boolean( # pylint: disable=invalid-name + name: Text, + default: None, + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[bool]]: + ... + + +@overload +def DEFINE_boolean( # pylint: disable=invalid-name + name: Text, + default: Union[Text, bool, int], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[bool]: + ... + + +def DEFINE_boolean( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False, + **args): + """Registers a boolean flag. + + Such a boolean flag does not take an argument. If a user wants to + specify a false value explicitly, the long option beginning with 'no' + must be used: i.e. --noflag + + This flag will have a value of None, True or False. None is possible + if default=None and the user does not specify the flag on the command + line. + + Args: + name: str, the flag name. + default: bool|str|None, the default value of the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + return DEFINE_flag( + _flag.BooleanFlag(name, default, help, **args), + flag_values, + module_name, + required=True if required else False, + ) + + +@overload +def DEFINE_float( # pylint: disable=invalid-name + name: Text, + default: Union[None, float, Text], + help: Optional[Text], # pylint: disable=redefined-builtin + lower_bound: Optional[float] = ..., + upper_bound: Optional[float] = ..., + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[float]: + ... 
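# ---------------------------------------------------------------------------
# Illustrative usage sketch (editor's annotation, not part of the diffed
# file): a boolean flag defined through the package-level alias. Per the
# DEFINE_boolean docstring above, the flag takes no argument on the command
# line; passing --debug sets it to True and --nodebug sets it to False. The
# flag name 'debug' is a hypothetical example.
from absl import app
from absl import flags

_DEBUG = flags.DEFINE_boolean('debug', False, 'Enable debug logging.')


def main(argv):
  del argv  # Unused.
  if _DEBUG.value:
    print('debug logging enabled')


if __name__ == '__main__':
  app.run(main)
# ---------------------------------------------------------------------------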
+ + +@overload +def DEFINE_float( # pylint: disable=invalid-name + name: Text, + default: None, + help: Optional[Text], # pylint: disable=redefined-builtin + lower_bound: Optional[float] = ..., + upper_bound: Optional[float] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[float]]: + ... + + +@overload +def DEFINE_float( # pylint: disable=invalid-name + name: Text, + default: Union[float, Text], + help: Optional[Text], # pylint: disable=redefined-builtin + lower_bound: Optional[float] = ..., + upper_bound: Optional[float] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[float]: + ... + + +def DEFINE_float( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value must be a float. + + If ``lower_bound`` or ``upper_bound`` are set, then this flag must be + within the given range. + + Args: + name: str, the flag name. + default: float|str|None, the default value of the flag. + help: str, the help message. + lower_bound: float, min value of the flag. + upper_bound: float, max value of the flag. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to :func:`DEFINE`. + + Returns: + a handle to defined flag. + """ + parser = _argument_parser.FloatParser(lower_bound, upper_bound) + serializer = _argument_parser.ArgumentSerializer() + result = DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=True if required else False, + **args, + ) + _register_bounds_validator_if_needed(parser, name, flag_values=flag_values) + return result + + +@overload +def DEFINE_integer( # pylint: disable=invalid-name + name: Text, + default: Union[None, int, Text], + help: Optional[Text], # pylint: disable=redefined-builtin + lower_bound: Optional[int] = ..., + upper_bound: Optional[int] = ..., + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[int]: + ... + + +@overload +def DEFINE_integer( # pylint: disable=invalid-name + name: Text, + default: None, + help: Optional[Text], # pylint: disable=redefined-builtin + lower_bound: Optional[int] = ..., + upper_bound: Optional[int] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[int]]: + ... + + +@overload +def DEFINE_integer( # pylint: disable=invalid-name + name: Text, + default: Union[int, Text], + help: Optional[Text], # pylint: disable=redefined-builtin + lower_bound: Optional[int] = ..., + upper_bound: Optional[int] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[int]: + ... + + +def DEFINE_integer( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value must be an integer. + + If ``lower_bound``, or ``upper_bound`` are set, then this flag must be + within the given range. + + Args: + name: str, the flag name. 
+ default: int|str|None, the default value of the flag. + help: str, the help message. + lower_bound: int, min value of the flag. + upper_bound: int, max value of the flag. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to :func:`DEFINE`. + + Returns: + a handle to defined flag. + """ + parser = _argument_parser.IntegerParser(lower_bound, upper_bound) + serializer = _argument_parser.ArgumentSerializer() + result = DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=True if required else False, + **args, + ) + _register_bounds_validator_if_needed(parser, name, flag_values=flag_values) + return result + + +@overload +def DEFINE_enum( # pylint: disable=invalid-name + name: Text, + default: Optional[Text], + enum_values: Iterable[Text], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[Text]: + ... + + +@overload +def DEFINE_enum( # pylint: disable=invalid-name + name: Text, + default: None, + enum_values: Iterable[Text], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[Text]]: + ... + + +@overload +def DEFINE_enum( # pylint: disable=invalid-name + name: Text, + default: Text, + enum_values: Iterable[Text], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Text]: + ... + + +def DEFINE_enum( # pylint: disable=invalid-name,redefined-builtin + name, + default, + enum_values, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False, + **args): + """Registers a flag whose value can be any string from enum_values. + + Instead of a string enum, prefer `DEFINE_enum_class`, which allows + defining enums from an `enum.Enum` class. + + Args: + name: str, the flag name. + default: str|None, the default value of the flag. + enum_values: [str], a non-empty list of strings with the possible values for + the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. + + Returns: + a handle to defined flag. 
+ """ + result = DEFINE_flag( + _flag.EnumFlag(name, default, help, enum_values, **args), + flag_values, + module_name, + required=True if required else False, + ) + return result + + +@overload +def DEFINE_enum_class( # pylint: disable=invalid-name + name: Text, + default: Union[None, _ET, Text], + enum_class: Type[_ET], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + case_sensitive: bool = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[_ET]: + ... + + +@overload +def DEFINE_enum_class( # pylint: disable=invalid-name + name: Text, + default: None, + enum_class: Type[_ET], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + case_sensitive: bool = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[_ET]]: + ... + + +@overload +def DEFINE_enum_class( # pylint: disable=invalid-name + name: Text, + default: Union[_ET, Text], + enum_class: Type[_ET], + help: Optional[Text], # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + case_sensitive: bool = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[_ET]: + ... + + +def DEFINE_enum_class( # pylint: disable=invalid-name,redefined-builtin + name, + default, + enum_class, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + case_sensitive=False, + required=False, + **args): + """Registers a flag whose value can be the name of enum members. + + Args: + name: str, the flag name. + default: Enum|str|None, the default value of the flag. + enum_class: class, the Enum class with all the possible values for the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: str, the name of the Python module declaring this flag. If not + provided, it will be computed using the stack trace of this call. + case_sensitive: bool, whether to map strings to members of the enum_class + without considering case. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: dict, the extra keyword args that are passed to ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + # NOTE: pytype fails if this is a direct return. + result = DEFINE_flag( + _flag.EnumClassFlag( + name, default, help, enum_class, case_sensitive=case_sensitive, **args + ), + flag_values, + module_name, + required=True if required else False, + ) + return result + + +@overload +def DEFINE_list( # pylint: disable=invalid-name + name: Text, + default: Union[None, Iterable[Text], Text], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +@overload +def DEFINE_list( # pylint: disable=invalid-name + name: Text, + default: None, + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[Text]]]: + ... 
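# ---------------------------------------------------------------------------
# Illustrative usage sketch (editor's annotation, not part of the diffed
# file): DEFINE_enum_class as defined above maps command-line strings onto
# members of an enum.Enum subclass; with the default case_sensitive=False,
# --color=GREEN and --color=green both resolve to Color.GREEN. The Color
# enum and the 'color' flag name are hypothetical examples.
import enum

from absl import app
from absl import flags


class Color(enum.Enum):
  RED = 'red'
  GREEN = 'green'
  BLUE = 'blue'


_COLOR = flags.DEFINE_enum_class('color', Color.RED, Color, 'Color to use.')


def main(argv):
  del argv  # Unused.
  # The parsed value is an actual Color member, not a string.
  assert isinstance(_COLOR.value, Color)
  print(_COLOR.value)


if __name__ == '__main__':
  app.run(main)
# ---------------------------------------------------------------------------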
+ + +@overload +def DEFINE_list( # pylint: disable=invalid-name + name: Text, + default: Union[Iterable[Text], Text], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +def DEFINE_list( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value is a comma-separated list of strings. + + The flag value is parsed with a CSV parser. + + Args: + name: str, the flag name. + default: list|str|None, the default value of the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + parser = _argument_parser.ListParser() + serializer = _argument_parser.CsvListSerializer(',') + return DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=True if required else False, + **args, + ) + + +@overload +def DEFINE_spaceseplist( # pylint: disable=invalid-name + name: Text, + default: Union[None, Iterable[Text], Text], + help: Text, # pylint: disable=redefined-builtin + comma_compat: bool = ..., + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +@overload +def DEFINE_spaceseplist( # pylint: disable=invalid-name + name: Text, + default: None, + help: Text, # pylint: disable=redefined-builtin + comma_compat: bool = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[Text]]]: + ... + + +@overload +def DEFINE_spaceseplist( # pylint: disable=invalid-name + name: Text, + default: Union[Iterable[Text], Text], + help: Text, # pylint: disable=redefined-builtin + comma_compat: bool = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +def DEFINE_spaceseplist( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + comma_compat=False, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value is a whitespace-separated list of strings. + + Any whitespace can be used as a separator. + + Args: + name: str, the flag name. + default: list|str|None, the default value of the flag. + help: str, the help message. + comma_compat: bool - Whether to support comma as an additional separator. If + false then only whitespace is supported. This is intended only for + backwards compatibility with flags that used to be comma-separated. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. 
+ """ + parser = _argument_parser.WhitespaceSeparatedListParser( + comma_compat=comma_compat) + serializer = _argument_parser.ListSerializer(' ') + return DEFINE( + parser, + name, + default, + help, + flag_values, + serializer, + required=True if required else False, + **args, + ) + + +@overload +def DEFINE_multi( # pylint: disable=invalid-name + parser: _argument_parser.ArgumentParser[_T], + serializer: _argument_parser.ArgumentSerializer[_T], + name: Text, + default: Iterable[_T], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[_T]]: + ... + + +@overload +def DEFINE_multi( # pylint: disable=invalid-name + parser: _argument_parser.ArgumentParser[_T], + serializer: _argument_parser.ArgumentSerializer[_T], + name: Text, + default: Union[None, _T], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[_T]]: + ... + + +@overload +def DEFINE_multi( # pylint: disable=invalid-name + parser: _argument_parser.ArgumentParser[_T], + serializer: _argument_parser.ArgumentSerializer[_T], + name: Text, + default: None, + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[_T]]]: + ... + + +@overload +def DEFINE_multi( # pylint: disable=invalid-name + parser: _argument_parser.ArgumentParser[_T], + serializer: _argument_parser.ArgumentSerializer[_T], + name: Text, + default: Iterable[_T], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[_T]]: + ... + + +@overload +def DEFINE_multi( # pylint: disable=invalid-name + parser: _argument_parser.ArgumentParser[_T], + serializer: _argument_parser.ArgumentSerializer[_T], + name: Text, + default: _T, + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[_T]]: + ... + + +def DEFINE_multi( # pylint: disable=invalid-name,redefined-builtin + parser, + serializer, + name, + default, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + required=False, + **args): + """Registers a generic MultiFlag that parses its args with a given parser. + + Auxiliary function. Normal users should NOT use it directly. + + Developers who need to create their own 'Parser' classes for options + which can appear multiple times can call this module function to + register their flags. + + Args: + parser: ArgumentParser, used to parse the flag arguments. + serializer: ArgumentSerializer, the flag serializer instance. + name: str, the flag name. + default: Union[Iterable[T], Text, None], the default value of the flag. If + the value is text, it will be parsed as if it was provided from the + command line. If the value is a non-string iterable, it will be iterated + over to create a shallow copy of the values. If it is None, it is left + as-is. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. 
This should almost never need to be overridden. + module_name: A string, the name of the Python module declaring this flag. If + not provided, it will be computed using the stack trace of this call. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + result = DEFINE_flag( + _flag.MultiFlag(parser, serializer, name, default, help, **args), + flag_values, + module_name, + required=True if required else False, + ) + return result + + +@overload +def DEFINE_multi_string( # pylint: disable=invalid-name + name: Text, + default: Union[None, Iterable[Text], Text], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +@overload +def DEFINE_multi_string( # pylint: disable=invalid-name + name: Text, + default: None, + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[Text]]]: + ... + + +@overload +def DEFINE_multi_string( # pylint: disable=invalid-name + name: Text, + default: Union[Iterable[Text], Text], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +def DEFINE_multi_string( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value can be a list of any strings. + + Use the flag on the command line multiple times to place multiple + string values into the list. The 'default' may be a single string + (which will be converted into a single-element list) or a list of + strings. + + + Args: + name: str, the flag name. + default: Union[Iterable[Text], Text, None], the default value of the flag; + see :func:`DEFINE_multi`. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + parser = _argument_parser.ArgumentParser() + serializer = _argument_parser.ArgumentSerializer() + return DEFINE_multi( + parser, + serializer, + name, + default, + help, + flag_values, + required=True if required else False, + **args, + ) + + +@overload +def DEFINE_multi_integer( # pylint: disable=invalid-name + name: Text, + default: Union[None, Iterable[int], int, Text], + help: Text, # pylint: disable=redefined-builtin + lower_bound: Optional[int] = ..., + upper_bound: Optional[int] = ..., + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[int]]: + ... + + +@overload +def DEFINE_multi_integer( # pylint: disable=invalid-name + name: Text, + default: None, + help: Text, # pylint: disable=redefined-builtin + lower_bound: Optional[int] = ..., + upper_bound: Optional[int] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[int]]]: + ... 
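# ---------------------------------------------------------------------------
# Illustrative usage sketch (editor's annotation, not part of the diffed
# file): the DEFINE_multi_* helpers above collect repeated command-line
# occurrences into a list, e.g. `prog --tag=a --tag=b` yields ['a', 'b'].
# The flag name 'tag' is a hypothetical example.
from absl import app
from absl import flags

_TAGS = flags.DEFINE_multi_string('tag', [], 'A tag; may be given repeatedly.')


def main(argv):
  del argv  # Unused.
  for tag in _TAGS.value:
    print('tag:', tag)


if __name__ == '__main__':
  app.run(main)
# ---------------------------------------------------------------------------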
+ + +@overload +def DEFINE_multi_integer( # pylint: disable=invalid-name + name: Text, + default: Union[Iterable[int], int, Text], + help: Text, # pylint: disable=redefined-builtin + lower_bound: Optional[int] = ..., + upper_bound: Optional[int] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[int]]: + ... + + +def DEFINE_multi_integer( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value can be a list of arbitrary integers. + + Use the flag on the command line multiple times to place multiple + integer values into the list. The 'default' may be a single integer + (which will be converted into a single-element list) or a list of + integers. + + Args: + name: str, the flag name. + default: Union[Iterable[int], Text, None], the default value of the flag; + see `DEFINE_multi`. + help: str, the help message. + lower_bound: int, min values of the flag. + upper_bound: int, max values of the flag. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + parser = _argument_parser.IntegerParser(lower_bound, upper_bound) + serializer = _argument_parser.ArgumentSerializer() + return DEFINE_multi( + parser, + serializer, + name, + default, + help, + flag_values, + required=True if required else False, + **args, + ) + + +@overload +def DEFINE_multi_float( # pylint: disable=invalid-name + name: Text, + default: Union[None, Iterable[float], float, Text], + help: Text, # pylint: disable=redefined-builtin + lower_bound: Optional[float] = ..., + upper_bound: Optional[float] = ..., + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[float]]: + ... + + +@overload +def DEFINE_multi_float( # pylint: disable=invalid-name + name: Text, + default: None, + help: Text, # pylint: disable=redefined-builtin + lower_bound: Optional[float] = ..., + upper_bound: Optional[float] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[float]]]: + ... + + +@overload +def DEFINE_multi_float( # pylint: disable=invalid-name + name: Text, + default: Union[Iterable[float], float, Text], + help: Text, # pylint: disable=redefined-builtin + lower_bound: Optional[float] = ..., + upper_bound: Optional[float] = ..., + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[float]]: + ... + + +def DEFINE_multi_float( # pylint: disable=invalid-name,redefined-builtin + name, + default, + help, + lower_bound=None, + upper_bound=None, + flag_values=_flagvalues.FLAGS, + required=False, + **args): + """Registers a flag whose value can be a list of arbitrary floats. + + Use the flag on the command line multiple times to place multiple + float values into the list. The 'default' may be a single float + (which will be converted into a single-element list) or a list of + floats. + + Args: + name: str, the flag name. 
+ default: Union[Iterable[float], Text, None], the default value of the flag; + see `DEFINE_multi`. + help: str, the help message. + lower_bound: float, min values of the flag. + upper_bound: float, max values of the flag. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + parser = _argument_parser.FloatParser(lower_bound, upper_bound) + serializer = _argument_parser.ArgumentSerializer() + return DEFINE_multi( + parser, + serializer, + name, + default, + help, + flag_values, + required=True if required else False, + **args, + ) + + +@overload +def DEFINE_multi_enum( # pylint: disable=invalid-name + name: Text, + default: Union[None, Iterable[Text], Text], + enum_values: Iterable[Text], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +@overload +def DEFINE_multi_enum( # pylint: disable=invalid-name + name: Text, + default: None, + enum_values: Iterable[Text], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[Text]]]: + ... + + +@overload +def DEFINE_multi_enum( # pylint: disable=invalid-name + name: Text, + default: Union[Iterable[Text], Text], + enum_values: Iterable[Text], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[Text]]: + ... + + +def DEFINE_multi_enum( # pylint: disable=invalid-name,redefined-builtin + name, + default, + enum_values, + help, + flag_values=_flagvalues.FLAGS, + case_sensitive=True, + required=False, + **args): + """Registers a flag whose value can be a list strings from enum_values. + + Use the flag on the command line multiple times to place multiple + enum values into the list. The 'default' may be a single string + (which will be converted into a single-element list) or a list of + strings. + + Args: + name: str, the flag name. + default: Union[Iterable[Text], Text, None], the default value of the flag; + see `DEFINE_multi`. + enum_values: [str], a non-empty list of strings with the possible values for + the flag. + help: str, the help message. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + case_sensitive: Whether or not the enum is to be case-sensitive. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. 
+ """ + parser = _argument_parser.EnumParser(enum_values, case_sensitive) + serializer = _argument_parser.ArgumentSerializer() + return DEFINE_multi( + parser, + serializer, + name, + default, + '<%s>: %s' % ('|'.join(enum_values), help), + flag_values, + required=True if required else False, + **args, + ) + + +@overload +def DEFINE_multi_enum_class( # pylint: disable=invalid-name + name: Text, + # This is separate from `Union[None, _ET, Iterable[Text], Text]` to avoid a + # Pytype issue inferring the return value to + # FlagHolder[List[Union[_ET, enum.Enum]]] when an iterable of concrete enum + # subclasses are used. + default: Iterable[_ET], + enum_class: Type[_ET], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[_ET]]: + ... + + +@overload +def DEFINE_multi_enum_class( # pylint: disable=invalid-name + name: Text, + default: Union[None, _ET, Iterable[Text], Text], + enum_class: Type[_ET], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + *, + required: 'typing.Literal[True]', + **args: Any +) -> _flagvalues.FlagHolder[List[_ET]]: + ... + + +@overload +def DEFINE_multi_enum_class( # pylint: disable=invalid-name + name: Text, + default: None, + enum_class: Type[_ET], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[Optional[List[_ET]]]: + ... + + +@overload +def DEFINE_multi_enum_class( # pylint: disable=invalid-name + name: Text, + # This is separate from `Union[None, _ET, Iterable[Text], Text]` to avoid a + # Pytype issue inferring the return value to + # FlagHolder[List[Union[_ET, enum.Enum]]] when an iterable of concrete enum + # subclasses are used. + default: Iterable[_ET], + enum_class: Type[_ET], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[_ET]]: + ... + + +@overload +def DEFINE_multi_enum_class( # pylint: disable=invalid-name + name: Text, + default: Union[_ET, Iterable[Text], Text], + enum_class: Type[_ET], + help: Text, # pylint: disable=redefined-builtin + flag_values: _flagvalues.FlagValues = ..., + module_name: Optional[Text] = ..., + required: bool = ..., + **args: Any +) -> _flagvalues.FlagHolder[List[_ET]]: + ... + + +def DEFINE_multi_enum_class( # pylint: disable=invalid-name,redefined-builtin + name, + default, + enum_class, + help, + flag_values=_flagvalues.FLAGS, + module_name=None, + case_sensitive=False, + required=False, + **args): + """Registers a flag whose value can be a list of enum members. + + Use the flag on the command line multiple times to place multiple + enum values into the list. + + Args: + name: str, the flag name. + default: Union[Iterable[Enum], Iterable[Text], Enum, Text, None], the + default value of the flag; see `DEFINE_multi`; only differences are + documented here. If the value is a single Enum, it is treated as a + single-item list of that Enum value. If it is an iterable, text values + within the iterable will be converted to the equivalent Enum objects. + enum_class: class, the Enum class with all the possible values for the flag. + help: str, the help message. 
+ flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: A string, the name of the Python module declaring this flag. If + not provided, it will be computed using the stack trace of this call. + case_sensitive: bool, whether to map strings to members of the enum_class + without considering case. + required: bool, is this a required flag. This must be used as a keyword + argument. + **args: Dictionary with extra keyword args that are passed to the + ``Flag.__init__``. + + Returns: + a handle to defined flag. + """ + # NOTE: pytype fails if this is a direct return. + result = DEFINE_flag( + _flag.MultiEnumClassFlag( + name, + default, + help, + enum_class, + case_sensitive=case_sensitive, + **args, + ), + flag_values, + module_name, + required=True if required else False, + ) + return result + + +def DEFINE_alias( # pylint: disable=invalid-name + name: Text, + original_name: Text, + flag_values: _flagvalues.FlagValues = _flagvalues.FLAGS, + module_name: Optional[Text] = None, +) -> _flagvalues.FlagHolder[Any]: + """Defines an alias flag for an existing one. + + Args: + name: str, the flag name. + original_name: str, the original flag name. + flag_values: :class:`FlagValues`, the FlagValues instance with which the + flag will be registered. This should almost never need to be overridden. + module_name: A string, the name of the module that defines this flag. + + Returns: + a handle to defined flag. + + Raises: + flags.FlagError: + UnrecognizedFlagError: if the referenced flag doesn't exist. + DuplicateFlagError: if the alias name has been used by some existing flag. + """ + if original_name not in flag_values: + raise _exceptions.UnrecognizedFlagError(original_name) + flag = flag_values[original_name] + + class _FlagAlias(_flag.Flag): + """Overrides Flag class so alias value is copy of original flag value.""" + + def parse(self, argument): + flag.parse(argument) + self.present += 1 + + def _parse_from_default(self, value): + # The value was already parsed by the aliased flag, so there is no + # need to call the parser on it a second time. + # Additionally, because of how MultiFlag parses and merges values, + # it isn't possible to delegate to the aliased flag and still get + # the correct values. + return value + + @property + def value(self): + return flag.value + + @value.setter + def value(self, value): + flag.value = value + + help_msg = 'Alias for --%s.' % flag.name + # If alias_name has been used, flags.DuplicatedFlag will be raised. + return DEFINE_flag( + _FlagAlias( + flag.parser, + flag.serializer, + name, + flag.default, + help_msg, + boolean=flag.boolean), flag_values, module_name) diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_exceptions.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..b569d9460e31622da8d1c826ea31400fb2d74fcc --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_exceptions.py @@ -0,0 +1,108 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Exception classes in ABSL flags library. + +Do NOT import this module directly. Import the flags package and use the +aliases defined at the package level instead. +""" + +import sys + +from absl.flags import _helpers + + +_helpers.disclaim_module_ids.add(id(sys.modules[__name__])) + + +class Error(Exception): + """The base class for all flags errors.""" + + +class CantOpenFlagFileError(Error): + """Raised when flagfile fails to open. + + E.g. the file doesn't exist, or has wrong permissions. + """ + + +class DuplicateFlagError(Error): + """Raised if there is a flag naming conflict.""" + + @classmethod + def from_flag(cls, flagname, flag_values, other_flag_values=None): + """Creates a DuplicateFlagError by providing flag name and values. + + Args: + flagname: str, the name of the flag being redefined. + flag_values: :class:`FlagValues`, the FlagValues instance containing the + first definition of flagname. + other_flag_values: :class:`FlagValues`, if it is not None, it should be + the FlagValues object where the second definition of flagname occurs. + If it is None, we assume that we're being called when attempting to + create the flag a second time, and we use the module calling this one + as the source of the second definition. + + Returns: + An instance of DuplicateFlagError. + """ + first_module = flag_values.find_module_defining_flag( + flagname, default='') + if other_flag_values is None: + second_module = _helpers.get_calling_module() + else: + second_module = other_flag_values.find_module_defining_flag( + flagname, default='') + flag_summary = flag_values[flagname].help + msg = ("The flag '%s' is defined twice. First from %s, Second from %s. " + "Description from first occurrence: %s") % ( + flagname, first_module, second_module, flag_summary) + return cls(msg) + + +class IllegalFlagValueError(Error): + """Raised when the flag command line argument is illegal.""" + + +class UnrecognizedFlagError(Error): + """Raised when a flag is unrecognized. + + Attributes: + flagname: str, the name of the unrecognized flag. + flagvalue: The value of the flag, empty if the flag is not defined. + """ + + def __init__(self, flagname, flagvalue='', suggestions=None): + self.flagname = flagname + self.flagvalue = flagvalue + if suggestions: + # Space before the question mark is intentional to not include it in the + # selection when copy-pasting the suggestion from (some) terminals. + tip = '. Did you mean: %s ?' 
% ', '.join(suggestions) + else: + tip = '' + super(UnrecognizedFlagError, self).__init__( + 'Unknown command line flag \'%s\'%s' % (flagname, tip)) + + +class UnparsedFlagAccessError(Error): + """Raised when accessing the flag value from unparsed :class:`FlagValues`.""" + + +class ValidationError(Error): + """Raised when flag validator constraint is not satisfied.""" + + +class FlagNameConflictsWithMethodError(Error): + """Raised when a flag name conflicts with :class:`FlagValues` methods.""" diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_flag.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_flag.py new file mode 100644 index 0000000000000000000000000000000000000000..67117880c02b36de1291c12cd37e4906d932996f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_flag.py @@ -0,0 +1,556 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Contains Flag class - information about single command-line flag. + +Do NOT import this module directly. Import the flags package and use the +aliases defined at the package level instead. +""" + +from collections import abc +import copy +import enum +import functools +from typing import Any, Dict, Generic, Iterable, List, Optional, Text, Type, TypeVar, Union +from xml.dom import minidom + +from absl.flags import _argument_parser +from absl.flags import _exceptions +from absl.flags import _helpers + +_T = TypeVar('_T') +_ET = TypeVar('_ET', bound=enum.Enum) + + +@functools.total_ordering +class Flag(Generic[_T]): + """Information about a command-line flag. + + Attributes: + name: the name for this flag + default: the default value for this flag + default_unparsed: the unparsed default value for this flag. + default_as_str: default value as repr'd string, e.g., "'true'" + (or None) + value: the most recent parsed value of this flag set by :meth:`parse` + help: a help string or None if no help is available + short_name: the single letter alias for this flag (or None) + boolean: if 'true', this flag does not accept arguments + present: true if this flag was parsed from command line flags + parser: an :class:`~absl.flags.ArgumentParser` object + serializer: an ArgumentSerializer object + allow_override: the flag may be redefined without raising an error, + and newly defined flag overrides the old one. + allow_override_cpp: use the flag from C++ if available the flag + definition is replaced by the C++ flag after init + allow_hide_cpp: use the Python flag despite having a C++ flag with + the same name (ignore the C++ flag) + using_default_value: the flag value has not been set by user + allow_overwrite: the flag may be parsed more than once without + raising an error, the last set value will be used + allow_using_method_names: whether this flag can be defined even if + it has a name that conflicts with a FlagValues method. + validators: list of the flag validators. 
+ + The only public method of a ``Flag`` object is :meth:`parse`, but it is + typically only called by a :class:`~absl.flags.FlagValues` object. The + :meth:`parse` method is a thin wrapper around the + :meth:`ArgumentParser.parse()` method. The + parsed value is saved in ``.value``, and the ``.present`` attribute is + updated. If this flag was already present, an Error is raised. + + :meth:`parse` is also called during ``__init__`` to parse the default value + and initialize the ``.value`` attribute. This enables other python modules to + safely use flags even if the ``__main__`` module neglects to parse the + command line arguments. The ``.present`` attribute is cleared after + ``__init__`` parsing. If the default value is set to ``None``, then the + ``__init__`` parsing step is skipped and the ``.value`` attribute is + initialized to None. + + Note: The default value is also presented to the user in the help + string, so it is important that it be a legal value for this flag. + """ + + # NOTE: pytype doesn't find defaults without this. + default: Optional[_T] + default_as_str: Optional[Text] + default_unparsed: Union[Optional[_T], Text] + + def __init__( + self, + parser: _argument_parser.ArgumentParser[_T], + serializer: Optional[_argument_parser.ArgumentSerializer[_T]], + name: Text, + default: Union[Optional[_T], Text], + help_string: Optional[Text], + short_name: Optional[Text] = None, + boolean: bool = False, + allow_override: bool = False, + allow_override_cpp: bool = False, + allow_hide_cpp: bool = False, + allow_overwrite: bool = True, + allow_using_method_names: bool = False, + ) -> None: + self.name = name + + if not help_string: + help_string = '(no help available)' + + self.help = help_string + self.short_name = short_name + self.boolean = boolean + self.present = 0 + self.parser = parser + self.serializer = serializer + self.allow_override = allow_override + self.allow_override_cpp = allow_override_cpp + self.allow_hide_cpp = allow_hide_cpp + self.allow_overwrite = allow_overwrite + self.allow_using_method_names = allow_using_method_names + + self.using_default_value = True + self._value = None + self.validators = [] + if self.allow_hide_cpp and self.allow_override_cpp: + raise _exceptions.Error( + "Can't have both allow_hide_cpp (means use Python flag) and " + 'allow_override_cpp (means use C++ flag after InitGoogle)') + + self._set_default(default) + + @property + def value(self) -> Optional[_T]: + return self._value + + @value.setter + def value(self, value: Optional[_T]): + self._value = value + + def __hash__(self): + return hash(id(self)) + + def __eq__(self, other): + return self is other + + def __lt__(self, other): + if isinstance(other, Flag): + return id(self) < id(other) + return NotImplemented + + def __bool__(self): + raise TypeError('A Flag instance would always be True. ' + 'Did you mean to test the `.value` attribute?') + + def __getstate__(self): + raise TypeError("can't pickle Flag objects") + + def __copy__(self): + raise TypeError('%s does not support shallow copies. ' + 'Use copy.deepcopy instead.' 
% type(self).__name__) + + def __deepcopy__(self, memo: Dict[int, Any]) -> 'Flag[_T]': + result = object.__new__(type(self)) + result.__dict__ = copy.deepcopy(self.__dict__, memo) + return result + + def _get_parsed_value_as_string(self, value: Optional[_T]) -> Optional[Text]: + """Returns parsed flag value as string.""" + if value is None: + return None + if self.serializer: + return repr(self.serializer.serialize(value)) + if self.boolean: + if value: + return repr('true') + else: + return repr('false') + return repr(str(value)) + + def parse(self, argument: Union[Text, Optional[_T]]) -> None: + """Parses string and sets flag value. + + Args: + argument: str or the correct flag value type, argument to be parsed. + """ + if self.present and not self.allow_overwrite: + raise _exceptions.IllegalFlagValueError( + 'flag --%s=%s: already defined as %s' % ( + self.name, argument, self.value)) + self.value = self._parse(argument) + self.present += 1 + + def _parse(self, argument: Union[Text, _T]) -> Optional[_T]: + """Internal parse function. + + It returns the parsed value, and does not modify class states. + + Args: + argument: str or the correct flag value type, argument to be parsed. + + Returns: + The parsed value. + """ + try: + return self.parser.parse(argument) + except (TypeError, ValueError) as e: # Recast as IllegalFlagValueError. + raise _exceptions.IllegalFlagValueError( + 'flag --%s=%s: %s' % (self.name, argument, e)) + + def unparse(self) -> None: + self.value = self.default + self.using_default_value = True + self.present = 0 + + def serialize(self) -> Text: + """Serializes the flag.""" + return self._serialize(self.value) + + def _serialize(self, value: Optional[_T]) -> Text: + """Internal serialize function.""" + if value is None: + return '' + if self.boolean: + if value: + return '--%s' % self.name + else: + return '--no%s' % self.name + else: + if not self.serializer: + raise _exceptions.Error( + 'Serializer not present for flag %s' % self.name) + return '--%s=%s' % (self.name, self.serializer.serialize(value)) + + def _set_default(self, value: Union[Optional[_T], Text]) -> None: + """Changes the default value (and current value too) for this Flag.""" + self.default_unparsed = value + if value is None: + self.default = None + else: + self.default = self._parse_from_default(value) + self.default_as_str = self._get_parsed_value_as_string(self.default) + if self.using_default_value: + self.value = self.default + + # This is split out so that aliases can skip regular parsing of the default + # value. + def _parse_from_default(self, value: Union[Text, _T]) -> Optional[_T]: + return self._parse(value) + + def flag_type(self) -> Text: + """Returns a str that describes the type of the flag. + + NOTE: we use strings, and not the types.*Type constants because + our flags can have more exotic types, e.g., 'comma separated list + of strings', 'whitespace separated list of strings', etc. + """ + return self.parser.flag_type() + + def _create_xml_dom_element( + self, doc: minidom.Document, module_name: str, is_key: bool = False + ) -> minidom.Element: + """Returns an XML element that contains this flag's information. + + This is information that is relevant to all flags (e.g., name, + meaning, etc.). If you defined a flag that has some other pieces of + info, then please override _ExtraXMLInfo. + + Please do NOT override this method. + + Args: + doc: minidom.Document, the DOM document it should create nodes from. + module_name: str,, the name of the module that defines this flag. 
+ is_key: boolean, True iff this flag is key for main module. + + Returns: + A minidom.Element instance. + """ + element = doc.createElement('flag') + if is_key: + element.appendChild(_helpers.create_xml_dom_element(doc, 'key', 'yes')) + element.appendChild(_helpers.create_xml_dom_element( + doc, 'file', module_name)) + # Adds flag features that are relevant for all flags. + element.appendChild(_helpers.create_xml_dom_element(doc, 'name', self.name)) + if self.short_name: + element.appendChild(_helpers.create_xml_dom_element( + doc, 'short_name', self.short_name)) + if self.help: + element.appendChild(_helpers.create_xml_dom_element( + doc, 'meaning', self.help)) + # The default flag value can either be represented as a string like on the + # command line, or as a Python object. We serialize this value in the + # latter case in order to remain consistent. + if self.serializer and not isinstance(self.default, str): + if self.default is not None: + default_serialized = self.serializer.serialize(self.default) + else: + default_serialized = '' + else: + default_serialized = self.default + element.appendChild(_helpers.create_xml_dom_element( + doc, 'default', default_serialized)) + value_serialized = self._serialize_value_for_xml(self.value) + element.appendChild(_helpers.create_xml_dom_element( + doc, 'current', value_serialized)) + element.appendChild(_helpers.create_xml_dom_element( + doc, 'type', self.flag_type())) + # Adds extra flag features this flag may have. + for e in self._extra_xml_dom_elements(doc): + element.appendChild(e) + return element + + def _serialize_value_for_xml(self, value: Optional[_T]) -> Any: + """Returns the serialized value, for use in an XML help text.""" + return value + + def _extra_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + """Returns extra info about this flag in XML. + + "Extra" means "not already included by _create_xml_dom_element above." + + Args: + doc: minidom.Document, the DOM document it should create nodes from. + + Returns: + A list of minidom.Element. + """ + # Usually, the parser knows the extra details about the flag, so + # we just forward the call to it. + return self.parser._custom_xml_dom_elements(doc) # pylint: disable=protected-access + + +class BooleanFlag(Flag[bool]): + """Basic boolean flag. + + Boolean flags do not take any arguments, and their value is either + ``True`` (1) or ``False`` (0). The false value is specified on the command + line by prepending the word ``'no'`` to either the long or the short flag + name. + + For example, if a Boolean flag was created whose long name was + ``'update'`` and whose short name was ``'x'``, then this flag could be + explicitly unset through either ``--noupdate`` or ``--nox``. 
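+
+  An illustrative sketch (the ``update`` flag name is hypothetical;
+  ``DEFINE_boolean`` is the package-level helper that creates this flag
+  type)::
+
+      flags.DEFINE_boolean('update', False, 'Run in update mode.')
+      # Passing --update (or --update=true) yields True;
+      # passing --noupdate yields False.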
+ """ + + def __init__( + self, + name: Text, + default: Union[Optional[bool], Text], + help: Optional[Text], # pylint: disable=redefined-builtin + short_name: Optional[Text] = None, + **args + ) -> None: + p = _argument_parser.BooleanParser() + super(BooleanFlag, self).__init__( + p, None, name, default, help, short_name, True, **args + ) + + +class EnumFlag(Flag[Text]): + """Basic enum flag; its value can be any string from list of enum_values.""" + + def __init__( + self, + name: Text, + default: Optional[Text], + help: Optional[Text], # pylint: disable=redefined-builtin + enum_values: Iterable[Text], + short_name: Optional[Text] = None, + case_sensitive: bool = True, + **args + ): + p = _argument_parser.EnumParser(enum_values, case_sensitive) + g = _argument_parser.ArgumentSerializer() + super(EnumFlag, self).__init__( + p, g, name, default, help, short_name, **args) + # NOTE: parser should be typed EnumParser but the constructor + # restricts the available interface to ArgumentParser[str]. + self.parser = p + self.help = '<%s>: %s' % ('|'.join(p.enum_values), self.help) + + def _extra_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + elements = [] + for enum_value in self.parser.enum_values: + elements.append(_helpers.create_xml_dom_element( + doc, 'enum_value', enum_value)) + return elements + + +class EnumClassFlag(Flag[_ET]): + """Basic enum flag; its value is an enum class's member.""" + + def __init__( + self, + name: Text, + default: Union[Optional[_ET], Text], + help: Optional[Text], # pylint: disable=redefined-builtin + enum_class: Type[_ET], + short_name: Optional[Text] = None, + case_sensitive: bool = False, + **args + ): + p = _argument_parser.EnumClassParser( + enum_class, case_sensitive=case_sensitive) + g = _argument_parser.EnumClassSerializer(lowercase=not case_sensitive) + super(EnumClassFlag, self).__init__( + p, g, name, default, help, short_name, **args) + # NOTE: parser should be typed EnumClassParser[_ET] but the constructor + # restricts the available interface to ArgumentParser[_ET]. + self.parser = p + self.help = '<%s>: %s' % ('|'.join(p.member_names), self.help) + + def _extra_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + elements = [] + for enum_value in self.parser.enum_class.__members__.keys(): + elements.append(_helpers.create_xml_dom_element( + doc, 'enum_value', enum_value)) + return elements + + +class MultiFlag(Generic[_T], Flag[List[_T]]): + """A flag that can appear multiple time on the command-line. + + The value of such a flag is a list that contains the individual values + from all the appearances of that flag on the command-line. + + See the __doc__ for Flag for most behavior of this class. Only + differences in behavior are described here: + + * The default value may be either a single value or an iterable of values. + A single value is transformed into a single-item list of that value. + + * The value of the flag is always a list, even if the option was + only supplied once, and even if the default value is a single + value + """ + + def __init__(self, *args, **kwargs): + super(MultiFlag, self).__init__(*args, **kwargs) + self.help += ';\n repeat this option to specify a list of values' + + def parse(self, arguments: Union[Text, _T, Iterable[_T]]): # pylint: disable=arguments-renamed + """Parses one or more arguments with the installed parser. 
+ + Args: + arguments: a single argument or a list of arguments (typically a + list of default values); a single argument is converted + internally into a list containing one item. + """ + new_values = self._parse(arguments) + if self.present: + self.value.extend(new_values) + else: + self.value = new_values + self.present += len(new_values) + + def _parse(self, arguments: Union[Text, Optional[Iterable[_T]]]) -> List[_T]: # pylint: disable=arguments-renamed + if (isinstance(arguments, abc.Iterable) and + not isinstance(arguments, str)): + arguments = list(arguments) + + if not isinstance(arguments, list): + # Default value may be a list of values. Most other arguments + # will not be, so convert them into a single-item list to make + # processing simpler below. + arguments = [arguments] + + return [super(MultiFlag, self)._parse(item) for item in arguments] + + def _serialize(self, value: Optional[List[_T]]) -> Text: + """See base class.""" + if not self.serializer: + raise _exceptions.Error( + 'Serializer not present for flag %s' % self.name) + if value is None: + return '' + + serialized_items = [ + super(MultiFlag, self)._serialize(value_item) for value_item in value + ] + + return '\n'.join(serialized_items) + + def flag_type(self): + """See base class.""" + return 'multi ' + self.parser.flag_type() + + def _extra_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + elements = [] + if hasattr(self.parser, 'enum_values'): + for enum_value in self.parser.enum_values: # pytype: disable=attribute-error + elements.append(_helpers.create_xml_dom_element( + doc, 'enum_value', enum_value)) + return elements + + +class MultiEnumClassFlag(MultiFlag[_ET]): # pytype: disable=not-indexable + """A multi_enum_class flag. + + See the __doc__ for MultiFlag for most behaviors of this class. In addition, + this class knows how to handle enum.Enum instances as values for this flag + type. + """ + + def __init__( + self, + name: str, + default: Union[None, Iterable[_ET], _ET, Iterable[Text], Text], + help_string: str, + enum_class: Type[_ET], + case_sensitive: bool = False, + **args + ): + p = _argument_parser.EnumClassParser( + enum_class, case_sensitive=case_sensitive) + g = _argument_parser.EnumClassListSerializer( + list_sep=',', lowercase=not case_sensitive) + super(MultiEnumClassFlag, self).__init__( + p, g, name, default, help_string, **args) + # NOTE: parser should be typed EnumClassParser[_ET] but the constructor + # restricts the available interface to ArgumentParser[str]. + self.parser = p + # NOTE: serializer should be non-Optional but this isn't inferred. 
+ self.serializer = g + self.help = ( + '<%s>: %s;\n repeat this option to specify a list of values' % + ('|'.join(p.member_names), help_string or '(no help available)')) + + def _extra_xml_dom_elements( + self, doc: minidom.Document + ) -> List[minidom.Element]: + elements = [] + for enum_value in self.parser.enum_class.__members__.keys(): # pytype: disable=attribute-error + elements.append(_helpers.create_xml_dom_element( + doc, 'enum_value', enum_value)) + return elements + + def _serialize_value_for_xml(self, value): + """See base class.""" + if value is not None: + if not self.serializer: + raise _exceptions.Error( + 'Serializer not present for flag %s' % self.name + ) + value_serialized = self.serializer.serialize(value) + else: + value_serialized = '' + return value_serialized diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_flagvalues.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_flagvalues.py new file mode 100644 index 0000000000000000000000000000000000000000..e25f1d3e100b80aafb51b3a5b5a25772d2b6119c --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_flagvalues.py @@ -0,0 +1,1480 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines the FlagValues class - registry of 'Flag' objects. + +Do NOT import this module directly. Import the flags package and use the +aliases defined at the package level instead. +""" + +import copy +import itertools +import logging +import os +import sys +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sequence, Text, TextIO, Generic, TypeVar, Union, Tuple +from xml.dom import minidom + +from absl.flags import _exceptions +from absl.flags import _flag +from absl.flags import _helpers +from absl.flags import _validators_classes +from absl.flags._flag import Flag + +# Add flagvalues module to disclaimed module ids. +_helpers.disclaim_module_ids.add(id(sys.modules[__name__])) + +_T = TypeVar('_T') + + +class FlagValues: + """Registry of :class:`~absl.flags.Flag` objects. + + A :class:`FlagValues` can then scan command line arguments, passing flag + arguments through to the 'Flag' objects that it owns. It also + provides easy access to the flag values. Typically only one + :class:`FlagValues` object is needed by an application: + :const:`FLAGS`. + + This class is heavily overloaded: + + :class:`Flag` objects are registered via ``__setitem__``:: + + FLAGS['longname'] = x # register a new flag + + The ``.value`` attribute of the registered :class:`~absl.flags.Flag` objects + can be accessed as attributes of this :class:`FlagValues` object, through + ``__getattr__``. Both the long and short name of the original + :class:`~absl.flags.Flag` objects can be used to access its value:: + + FLAGS.longname # parsed flag value + FLAGS.x # parsed flag value (short name) + + Command line arguments are scanned and passed to the registered + :class:`~absl.flags.Flag` objects through the ``__call__`` method. Unparsed + arguments, including ``argv[0]`` (e.g. 
the program name) are returned:: + + argv = FLAGS(sys.argv) # scan command line arguments + + The original registered :class:`~absl.flags.Flag` objects can be retrieved + through the use of the dictionary-like operator, ``__getitem__``:: + + x = FLAGS['longname'] # access the registered Flag object + + The ``str()`` operator of a :class:`absl.flags.FlagValues` object provides + help for all of the registered :class:`~absl.flags.Flag` objects. + """ + + _HAS_DYNAMIC_ATTRIBUTES = True + + # A note on collections.abc.Mapping: + # FlagValues defines __getitem__, __iter__, and __len__. It makes perfect + # sense to let it be a collections.abc.Mapping class. However, we are not + # able to do so. The mixin methods, e.g. keys, values, are not uncommon flag + # names. Those flag values would not be accessible via the FLAGS.xxx form. + + __dict__: Dict[str, Any] + + def __init__(self): + # Since everything in this class is so heavily overloaded, the only + # way of defining and using fields is to access __dict__ directly. + + # Dictionary: flag name (string) -> Flag object. + self.__dict__['__flags'] = {} + + # Set: name of hidden flag (string). + # Holds flags that should not be directly accessible from Python. + self.__dict__['__hiddenflags'] = set() + + # Dictionary: module name (string) -> list of Flag objects that are defined + # by that module. + self.__dict__['__flags_by_module'] = {} + # Dictionary: module id (int) -> list of Flag objects that are defined by + # that module. + self.__dict__['__flags_by_module_id'] = {} + # Dictionary: module name (string) -> list of Flag objects that are + # key for that module. + self.__dict__['__key_flags_by_module'] = {} + + # Bool: True if flags were parsed. + self.__dict__['__flags_parsed'] = False + + # Bool: True if unparse_flags() was called. + self.__dict__['__unparse_flags_called'] = False + + # None or Method(name, value) to call from __setattr__ for an unknown flag. + self.__dict__['__set_unknown'] = None + + # A set of banned flag names. This is to prevent users from accidentally + # defining a flag that has the same name as a method on this class. + # Users can still allow defining the flag by passing + # allow_using_method_names=True in DEFINE_xxx functions. + self.__dict__['__banned_flag_names'] = frozenset(dir(FlagValues)) + + # Bool: Whether to use GNU style scanning. + self.__dict__['__use_gnu_getopt'] = True + + # Bool: Whether use_gnu_getopt has been explicitly set by the user. + self.__dict__['__use_gnu_getopt_explicitly_set'] = False + + # Function: Takes a flag name as parameter, returns a tuple + # (is_retired, type_is_bool). + self.__dict__['__is_retired_flag_func'] = None + + def set_gnu_getopt(self, gnu_getopt: bool = True) -> None: + """Sets whether or not to use GNU style scanning. + + GNU style allows mixing of flag and non-flag arguments. See + http://docs.python.org/library/getopt.html#getopt.gnu_getopt + + Args: + gnu_getopt: bool, whether or not to use GNU style scanning. + """ + self.__dict__['__use_gnu_getopt'] = gnu_getopt + self.__dict__['__use_gnu_getopt_explicitly_set'] = True + + def is_gnu_getopt(self) -> bool: + return self.__dict__['__use_gnu_getopt'] + + def _flags(self) -> Dict[Text, Flag]: + return self.__dict__['__flags'] + + def flags_by_module_dict(self) -> Dict[Text, List[Flag]]: + """Returns the dictionary of module_name -> list of defined flags. + + Returns: + A dictionary. Its keys are module names (strings). Its values + are lists of Flag objects. 
+ """ + return self.__dict__['__flags_by_module'] + + def flags_by_module_id_dict(self) -> Dict[int, List[Flag]]: + """Returns the dictionary of module_id -> list of defined flags. + + Returns: + A dictionary. Its keys are module IDs (ints). Its values + are lists of Flag objects. + """ + return self.__dict__['__flags_by_module_id'] + + def key_flags_by_module_dict(self) -> Dict[Text, List[Flag]]: + """Returns the dictionary of module_name -> list of key flags. + + Returns: + A dictionary. Its keys are module names (strings). Its values + are lists of Flag objects. + """ + return self.__dict__['__key_flags_by_module'] + + def register_flag_by_module(self, module_name: Text, flag: Flag) -> None: + """Records the module that defines a specific flag. + + We keep track of which flag is defined by which module so that we + can later sort the flags by module. + + Args: + module_name: str, the name of a Python module. + flag: Flag, the Flag instance that is key to the module. + """ + flags_by_module = self.flags_by_module_dict() + flags_by_module.setdefault(module_name, []).append(flag) + + def register_flag_by_module_id(self, module_id: int, flag: Flag) -> None: + """Records the module that defines a specific flag. + + Args: + module_id: int, the ID of the Python module. + flag: Flag, the Flag instance that is key to the module. + """ + flags_by_module_id = self.flags_by_module_id_dict() + flags_by_module_id.setdefault(module_id, []).append(flag) + + def register_key_flag_for_module(self, module_name: Text, flag: Flag) -> None: + """Specifies that a flag is a key flag for a module. + + Args: + module_name: str, the name of a Python module. + flag: Flag, the Flag instance that is key to the module. + """ + key_flags_by_module = self.key_flags_by_module_dict() + # The list of key flags for the module named module_name. + key_flags = key_flags_by_module.setdefault(module_name, []) + # Add flag, but avoid duplicates. + if flag not in key_flags: + key_flags.append(flag) + + def _flag_is_registered(self, flag_obj: Flag) -> bool: + """Checks whether a Flag object is registered under long name or short name. + + Args: + flag_obj: Flag, the Flag instance to check for. + + Returns: + bool, True iff flag_obj is registered under long name or short name. + """ + flag_dict = self._flags() + # Check whether flag_obj is registered under its long name. + name = flag_obj.name + if flag_dict.get(name, None) == flag_obj: + return True + # Check whether flag_obj is registered under its short name. + short_name = flag_obj.short_name + if (short_name is not None and flag_dict.get(short_name, None) == flag_obj): + return True + return False + + def _cleanup_unregistered_flag_from_module_dicts( + self, flag_obj: Flag + ) -> None: + """Cleans up unregistered flags from all module -> [flags] dictionaries. + + If flag_obj is registered under either its long name or short name, it + won't be removed from the dictionaries. + + Args: + flag_obj: Flag, the Flag instance to clean up for. + """ + if self._flag_is_registered(flag_obj): + return + for flags_by_module_dict in (self.flags_by_module_dict(), + self.flags_by_module_id_dict(), + self.key_flags_by_module_dict()): + for flags_in_module in flags_by_module_dict.values(): + # While (as opposed to if) takes care of multiple occurrences of a + # flag in the list for the same module. 
+ while flag_obj in flags_in_module: + flags_in_module.remove(flag_obj) + + def get_flags_for_module(self, module: Union[Text, Any]) -> List[Flag]: + """Returns the list of flags defined by a module. + + Args: + module: module|str, the module to get flags from. + + Returns: + [Flag], a new list of Flag instances. Caller may update this list as + desired: none of those changes will affect the internals of this + FlagValue instance. + """ + if not isinstance(module, str): + module = module.__name__ + if module == '__main__': + module = sys.argv[0] + + return list(self.flags_by_module_dict().get(module, [])) + + def get_key_flags_for_module(self, module: Union[Text, Any]) -> List[Flag]: + """Returns the list of key flags for a module. + + Args: + module: module|str, the module to get key flags from. + + Returns: + [Flag], a new list of Flag instances. Caller may update this list as + desired: none of those changes will affect the internals of this + FlagValue instance. + """ + if not isinstance(module, str): + module = module.__name__ + if module == '__main__': + module = sys.argv[0] + + # Any flag is a key flag for the module that defined it. NOTE: + # key_flags is a fresh list: we can update it without affecting the + # internals of this FlagValues object. + key_flags = self.get_flags_for_module(module) + + # Take into account flags explicitly declared as key for a module. + for flag in self.key_flags_by_module_dict().get(module, []): + if flag not in key_flags: + key_flags.append(flag) + return key_flags + + # TODO(yileiyang): Restrict default to Optional[Text]. + def find_module_defining_flag( + self, flagname: Text, default: Optional[_T] = None + ) -> Union[str, Optional[_T]]: + """Return the name of the module defining this flag, or default. + + Args: + flagname: str, name of the flag to lookup. + default: Value to return if flagname is not defined. Defaults to None. + + Returns: + The name of the module which registered the flag with this name. + If no such module exists (i.e. no flag with this name exists), + we return default. + """ + registered_flag = self._flags().get(flagname) + if registered_flag is None: + return default + for module, flags in self.flags_by_module_dict().items(): + for flag in flags: + # It must compare the flag with the one in _flags. This is because a + # flag might be overridden only for its long name (or short name), + # and only its short name (or long name) is considered registered. + if (flag.name == registered_flag.name and + flag.short_name == registered_flag.short_name): + return module + return default + + # TODO(yileiyang): Restrict default to Optional[Text]. + def find_module_id_defining_flag( + self, flagname: Text, default: Optional[_T] = None + ) -> Union[int, Optional[_T]]: + """Return the ID of the module defining this flag, or default. + + Args: + flagname: str, name of the flag to lookup. + default: Value to return if flagname is not defined. Defaults to None. + + Returns: + The ID of the module which registered the flag with this name. + If no such module exists (i.e. no flag with this name exists), + we return default. + """ + registered_flag = self._flags().get(flagname) + if registered_flag is None: + return default + for module_id, flags in self.flags_by_module_id_dict().items(): + for flag in flags: + # It must compare the flag with the one in _flags. This is because a + # flag might be overridden only for its long name (or short name), + # and only its short name (or long name) is considered registered. 
+ if (flag.name == registered_flag.name and + flag.short_name == registered_flag.short_name): + return module_id + return default + + def _register_unknown_flag_setter( + self, setter: Callable[[str, Any], None] + ) -> None: + """Allow set default values for undefined flags. + + Args: + setter: Method(name, value) to call to __setattr__ an unknown flag. Must + raise NameError or ValueError for invalid name/value. + """ + self.__dict__['__set_unknown'] = setter + + def _set_unknown_flag(self, name: str, value: _T) -> _T: + """Returns value if setting flag |name| to |value| returned True. + + Args: + name: str, name of the flag to set. + value: Value to set. + + Returns: + Flag value on successful call. + + Raises: + UnrecognizedFlagError + IllegalFlagValueError + """ + setter = self.__dict__['__set_unknown'] + if setter: + try: + setter(name, value) + return value + except (TypeError, ValueError): # Flag value is not valid. + raise _exceptions.IllegalFlagValueError( + '"{1}" is not valid for --{0}'.format(name, value)) + except NameError: # Flag name is not valid. + pass + raise _exceptions.UnrecognizedFlagError(name, value) + + def append_flag_values(self, flag_values: 'FlagValues') -> None: + """Appends flags registered in another FlagValues instance. + + Args: + flag_values: FlagValues, the FlagValues instance from which to copy flags. + """ + for flag_name, flag in flag_values._flags().items(): # pylint: disable=protected-access + # Each flags with short_name appears here twice (once under its + # normal name, and again with its short name). To prevent + # problems (DuplicateFlagError) with double flag registration, we + # perform a check to make sure that the entry we're looking at is + # for its normal name. + if flag_name == flag.name: + try: + self[flag_name] = flag + except _exceptions.DuplicateFlagError: + raise _exceptions.DuplicateFlagError.from_flag( + flag_name, self, other_flag_values=flag_values) + + def remove_flag_values( + self, flag_values: 'Union[FlagValues, Iterable[Text]]' + ) -> None: + """Remove flags that were previously appended from another FlagValues. + + Args: + flag_values: FlagValues, the FlagValues instance containing flags to + remove. + """ + for flag_name in flag_values: + self.__delattr__(flag_name) + + def __setitem__(self, name: Text, flag: Flag) -> None: + """Registers a new flag variable.""" + fl = self._flags() + if not isinstance(flag, _flag.Flag): + raise _exceptions.IllegalFlagValueError( + f'Expect Flag instances, found type {type(flag)}. ' + "Maybe you didn't mean to use FlagValue.__setitem__?") + if not isinstance(name, str): + raise _exceptions.Error('Flag name must be a string') + if not name: + raise _exceptions.Error('Flag name cannot be empty') + if ' ' in name: + raise _exceptions.Error('Flag name cannot contain a space') + self._check_method_name_conflicts(name, flag) + if name in fl and not flag.allow_override and not fl[name].allow_override: + module, module_name = _helpers.get_calling_module_object_and_name() + if (self.find_module_defining_flag(name) == module_name and + id(module) != self.find_module_id_defining_flag(name)): + # If the flag has already been defined by a module with the same name, + # but a different ID, we can stop here because it indicates that the + # module is simply being imported a subsequent time. + return + raise _exceptions.DuplicateFlagError.from_flag(name, self) + # If a new flag overrides an old one, we need to cleanup the old flag's + # modules if it's not registered. 
+ flags_to_cleanup = set() + short_name: str = flag.short_name # pytype: disable=annotation-type-mismatch + if short_name is not None: + if (short_name in fl and not flag.allow_override and + not fl[short_name].allow_override): + raise _exceptions.DuplicateFlagError.from_flag(short_name, self) + if short_name in fl and fl[short_name] != flag: + flags_to_cleanup.add(fl[short_name]) + fl[short_name] = flag + if (name not in fl # new flag + or fl[name].using_default_value or not flag.using_default_value): + if name in fl and fl[name] != flag: + flags_to_cleanup.add(fl[name]) + fl[name] = flag + for f in flags_to_cleanup: + self._cleanup_unregistered_flag_from_module_dicts(f) + + def __dir__(self) -> List[Text]: + """Returns list of names of all defined flags. + + Useful for TAB-completion in ipython. + + Returns: + [str], a list of names of all defined flags. + """ + return sorted(self.__dict__['__flags']) + + def __getitem__(self, name: Text) -> Flag: + """Returns the Flag object for the flag --name.""" + return self._flags()[name] + + def _hide_flag(self, name): + """Marks the flag --name as hidden.""" + self.__dict__['__hiddenflags'].add(name) + + def __getattr__(self, name: Text) -> Any: + """Retrieves the 'value' attribute of the flag --name.""" + fl = self._flags() + if name not in fl: + raise AttributeError(name) + if name in self.__dict__['__hiddenflags']: + raise AttributeError(name) + + if self.__dict__['__flags_parsed'] or fl[name].present: + return fl[name].value + else: + raise _exceptions.UnparsedFlagAccessError( + 'Trying to access flag --%s before flags were parsed.' % name) + + def __setattr__(self, name: Text, value: _T) -> _T: + """Sets the 'value' attribute of the flag --name.""" + self._set_attributes(**{name: value}) + return value + + def _set_attributes(self, **attributes: Any) -> None: + """Sets multiple flag values together, triggers validators afterwards.""" + fl = self._flags() + known_flag_vals = {} + known_flag_used_defaults = {} + try: + for name, value in attributes.items(): + if name in self.__dict__['__hiddenflags']: + raise AttributeError(name) + if name in fl: + orig = fl[name].value + fl[name].value = value + known_flag_vals[name] = orig + else: + self._set_unknown_flag(name, value) + for name in known_flag_vals: + self._assert_validators(fl[name].validators) + known_flag_used_defaults[name] = fl[name].using_default_value + fl[name].using_default_value = False + except: + for name, orig in known_flag_vals.items(): + fl[name].value = orig + for name, orig in known_flag_used_defaults.items(): + fl[name].using_default_value = orig + # NOTE: We do not attempt to undo unknown flag side effects because we + # cannot reliably undo the user-configured behavior. + raise + + def validate_all_flags(self) -> None: + """Verifies whether all flags pass validation. + + Raises: + AttributeError: Raised if validators work with a non-existing flag. + IllegalFlagValueError: Raised if validation fails for at least one + validator. + """ + all_validators = set() + for flag in self._flags().values(): + all_validators.update(flag.validators) + self._assert_validators(all_validators) + + def _assert_validators( + self, validators: Iterable[_validators_classes.Validator] + ) -> None: + """Asserts if all validators in the list are satisfied. + + It asserts validators in the order they were created. + + Args: + validators: Iterable(validators.Validator), validators to be verified. + + Raises: + AttributeError: Raised if validators work with a non-existing flag. 
+ IllegalFlagValueError: Raised if validation fails for at least one + validator. + """ + messages = [] + bad_flags = set() + for validator in sorted( + validators, key=lambda validator: validator.insertion_index): + try: + if isinstance(validator, _validators_classes.SingleFlagValidator): + if validator.flag_name in bad_flags: + continue + elif isinstance(validator, _validators_classes.MultiFlagsValidator): + if bad_flags & set(validator.flag_names): + continue + validator.verify(self) + except _exceptions.ValidationError as e: + if isinstance(validator, _validators_classes.SingleFlagValidator): + bad_flags.add(validator.flag_name) + elif isinstance(validator, _validators_classes.MultiFlagsValidator): + bad_flags.update(set(validator.flag_names)) + message = validator.print_flags_with_values(self) + messages.append('%s: %s' % (message, str(e))) + if messages: + raise _exceptions.IllegalFlagValueError('\n'.join(messages)) + + def __delattr__(self, flag_name: Text) -> None: + """Deletes a previously-defined flag from a flag object. + + This method makes sure we can delete a flag by using + + del FLAGS. + + E.g., + + flags.DEFINE_integer('foo', 1, 'Integer flag.') + del flags.FLAGS.foo + + If a flag is also registered by its the other name (long name or short + name), the other name won't be deleted. + + Args: + flag_name: str, the name of the flag to be deleted. + + Raises: + AttributeError: Raised when there is no registered flag named flag_name. + """ + fl = self._flags() + if flag_name not in fl: + raise AttributeError(flag_name) + + flag_obj = fl[flag_name] + del fl[flag_name] + + self._cleanup_unregistered_flag_from_module_dicts(flag_obj) + + def set_default(self, name: Text, value: Any) -> None: + """Changes the default value of the named flag object. + + The flag's current value is also updated if the flag is currently using + the default value, i.e. not specified in the command line, and not set + by FLAGS.name = value. + + Args: + name: str, the name of the flag to modify. + value: The new default value. + + Raises: + UnrecognizedFlagError: Raised when there is no registered flag named name. + IllegalFlagValueError: Raised when value is not valid. + """ + fl = self._flags() + if name not in fl: + self._set_unknown_flag(name, value) + return + fl[name]._set_default(value) # pylint: disable=protected-access + self._assert_validators(fl[name].validators) + + def __contains__(self, name: Text) -> bool: + """Returns True if name is a value (flag) in the dict.""" + return name in self._flags() + + def __len__(self) -> int: + return len(self.__dict__['__flags']) + + def __iter__(self) -> Iterator[Text]: + return iter(self._flags()) + + def __call__( + self, argv: Sequence[Text], known_only: bool = False + ) -> List[Text]: + """Parses flags from argv; stores parsed flags into this FlagValues object. + + All unparsed arguments are returned. + + Args: + argv: a tuple/list of strings. + known_only: bool, if True, parse and remove known flags; return the rest + untouched. Unknown flags specified by --undefok are not returned. + + Returns: + The list of arguments not parsed as options, including argv[0]. + + Raises: + Error: Raised on any parsing error. + TypeError: Raised on passing wrong type of arguments. + ValueError: Raised on flag value parsing error. 
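+
+    Example (an illustrative sketch; the ``retries`` flag and program name
+    are hypothetical)::
+
+        FLAGS = flags.FlagValues()
+        flags.DEFINE_integer('retries', 3, 'Number of retries.',
+                             flag_values=FLAGS)
+        remaining = FLAGS(['prog', '--retries=5', 'positional'])
+        # remaining == ['prog', 'positional'] and FLAGS.retries == 5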
+ """ + if isinstance(argv, (str, bytes)): + raise TypeError( + 'argv should be a tuple/list of strings, not bytes or string.') + if not argv: + raise ValueError( + 'argv cannot be an empty list, and must contain the program name as ' + 'the first element.') + + # This pre parses the argv list for --flagfile=<> options. + program_name = argv[0] + args = self.read_flags_from_files(argv[1:], force_gnu=False) + + # Parse the arguments. + unknown_flags, unparsed_args = self._parse_args(args, known_only) + + # Handle unknown flags by raising UnrecognizedFlagError. + # Note some users depend on us raising this particular error. + for name, value in unknown_flags: + suggestions = _helpers.get_flag_suggestions(name, list(self)) + raise _exceptions.UnrecognizedFlagError( + name, value, suggestions=suggestions) + + self.mark_as_parsed() + self.validate_all_flags() + return [program_name] + unparsed_args + + def __getstate__(self) -> Any: + raise TypeError("can't pickle FlagValues") + + def __copy__(self) -> Any: + raise TypeError('FlagValues does not support shallow copies. ' + 'Use absl.testing.flagsaver or copy.deepcopy instead.') + + def __deepcopy__(self, memo) -> Any: + result = object.__new__(type(self)) + result.__dict__.update(copy.deepcopy(self.__dict__, memo)) + return result + + def _set_is_retired_flag_func(self, is_retired_flag_func): + """Sets a function for checking retired flags. + + Do not use it. This is a private absl API used to check retired flags + registered by the absl C++ flags library. + + Args: + is_retired_flag_func: Callable(str) -> (bool, bool), a function takes flag + name as parameter, returns a tuple (is_retired, type_is_bool). + """ + self.__dict__['__is_retired_flag_func'] = is_retired_flag_func + + def _parse_args( + self, args: List[str], known_only: bool + ) -> Tuple[List[Tuple[Optional[str], Any]], List[str]]: + """Helper function to do the main argument parsing. + + This function goes through args and does the bulk of the flag parsing. + It will find the corresponding flag in our flag dictionary, and call its + .parse() method on the flag value. + + Args: + args: [str], a list of strings with the arguments to parse. + known_only: bool, if True, parse and remove known flags; return the rest + untouched. Unknown flags specified by --undefok are not returned. + + Returns: + A tuple with the following: + unknown_flags: List of (flag name, arg) for flags we don't know about. + unparsed_args: List of arguments we did not parse. + + Raises: + Error: Raised on any parsing error. + ValueError: Raised on flag value parsing error. + """ + unparsed_names_and_args = [] # A list of (flag name or None, arg). + undefok = set() + retired_flag_func = self.__dict__['__is_retired_flag_func'] + + flag_dict = self._flags() + args = iter(args) + for arg in args: + value = None + + def get_value(): + # pylint: disable=cell-var-from-loop + try: + return next(args) if value is None else value + except StopIteration: + raise _exceptions.Error('Missing value for flag ' + arg) # pylint: disable=undefined-loop-variable + + if not arg.startswith('-'): + # A non-argument: default is break, GNU is skip. + unparsed_names_and_args.append((None, arg)) + if self.is_gnu_getopt(): + continue + else: + break + + if arg == '--': + if known_only: + unparsed_names_and_args.append((None, arg)) + break + + # At this point, arg must start with '-'. 
+ if arg.startswith('--'): + arg_without_dashes = arg[2:] + else: + arg_without_dashes = arg[1:] + + if '=' in arg_without_dashes: + name, value = arg_without_dashes.split('=', 1) + else: + name, value = arg_without_dashes, None + + if not name: + # The argument is all dashes (including one dash). + unparsed_names_and_args.append((None, arg)) + if self.is_gnu_getopt(): + continue + else: + break + + # --undefok is a special case. + if name == 'undefok': + value = get_value() + undefok.update(v.strip() for v in value.split(',')) + undefok.update('no' + v.strip() for v in value.split(',')) + continue + + flag = flag_dict.get(name) + if flag is not None: + if flag.boolean and value is None: + value = 'true' + else: + value = get_value() + elif name.startswith('no') and len(name) > 2: + # Boolean flags can take the form of --noflag, with no value. + noflag = flag_dict.get(name[2:]) + if noflag is not None and noflag.boolean: + if value is not None: + raise ValueError(arg + ' does not take an argument') + flag = noflag + value = 'false' + + if retired_flag_func and flag is None: + is_retired, is_bool = retired_flag_func(name) + + # If we didn't recognize that flag, but it starts with + # "no" then maybe it was a boolean flag specified in the + # --nofoo form. + if not is_retired and name.startswith('no'): + is_retired, is_bool = retired_flag_func(name[2:]) + is_retired = is_retired and is_bool + + if is_retired: + if not is_bool and value is None: + # This happens when a non-bool retired flag is specified + # in format of "--flag value". + get_value() + logging.error( + 'Flag "%s" is retired and should no longer be specified. See ' + 'https://abseil.io/tips/90.', + name, + ) + continue + + if flag is not None: + # LINT.IfChange + flag.parse(value) + flag.using_default_value = False + # LINT.ThenChange(../testing/flagsaver.py:flag_override_parsing) + else: + unparsed_names_and_args.append((name, arg)) + + unknown_flags = [] + unparsed_args = [] + for name, arg in unparsed_names_and_args: + if name is None: + # Positional arguments. + unparsed_args.append(arg) + elif name in undefok: + # Remove undefok flags. + continue + else: + # This is an unknown flag. + if known_only: + unparsed_args.append(arg) + else: + unknown_flags.append((name, arg)) + + unparsed_args.extend(list(args)) + return unknown_flags, unparsed_args + + def is_parsed(self) -> bool: + """Returns whether flags were parsed.""" + return self.__dict__['__flags_parsed'] + + def mark_as_parsed(self) -> None: + """Explicitly marks flags as parsed. + + Use this when the caller knows that this FlagValues has been parsed as if + a ``__call__()`` invocation has happened. This is only a public method for + use by things like appcommands which do additional command like parsing. + """ + self.__dict__['__flags_parsed'] = True + + def unparse_flags(self) -> None: + """Unparses all flags to the point before any FLAGS(argv) was called.""" + for f in self._flags().values(): + f.unparse() + # We log this message before marking flags as unparsed to avoid a + # problem when the logging library causes flags access. 
+ logging.info('unparse_flags() called; flags access will now raise errors.') + self.__dict__['__flags_parsed'] = False + self.__dict__['__unparse_flags_called'] = True + + def flag_values_dict(self) -> Dict[Text, Any]: + """Returns a dictionary that maps flag names to flag values.""" + return {name: flag.value for name, flag in self._flags().items()} + + def __str__(self): + """Returns a help string for all known flags.""" + return self.get_help() + + def get_help( + self, prefix: Text = '', include_special_flags: bool = True + ) -> Text: + """Returns a help string for all known flags. + + Args: + prefix: str, per-line output prefix. + include_special_flags: bool, whether to include description of + SPECIAL_FLAGS, i.e. --flagfile and --undefok. + + Returns: + str, formatted help message. + """ + flags_by_module = self.flags_by_module_dict() + if flags_by_module: + modules = sorted(flags_by_module) + # Print the help for the main module first, if possible. + main_module = sys.argv[0] + if main_module in modules: + modules.remove(main_module) + modules = [main_module] + modules + return self._get_help_for_modules(modules, prefix, include_special_flags) + else: + output_lines = [] + # Just print one long list of flags. + values = self._flags().values() + if include_special_flags: + values = itertools.chain( + values, _helpers.SPECIAL_FLAGS._flags().values() # pylint: disable=protected-access # pytype: disable=attribute-error + ) + self._render_flag_list(values, output_lines, prefix) + return '\n'.join(output_lines) + + def _get_help_for_modules(self, modules, prefix, include_special_flags): + """Returns the help string for a list of modules. + + Private to absl.flags package. + + Args: + modules: List[str], a list of modules to get the help string for. + prefix: str, a string that is prepended to each generated help line. + include_special_flags: bool, whether to include description of + SPECIAL_FLAGS, i.e. --flagfile and --undefok. + """ + output_lines = [] + for module in modules: + self._render_our_module_flags(module, output_lines, prefix) + if include_special_flags: + self._render_module_flags( + 'absl.flags', + _helpers.SPECIAL_FLAGS._flags().values(), # pylint: disable=protected-access # pytype: disable=attribute-error + output_lines, + prefix, + ) + return '\n'.join(output_lines) + + def _render_module_flags(self, module, flags, output_lines, prefix=''): + """Returns a help string for a given module.""" + if not isinstance(module, str): + module = module.__name__ + output_lines.append('\n%s%s:' % (prefix, module)) + self._render_flag_list(flags, output_lines, prefix + ' ') + + def _render_our_module_flags(self, module, output_lines, prefix=''): + """Returns a help string for a given module.""" + flags = self.get_flags_for_module(module) + if flags: + self._render_module_flags(module, flags, output_lines, prefix) + + def _render_our_module_key_flags(self, module, output_lines, prefix=''): + """Returns a help string for the key flags of a given module. + + Args: + module: module|str, the module to render key flags for. + output_lines: [str], a list of strings. The generated help message lines + will be appended to this list. + prefix: str, a string that is prepended to each generated help line. + """ + key_flags = self.get_key_flags_for_module(module) + if key_flags: + self._render_module_flags(module, key_flags, output_lines, prefix) + + def module_help(self, module: Any) -> Text: + """Describes the key flags of a module. 
+ + Args: + module: module|str, the module to describe the key flags for. + + Returns: + str, describing the key flags of a module. + """ + helplist = [] + self._render_our_module_key_flags(module, helplist) + return '\n'.join(helplist) + + def main_module_help(self) -> Text: + """Describes the key flags of the main module. + + Returns: + str, describing the key flags of the main module. + """ + return self.module_help(sys.argv[0]) + + def _render_flag_list(self, flaglist, output_lines, prefix=' '): + fl = self._flags() + special_fl = _helpers.SPECIAL_FLAGS._flags() # pylint: disable=protected-access # pytype: disable=attribute-error + flaglist = [(flag.name, flag) for flag in flaglist] + flaglist.sort() + flagset = {} + for (name, flag) in flaglist: + # It's possible this flag got deleted or overridden since being + # registered in the per-module flaglist. Check now against the + # canonical source of current flag information, the _flags. + if fl.get(name, None) != flag and special_fl.get(name, None) != flag: + # a different flag is using this name now + continue + # only print help once + if flag in flagset: + continue + flagset[flag] = 1 + flaghelp = '' + if flag.short_name: + flaghelp += '-%s,' % flag.short_name + if flag.boolean: + flaghelp += '--[no]%s:' % flag.name + else: + flaghelp += '--%s:' % flag.name + flaghelp += ' ' + if flag.help: + flaghelp += flag.help + flaghelp = _helpers.text_wrap( + flaghelp, indent=prefix + ' ', firstline_indent=prefix) + if flag.default_as_str: + flaghelp += '\n' + flaghelp += _helpers.text_wrap( + '(default: %s)' % flag.default_as_str, indent=prefix + ' ') + if flag.parser.syntactic_help: + flaghelp += '\n' + flaghelp += _helpers.text_wrap( + '(%s)' % flag.parser.syntactic_help, indent=prefix + ' ') + output_lines.append(flaghelp) + + def get_flag_value(self, name: Text, default: Any) -> Any: # pylint: disable=invalid-name + """Returns the value of a flag (if not None) or a default value. + + Args: + name: str, the name of a flag. + default: Default value to use if the flag value is None. + + Returns: + Requested flag value or default. + """ + + value = self.__getattr__(name) + if value is not None: # Can't do if not value, b/c value might be '0' or "" + return value + else: + return default + + def _is_flag_file_directive(self, flag_string): + """Checks whether flag_string contain a --flagfile= directive.""" + if isinstance(flag_string, str): + if flag_string.startswith('--flagfile='): + return 1 + elif flag_string == '--flagfile': + return 1 + elif flag_string.startswith('-flagfile='): + return 1 + elif flag_string == '-flagfile': + return 1 + else: + return 0 + return 0 + + def _extract_filename(self, flagfile_str): + """Returns filename from a flagfile_str of form -[-]flagfile=filename. + + The cases of --flagfile foo and -flagfile foo shouldn't be hitting + this function, as they are dealt with in the level above this + function. + + Args: + flagfile_str: str, the flagfile string. + + Returns: + str, the filename from a flagfile_str of form -[-]flagfile=filename. + + Raises: + Error: Raised when illegal --flagfile is provided. 
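+
+    For example (illustrative), ``'--flagfile=~/base.flags'`` yields
+    ``os.path.expanduser('~/base.flags')``, i.e. the user-expanded path.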
+ """ + if flagfile_str.startswith('--flagfile='): + return os.path.expanduser((flagfile_str[(len('--flagfile=')):]).strip()) + elif flagfile_str.startswith('-flagfile='): + return os.path.expanduser((flagfile_str[(len('-flagfile=')):]).strip()) + else: + raise _exceptions.Error('Hit illegal --flagfile type: %s' % flagfile_str) + + def _get_flag_file_lines(self, filename, parsed_file_stack=None): + """Returns the useful (!=comments, etc) lines from a file with flags. + + Args: + filename: str, the name of the flag file. + parsed_file_stack: [str], a list of the names of the files that we have + recursively encountered at the current depth. MUTATED BY THIS FUNCTION + (but the original value is preserved upon successfully returning from + function call). + + Returns: + List of strings. See the note below. + + NOTE(springer): This function checks for a nested --flagfile= + tag and handles the lower file recursively. It returns a list of + all the lines that _could_ contain command flags. This is + EVERYTHING except whitespace lines and comments (lines starting + with '#' or '//'). + """ + # For consistency with the cpp version, ignore empty values. + if not filename: + return [] + if parsed_file_stack is None: + parsed_file_stack = [] + # We do a little safety check for reparsing a file we've already encountered + # at a previous depth. + if filename in parsed_file_stack: + sys.stderr.write('Warning: Hit circular flagfile dependency. Ignoring' + ' flagfile: %s\n' % (filename,)) + return [] + else: + parsed_file_stack.append(filename) + + line_list = [] # All line from flagfile. + flag_line_list = [] # Subset of lines w/o comments, blanks, flagfile= tags. + try: + file_obj = open(filename, 'r') + except IOError as e_msg: + raise _exceptions.CantOpenFlagFileError( + 'ERROR:: Unable to open flagfile: %s' % e_msg) + + with file_obj: + line_list = file_obj.readlines() + + # This is where we check each line in the file we just read. + for line in line_list: + if line.isspace(): + pass + # Checks for comment (a line that starts with '#'). + elif line.startswith('#') or line.startswith('//'): + pass + # Checks for a nested "--flagfile=" flag in the current file. + # If we find one, recursively parse down into that file. + elif self._is_flag_file_directive(line): + sub_filename = self._extract_filename(line) + included_flags = self._get_flag_file_lines( + sub_filename, parsed_file_stack=parsed_file_stack) + flag_line_list.extend(included_flags) + else: + # Any line that's not a comment or a nested flagfile should get + # copied into 2nd position. This leaves earlier arguments + # further back in the list, thus giving them higher priority. + flag_line_list.append(line.strip()) + + parsed_file_stack.pop() + return flag_line_list + + def read_flags_from_files( + self, argv: Sequence[Text], force_gnu: bool = True + ) -> List[Text]: + """Processes command line args, but also allow args to be read from file. + + Args: + argv: [str], a list of strings, usually sys.argv[1:], which may contain + one or more flagfile directives of the form --flagfile="./filename". + Note that the name of the program (sys.argv[0]) should be omitted. + force_gnu: bool, if False, --flagfile parsing obeys the + FLAGS.is_gnu_getopt() value. If True, ignore the value and always follow + gnu_getopt semantics. + + Returns: + A new list which has the original list combined with what we read + from any flagfile(s). + + Raises: + IllegalFlagValueError: Raised when --flagfile is provided with no + argument. 
+ + This function is called by FLAGS(argv). + It scans the input list for a flag that looks like: + --flagfile=. Then it opens , reads all valid key + and value pairs and inserts them into the input list in exactly the + place where the --flagfile arg is found. + + Note that your application's flags are still defined the usual way + using absl.flags DEFINE_flag() type functions. + + Notes (assuming we're getting a commandline of some sort as our input): + + * For duplicate flags, the last one we hit should "win". + * Since flags that appear later win, a flagfile's settings can be "weak" + if the --flagfile comes at the beginning of the argument sequence, + and it can be "strong" if the --flagfile comes at the end. + * A further "--flagfile=" CAN be nested in a flagfile. + It will be expanded in exactly the spot where it is found. + * In a flagfile, a line beginning with # or // is a comment. + * Entirely blank lines _should_ be ignored. + """ + rest_of_args = argv + new_argv = [] + while rest_of_args: + current_arg = rest_of_args[0] + rest_of_args = rest_of_args[1:] + if self._is_flag_file_directive(current_arg): + # This handles the case of -(-)flagfile foo. In this case the + # next arg really is part of this one. + if current_arg == '--flagfile' or current_arg == '-flagfile': + if not rest_of_args: + raise _exceptions.IllegalFlagValueError( + '--flagfile with no argument') + flag_filename = os.path.expanduser(rest_of_args[0]) + rest_of_args = rest_of_args[1:] + else: + # This handles the case of (-)-flagfile=foo. + flag_filename = self._extract_filename(current_arg) + new_argv.extend(self._get_flag_file_lines(flag_filename)) + else: + new_argv.append(current_arg) + # Stop parsing after '--', like getopt and gnu_getopt. + if current_arg == '--': + break + # Stop parsing after a non-flag, like getopt. + if not current_arg.startswith('-'): + if not force_gnu and not self.__dict__['__use_gnu_getopt']: + break + else: + if ('=' not in current_arg and rest_of_args and + not rest_of_args[0].startswith('-')): + # If this is an occurrence of a legitimate --x y, skip the value + # so that it won't be mistaken for a standalone arg. + fl = self._flags() + name = current_arg.lstrip('-') + if name in fl and not fl[name].boolean: + current_arg = rest_of_args[0] + rest_of_args = rest_of_args[1:] + new_argv.append(current_arg) + + if rest_of_args: + new_argv.extend(rest_of_args) + + return new_argv + + def flags_into_string(self) -> Text: + """Returns a string with the flags assignments from this FlagValues object. + + This function ignores flags whose value is None. Each flag + assignment is separated by a newline. + + NOTE: MUST mirror the behavior of the C++ CommandlineFlagsIntoString + from https://github.com/gflags/gflags. + + Returns: + str, the string with the flags assignments from this FlagValues object. + The flags are ordered by (module_name, flag_name). + """ + module_flags = sorted(self.flags_by_module_dict().items()) + s = '' + for unused_module_name, flags in module_flags: + flags = sorted(flags, key=lambda f: f.name) + for flag in flags: + if flag.value is not None: + s += flag.serialize() + '\n' + return s + + def append_flags_into_file(self, filename: Text) -> None: + """Appends all flags assignments from this FlagInfo object to a file. + + Output will be in the format of a flagfile. + + NOTE: MUST mirror the behavior of the C++ AppendFlagsIntoFile + from https://github.com/gflags/gflags. + + Args: + filename: str, name of the file. 
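+
+    Example of the intended round trip (an illustrative sketch; the path is
+    hypothetical)::
+
+        FLAGS.append_flags_into_file('/tmp/saved.flags')
+        # A later invocation can replay these settings with:
+        #   FLAGS(['prog', '--flagfile=/tmp/saved.flags'])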
+ """ + with open(filename, 'a') as out_file: + out_file.write(self.flags_into_string()) + + def write_help_in_xml_format(self, outfile: Optional[TextIO] = None) -> None: + """Outputs flag documentation in XML format. + + NOTE: We use element names that are consistent with those used by + the C++ command-line flag library, from + https://github.com/gflags/gflags. + We also use a few new elements (e.g., ), but we do not + interfere / overlap with existing XML elements used by the C++ + library. Please maintain this consistency. + + Args: + outfile: File object we write to. Default None means sys.stdout. + """ + doc = minidom.Document() + all_flag = doc.createElement('AllFlags') + doc.appendChild(all_flag) + + all_flag.appendChild( + _helpers.create_xml_dom_element(doc, 'program', + os.path.basename(sys.argv[0]))) + + usage_doc = sys.modules['__main__'].__doc__ + if not usage_doc: + usage_doc = '\nUSAGE: %s [flags]\n' % sys.argv[0] + else: + usage_doc = usage_doc.replace('%s', sys.argv[0]) + all_flag.appendChild( + _helpers.create_xml_dom_element(doc, 'usage', usage_doc)) + + # Get list of key flags for the main module. + key_flags = self.get_key_flags_for_module(sys.argv[0]) + + # Sort flags by declaring module name and next by flag name. + flags_by_module = self.flags_by_module_dict() + all_module_names = list(flags_by_module.keys()) + all_module_names.sort() + for module_name in all_module_names: + flag_list = [(f.name, f) for f in flags_by_module[module_name]] + flag_list.sort() + for unused_flag_name, flag in flag_list: + is_key = flag in key_flags + all_flag.appendChild( + flag._create_xml_dom_element( # pylint: disable=protected-access + doc, + module_name, + is_key=is_key)) + + outfile = outfile or sys.stdout + outfile.write( + doc.toprettyxml(indent=' ', encoding='utf-8').decode('utf-8')) + outfile.flush() + + def _check_method_name_conflicts(self, name: str, flag: Flag): + if flag.allow_using_method_names: + return + short_name = flag.short_name + flag_names = {name} if short_name is None else {name, short_name} + for flag_name in flag_names: + if flag_name in self.__dict__['__banned_flag_names']: + raise _exceptions.FlagNameConflictsWithMethodError( + 'Cannot define a flag named "{name}". It conflicts with a method ' + 'on class "{class_name}". To allow defining it, use ' + 'allow_using_method_names and access the flag value with ' + "FLAGS['{name}'].value. FLAGS.{name} returns the method, " + 'not the flag value.'.format( + name=flag_name, class_name=type(self).__name__)) + + +FLAGS = FlagValues() + + +class FlagHolder(Generic[_T]): + """Holds a defined flag. + + This facilitates a cleaner api around global state. Instead of:: + + flags.DEFINE_integer('foo', ...) + flags.DEFINE_integer('bar', ...) + + def method(): + # prints parsed value of 'bar' flag + print(flags.FLAGS.foo) + # runtime error due to typo or possibly bad coding style. + print(flags.FLAGS.baz) + + it encourages code like:: + + _FOO_FLAG = flags.DEFINE_integer('foo', ...) + _BAR_FLAG = flags.DEFINE_integer('bar', ...) + + def method(): + print(_FOO_FLAG.value) + print(_BAR_FLAG.value) + + since the name of the flag appears only once in the source code. + """ + + value: _T + + def __init__( + self, + flag_values: FlagValues, + flag: Flag[_T], + ensure_non_none_value: bool = False, + ): + """Constructs a FlagHolder instance providing typesafe access to flag. + + Args: + flag_values: The container the flag is registered to. + flag: The flag object for this flag. 
+ ensure_non_none_value: Is the value of the flag allowed to be None. + """ + self._flagvalues = flag_values + # We take the entire flag object, but only keep the name. Why? + # - We want FlagHolder[T] to be generic container + # - flag_values contains all flags, so has no reference to T. + # - typecheckers don't like to see a generic class where none of the ctor + # arguments refer to the generic type. + self._name = flag.name + # We intentionally do NOT check if the default value is None. + # This allows future use of this for "required flags with None default" + self._ensure_non_none_value = ensure_non_none_value + + def __eq__(self, other): + raise TypeError( + "unsupported operand type(s) for ==: '{0}' and '{1}' " + "(did you mean to use '{0}.value' instead?)".format( + type(self).__name__, type(other).__name__)) + + def __bool__(self): + raise TypeError( + "bool() not supported for instances of type '{0}' " + "(did you mean to use '{0}.value' instead?)".format( + type(self).__name__)) + + __nonzero__ = __bool__ + + @property + def name(self) -> Text: + return self._name + + @property + def value(self) -> _T: + """Returns the value of the flag. + + If ``_ensure_non_none_value`` is ``True``, then return value is not + ``None``. + + Raises: + UnparsedFlagAccessError: if flag parsing has not finished. + IllegalFlagValueError: if value is None unexpectedly. + """ + val = getattr(self._flagvalues, self._name) + if self._ensure_non_none_value and val is None: + raise _exceptions.IllegalFlagValueError( + 'Unexpected None value for flag %s' % self._name) + return val + + @property + def default(self) -> _T: + """Returns the default value of the flag.""" + return self._flagvalues[self._name].default + + @property + def present(self) -> bool: + """Returns True if the flag was parsed from command-line flags.""" + return bool(self._flagvalues[self._name].present) + + def serialize(self) -> Text: + """Returns a serialized representation of the flag.""" + return self._flagvalues[self._name].serialize() + + +def resolve_flag_ref( + flag_ref: Union[str, FlagHolder], flag_values: FlagValues +) -> Tuple[str, FlagValues]: + """Helper to validate and resolve a flag reference argument.""" + if isinstance(flag_ref, FlagHolder): + new_flag_values = flag_ref._flagvalues # pylint: disable=protected-access + if flag_values != FLAGS and flag_values != new_flag_values: + raise ValueError( + 'flag_values must not be customized when operating on a FlagHolder') + return flag_ref.name, new_flag_values + return flag_ref, flag_values + + +def resolve_flag_refs( + flag_refs: Sequence[Union[str, FlagHolder]], flag_values: FlagValues +) -> Tuple[List[str], FlagValues]: + """Helper to validate and resolve flag reference list arguments.""" + fv = None + names = [] + for ref in flag_refs: + if isinstance(ref, FlagHolder): + newfv = ref._flagvalues # pylint: disable=protected-access + name = ref.name + else: + newfv = flag_values + name = ref + if fv and fv != newfv: + raise ValueError( + 'multiple FlagValues instances used in invocation. 
' + 'FlagHolders must be registered to the same FlagValues instance as ' + 'do flag names, if provided.') + fv = newfv + names.append(name) + return names, fv diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_helpers.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..1ad559c0487b41d24f406c46e0da4d3240a1947f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_helpers.py @@ -0,0 +1,421 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Internal helper functions for Abseil Python flags library.""" + +import os +import re +import struct +import sys +import textwrap +import types +from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Sequence, Set +from xml.dom import minidom +# pylint: disable=g-import-not-at-top +try: + import fcntl +except ImportError: + fcntl = None +try: + # Importing termios will fail on non-unix platforms. + import termios +except ImportError: + termios = None +# pylint: enable=g-import-not-at-top + + +_DEFAULT_HELP_WIDTH = 80 # Default width of help output. +# Minimal "sane" width of help output. We assume that any value below 40 is +# unreasonable. +_MIN_HELP_WIDTH = 40 + +# Define the allowed error rate in an input string to get suggestions. +# +# We lean towards a high threshold because we tend to be matching a phrase, +# and the simple algorithm used here is geared towards correcting word +# spellings. +# +# For manual testing, consider " --list" which produced a large number +# of spurious suggestions when we used "least_errors > 0.5" instead of +# "least_erros >= 0.5". +_SUGGESTION_ERROR_RATE_THRESHOLD = 0.50 + +# Characters that cannot appear or are highly discouraged in an XML 1.0 +# document. (See http://www.w3.org/TR/REC-xml/#charsets or +# https://en.wikipedia.org/wiki/Valid_characters_in_XML#XML_1.0) +_ILLEGAL_XML_CHARS_REGEX = re.compile( + u'[\x00-\x08\x0b\x0c\x0e-\x1f\x7f-\x84\x86-\x9f\ud800-\udfff\ufffe\uffff]') + +# This is a set of module ids for the modules that disclaim key flags. +# This module is explicitly added to this set so that we never consider it to +# define key flag. +disclaim_module_ids: Set[int] = set([id(sys.modules[__name__])]) + + +# Define special flags here so that help may be generated for them. +# NOTE: Please do NOT use SPECIAL_FLAGS from outside flags module. +# Initialized inside flagvalues.py. +# NOTE: This cannot be annotated as its actual FlagValues type since this would +# create a circular dependency. +SPECIAL_FLAGS: Any = None + + +# This points to the flags module, initialized in flags/__init__.py. +# This should only be used in adopt_module_key_flags to take SPECIAL_FLAGS into +# account. +FLAGS_MODULE: types.ModuleType = None + + +class _ModuleObjectAndName(NamedTuple): + """Module object and name. + + Fields: + - module: object, module object. + - module_name: str, module name. 
+ """ + module: types.ModuleType + module_name: str + + +def get_module_object_and_name( + globals_dict: Dict[str, Any] +) -> _ModuleObjectAndName: + """Returns the module that defines a global environment, and its name. + + Args: + globals_dict: A dictionary that should correspond to an environment + providing the values of the globals. + + Returns: + _ModuleObjectAndName - pair of module object & module name. + Returns (None, None) if the module could not be identified. + """ + name = globals_dict.get('__name__', None) + module = sys.modules.get(name, None) + # Pick a more informative name for the main module. + return _ModuleObjectAndName(module, + (sys.argv[0] if name == '__main__' else name)) + + +def get_calling_module_object_and_name() -> _ModuleObjectAndName: + """Returns the module that's calling into this module. + + We generally use this function to get the name of the module calling a + DEFINE_foo... function. + + Returns: + The module object that called into this one. + + Raises: + AssertionError: Raised when no calling module could be identified. + """ + for depth in range(1, sys.getrecursionlimit()): + # sys._getframe is the right thing to use here, as it's the best + # way to walk up the call stack. + globals_for_frame = sys._getframe(depth).f_globals # pylint: disable=protected-access + module, module_name = get_module_object_and_name(globals_for_frame) + if id(module) not in disclaim_module_ids and module_name is not None: + return _ModuleObjectAndName(module, module_name) + raise AssertionError('No module was found') + + +def get_calling_module() -> str: + """Returns the name of the module that's calling into this module.""" + return get_calling_module_object_and_name().module_name + + +def create_xml_dom_element( + doc: minidom.Document, name: str, value: Any +) -> minidom.Element: + """Returns an XML DOM element with name and text value. + + Args: + doc: minidom.Document, the DOM document it should create nodes from. + name: str, the tag of XML element. + value: object, whose string representation will be used + as the value of the XML element. Illegal or highly discouraged xml 1.0 + characters are stripped. + + Returns: + An instance of minidom.Element. + """ + s = str(value) + if isinstance(value, bool): + # Display boolean values as the C++ flag library does: no caps. + s = s.lower() + # Remove illegal xml characters. + s = _ILLEGAL_XML_CHARS_REGEX.sub(u'', s) + + e = doc.createElement(name) + e.appendChild(doc.createTextNode(s)) + return e + + +def get_help_width() -> int: + """Returns the integer width of help lines that is used in TextWrap.""" + if not sys.stdout.isatty() or termios is None or fcntl is None: + return _DEFAULT_HELP_WIDTH + try: + data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, b'1234') + columns = struct.unpack('hh', data)[1] + # Emacs mode returns 0. + # Here we assume that any value below 40 is unreasonable. + if columns >= _MIN_HELP_WIDTH: + return columns + # Returning an int as default is fine, int(int) just return the int. + return int(os.getenv('COLUMNS', _DEFAULT_HELP_WIDTH)) + + except (TypeError, IOError, struct.error): + return _DEFAULT_HELP_WIDTH + + +def get_flag_suggestions( + attempt: Optional[str], longopt_list: Sequence[str] +) -> List[str]: + """Returns helpful similar matches for an invalid flag.""" + # Don't suggest on very short strings, or if no longopts are specified. 
+ if len(attempt) <= 2 or not longopt_list: + return [] + + option_names = [v.split('=')[0] for v in longopt_list] + + # Find close approximations in flag prefixes. + # This also handles the case where the flag is spelled right but ambiguous. + distances = [(_damerau_levenshtein(attempt, option[0:len(attempt)]), option) + for option in option_names] + # t[0] is distance, and sorting by t[1] allows us to have stable output. + distances.sort() + + least_errors, _ = distances[0] + # Don't suggest excessively bad matches. + if least_errors >= _SUGGESTION_ERROR_RATE_THRESHOLD * len(attempt): + return [] + + suggestions = [] + for errors, name in distances: + if errors == least_errors: + suggestions.append(name) + else: + break + return suggestions + + +def _damerau_levenshtein(a, b): + """Returns Damerau-Levenshtein edit distance from a to b.""" + memo = {} + + def distance(x, y): + """Recursively defined string distance with memoization.""" + if (x, y) in memo: + return memo[x, y] + if not x: + d = len(y) + elif not y: + d = len(x) + else: + d = min( + distance(x[1:], y) + 1, # correct an insertion error + distance(x, y[1:]) + 1, # correct a deletion error + distance(x[1:], y[1:]) + (x[0] != y[0])) # correct a wrong character + if len(x) >= 2 and len(y) >= 2 and x[0] == y[1] and x[1] == y[0]: + # Correct a transposition. + t = distance(x[2:], y[2:]) + 1 + if d > t: + d = t + + memo[x, y] = d + return d + return distance(a, b) + + +def text_wrap( + text: str, + length: Optional[int] = None, + indent: str = '', + firstline_indent: Optional[str] = None, +) -> str: + """Wraps a given text to a maximum line length and returns it. + + It turns lines that only contain whitespace into empty lines, keeps new lines, + and expands tabs using 4 spaces. + + Args: + text: str, text to wrap. + length: int, maximum length of a line, includes indentation. + If this is None then use get_help_width() + indent: str, indent for all but first line. + firstline_indent: str, indent for first line; if None, fall back to indent. + + Returns: + str, the wrapped text. + + Raises: + ValueError: Raised if indent or firstline_indent not shorter than length. + """ + # Get defaults where callee used None + if length is None: + length = get_help_width() + if indent is None: + indent = '' + if firstline_indent is None: + firstline_indent = indent + + if len(indent) >= length: + raise ValueError('Length of indent exceeds length') + if len(firstline_indent) >= length: + raise ValueError('Length of first line indent exceeds length') + + text = text.expandtabs(4) + + result = [] + # Create one wrapper for the first paragraph and one for subsequent + # paragraphs that does not have the initial wrapping. + wrapper = textwrap.TextWrapper( + width=length, initial_indent=firstline_indent, subsequent_indent=indent) + subsequent_wrapper = textwrap.TextWrapper( + width=length, initial_indent=indent, subsequent_indent=indent) + + # textwrap does not have any special treatment for newlines. From the docs: + # "...newlines may appear in the middle of a line and cause strange output. + # For this reason, text should be split into paragraphs (using + # str.splitlines() or similar) which are wrapped separately." + for paragraph in (p.strip() for p in text.splitlines()): + if paragraph: + result.extend(wrapper.wrap(paragraph)) + else: + result.append('') # Keep empty lines. + # Replace initial wrapper with wrapper for subsequent paragraphs. 
+ wrapper = subsequent_wrapper + + return '\n'.join(result) + + +def flag_dict_to_args( + flag_map: Dict[str, Any], multi_flags: Optional[Set[str]] = None +) -> Iterable[str]: + """Convert a dict of values into process call parameters. + + This method is used to convert a dictionary into a sequence of parameters + for a binary that parses arguments using this module. + + Args: + flag_map: dict, a mapping where the keys are flag names (strings). + values are treated according to their type: + + * If value is ``None``, then only the name is emitted. + * If value is ``True``, then only the name is emitted. + * If value is ``False``, then only the name prepended with 'no' is + emitted. + * If value is a string then ``--name=value`` is emitted. + * If value is a collection, this will emit + ``--name=value1,value2,value3``, unless the flag name is in + ``multi_flags``, in which case this will emit + ``--name=value1 --name=value2 --name=value3``. + * Everything else is converted to string an passed as such. + + multi_flags: set, names (strings) of flags that should be treated as + multi-flags. + Yields: + sequence of string suitable for a subprocess execution. + """ + for key, value in flag_map.items(): + if value is None: + yield '--%s' % key + elif isinstance(value, bool): + if value: + yield '--%s' % key + else: + yield '--no%s' % key + elif isinstance(value, (bytes, type(u''))): + # We don't want strings to be handled like python collections. + yield '--%s=%s' % (key, value) + else: + # Now we attempt to deal with collections. + try: + if multi_flags and key in multi_flags: + for item in value: + yield '--%s=%s' % (key, str(item)) + else: + yield '--%s=%s' % (key, ','.join(str(item) for item in value)) + except TypeError: + # Default case. + yield '--%s=%s' % (key, value) + + +def trim_docstring(docstring: str) -> str: + """Removes indentation from triple-quoted strings. + + This is the function specified in PEP 257 to handle docstrings: + https://www.python.org/dev/peps/pep-0257/. + + Args: + docstring: str, a python docstring. + + Returns: + str, docstring with indentation removed. + """ + if not docstring: + return '' + + # If you've got a line longer than this you have other problems... + max_indent = 1 << 29 + + # Convert tabs to spaces (following the normal Python rules) + # and split into a list of lines: + lines = docstring.expandtabs().splitlines() + + # Determine minimum indentation (first line doesn't count): + indent = max_indent + for line in lines[1:]: + stripped = line.lstrip() + if stripped: + indent = min(indent, len(line) - len(stripped)) + # Remove indentation (first line is special): + trimmed = [lines[0].strip()] + if indent < max_indent: + for line in lines[1:]: + trimmed.append(line[indent:].rstrip()) + # Strip off trailing and leading blank lines: + while trimmed and not trimmed[-1]: + trimmed.pop() + while trimmed and not trimmed[0]: + trimmed.pop(0) + # Return a single string: + return '\n'.join(trimmed) + + +def doc_to_help(doc: str) -> str: + """Takes a __doc__ string and reformats it as help.""" + + # Get rid of starting and ending white space. Using lstrip() or even + # strip() could drop more than maximum of first line and right space + # of last line. + doc = doc.strip() + + # Get rid of all empty lines. + whitespace_only_line = re.compile('^[ \t]+$', re.M) + doc = whitespace_only_line.sub('', doc) + + # Cut out common space at line beginnings. + doc = trim_docstring(doc) + + # Just like this module's comment, comments tend to be aligned somehow. 
+ # In other words they all start with the same amount of white space. + # 1) keep double new lines; + # 2) keep ws after new lines if not empty line; + # 3) all other new lines shall be changed to a space; + # Solution: Match new lines between non white space and replace with space. + doc = re.sub(r'(?<=\S)\n(?=\S)', ' ', doc, flags=re.M) + + return doc diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_validators.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_validators.py new file mode 100644 index 0000000000000000000000000000000000000000..2161284a8e284fdfbe42d0f0128e7068cb1ba85f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_validators.py @@ -0,0 +1,352 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Module to enforce different constraints on flags. + +Flags validators can be registered using following functions / decorators:: + + flags.register_validator + @flags.validator + flags.register_multi_flags_validator + @flags.multi_flags_validator + +Three convenience functions are also provided for common flag constraints:: + + flags.mark_flag_as_required + flags.mark_flags_as_required + flags.mark_flags_as_mutual_exclusive + flags.mark_bool_flags_as_mutual_exclusive + +See their docstring in this module for a usage manual. + +Do NOT import this module directly. Import the flags package and use the +aliases defined at the package level instead. +""" + +import warnings + +from absl.flags import _exceptions +from absl.flags import _flagvalues +from absl.flags import _validators_classes + + +def register_validator(flag_name, + checker, + message='Flag validation failed', + flag_values=_flagvalues.FLAGS): + """Adds a constraint, which will be enforced during program execution. + + The constraint is validated when flags are initially parsed, and after each + change of the corresponding flag's value. + + Args: + flag_name: str | FlagHolder, name or holder of the flag to be checked. + Positional-only parameter. + checker: callable, a function to validate the flag. + + * input - A single positional argument: The value of the corresponding + flag (string, boolean, etc. This value will be passed to checker + by the library). + * output - bool, True if validator constraint is satisfied. + If constraint is not satisfied, it should either ``return False`` or + ``raise flags.ValidationError(desired_error_message)``. + + message: str, error text to be shown to the user if checker returns False. + If checker raises flags.ValidationError, message from the raised + error will be shown. + flag_values: flags.FlagValues, optional FlagValues instance to validate + against. + + Raises: + AttributeError: Raised when flag_name is not registered as a valid flag + name. + ValueError: Raised when flag_values is non-default and does not match the + FlagValues of the provided FlagHolder instance. 
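+
+  Example (a minimal sketch; ``my_flag`` is a hypothetical flag name)::
+
+    flags.register_validator(
+        'my_flag',
+        lambda value: value is None or value > 0,
+        message='--my_flag must be positive when set.')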
+ """ + flag_name, flag_values = _flagvalues.resolve_flag_ref(flag_name, flag_values) + v = _validators_classes.SingleFlagValidator(flag_name, checker, message) + _add_validator(flag_values, v) + + +def validator(flag_name, message='Flag validation failed', + flag_values=_flagvalues.FLAGS): + """A function decorator for defining a flag validator. + + Registers the decorated function as a validator for flag_name, e.g.:: + + @flags.validator('foo') + def _CheckFoo(foo): + ... + + See :func:`register_validator` for the specification of checker function. + + Args: + flag_name: str | FlagHolder, name or holder of the flag to be checked. + Positional-only parameter. + message: str, error text to be shown to the user if checker returns False. + If checker raises flags.ValidationError, message from the raised + error will be shown. + flag_values: flags.FlagValues, optional FlagValues instance to validate + against. + Returns: + A function decorator that registers its function argument as a validator. + Raises: + AttributeError: Raised when flag_name is not registered as a valid flag + name. + """ + + def decorate(function): + register_validator(flag_name, function, + message=message, + flag_values=flag_values) + return function + return decorate + + +def register_multi_flags_validator(flag_names, + multi_flags_checker, + message='Flags validation failed', + flag_values=_flagvalues.FLAGS): + """Adds a constraint to multiple flags. + + The constraint is validated when flags are initially parsed, and after each + change of the corresponding flag's value. + + Args: + flag_names: [str | FlagHolder], a list of the flag names or holders to be + checked. Positional-only parameter. + multi_flags_checker: callable, a function to validate the flag. + + * input - dict, with keys() being flag_names, and value for each key + being the value of the corresponding flag (string, boolean, etc). + * output - bool, True if validator constraint is satisfied. + If constraint is not satisfied, it should either return False or + raise flags.ValidationError. + + message: str, error text to be shown to the user if checker returns False. + If checker raises flags.ValidationError, message from the raised + error will be shown. + flag_values: flags.FlagValues, optional FlagValues instance to validate + against. + + Raises: + AttributeError: Raised when a flag is not registered as a valid flag name. + ValueError: Raised when multiple FlagValues are used in the same + invocation. This can occur when FlagHolders have different `_flagvalues` + or when str-type flag_names entries are present and the `flag_values` + argument does not match that of provided FlagHolder(s). + """ + flag_names, flag_values = _flagvalues.resolve_flag_refs( + flag_names, flag_values) + v = _validators_classes.MultiFlagsValidator( + flag_names, multi_flags_checker, message) + _add_validator(flag_values, v) + + +def multi_flags_validator(flag_names, + message='Flag validation failed', + flag_values=_flagvalues.FLAGS): + """A function decorator for defining a multi-flag validator. + + Registers the decorated function as a validator for flag_names, e.g.:: + + @flags.multi_flags_validator(['foo', 'bar']) + def _CheckFooBar(flags_dict): + ... + + See :func:`register_multi_flags_validator` for the specification of checker + function. + + Args: + flag_names: [str | FlagHolder], a list of the flag names or holders to be + checked. Positional-only parameter. + message: str, error text to be shown to the user if checker returns False. 
+ If checker raises flags.ValidationError, message from the raised + error will be shown. + flag_values: flags.FlagValues, optional FlagValues instance to validate + against. + + Returns: + A function decorator that registers its function argument as a validator. + + Raises: + AttributeError: Raised when a flag is not registered as a valid flag name. + """ + + def decorate(function): + register_multi_flags_validator(flag_names, + function, + message=message, + flag_values=flag_values) + return function + + return decorate + + +def mark_flag_as_required(flag_name, flag_values=_flagvalues.FLAGS): + """Ensures that flag is not None during program execution. + + Registers a flag validator, which will follow usual validator rules. + Important note: validator will pass for any non-``None`` value, such as + ``False``, ``0`` (zero), ``''`` (empty string) and so on. + + If your module might be imported by others, and you only wish to make the flag + required when the module is directly executed, call this method like this:: + + if __name__ == '__main__': + flags.mark_flag_as_required('your_flag_name') + app.run() + + Args: + flag_name: str | FlagHolder, name or holder of the flag. + Positional-only parameter. + flag_values: flags.FlagValues, optional :class:`~absl.flags.FlagValues` + instance where the flag is defined. + Raises: + AttributeError: Raised when flag_name is not registered as a valid flag + name. + ValueError: Raised when flag_values is non-default and does not match the + FlagValues of the provided FlagHolder instance. + """ + flag_name, flag_values = _flagvalues.resolve_flag_ref(flag_name, flag_values) + if flag_values[flag_name].default is not None: + warnings.warn( + 'Flag --%s has a non-None default value; therefore, ' + 'mark_flag_as_required will pass even if flag is not specified in the ' + 'command line!' % flag_name, + stacklevel=2) + register_validator( + flag_name, + lambda value: value is not None, + message='Flag --{} must have a value other than None.'.format(flag_name), + flag_values=flag_values) + + +def mark_flags_as_required(flag_names, flag_values=_flagvalues.FLAGS): + """Ensures that flags are not None during program execution. + + If your module might be imported by others, and you only wish to make the flag + required when the module is directly executed, call this method like this:: + + if __name__ == '__main__': + flags.mark_flags_as_required(['flag1', 'flag2', 'flag3']) + app.run() + + Args: + flag_names: Sequence[str | FlagHolder], names or holders of the flags. + flag_values: flags.FlagValues, optional FlagValues instance where the flags + are defined. + Raises: + AttributeError: If any of flag name has not already been defined as a flag. + """ + for flag_name in flag_names: + mark_flag_as_required(flag_name, flag_values) + + +def mark_flags_as_mutual_exclusive(flag_names, required=False, + flag_values=_flagvalues.FLAGS): + """Ensures that only one flag among flag_names is not None. + + Important note: This validator checks if flag values are ``None``, and it does + not distinguish between default and explicit values. Therefore, this validator + does not make sense when applied to flags with default values other than None, + including other false values (e.g. ``False``, ``0``, ``''``, ``[]``). That + includes multi flags with a default value of ``[]`` instead of None. + + Args: + flag_names: [str | FlagHolder], names or holders of flags. + Positional-only parameter. + required: bool. If true, exactly one of the flags must have a value other + than None. 
Otherwise, at most one of the flags can have a value other + than None, and it is valid for all of the flags to be None. + flag_values: flags.FlagValues, optional FlagValues instance where the flags + are defined. + + Raises: + ValueError: Raised when multiple FlagValues are used in the same + invocation. This can occur when FlagHolders have different `_flagvalues` + or when str-type flag_names entries are present and the `flag_values` + argument does not match that of provided FlagHolder(s). + """ + flag_names, flag_values = _flagvalues.resolve_flag_refs( + flag_names, flag_values) + for flag_name in flag_names: + if flag_values[flag_name].default is not None: + warnings.warn( + 'Flag --{} has a non-None default value. That does not make sense ' + 'with mark_flags_as_mutual_exclusive, which checks whether the ' + 'listed flags have a value other than None.'.format(flag_name), + stacklevel=2) + + def validate_mutual_exclusion(flags_dict): + flag_count = sum(1 for val in flags_dict.values() if val is not None) + if flag_count == 1 or (not required and flag_count == 0): + return True + raise _exceptions.ValidationError( + '{} one of ({}) must have a value other than None.'.format( + 'Exactly' if required else 'At most', ', '.join(flag_names))) + + register_multi_flags_validator( + flag_names, validate_mutual_exclusion, flag_values=flag_values) + + +def mark_bool_flags_as_mutual_exclusive(flag_names, required=False, + flag_values=_flagvalues.FLAGS): + """Ensures that only one flag among flag_names is True. + + Args: + flag_names: [str | FlagHolder], names or holders of flags. + Positional-only parameter. + required: bool. If true, exactly one flag must be True. Otherwise, at most + one flag can be True, and it is valid for all flags to be False. + flag_values: flags.FlagValues, optional FlagValues instance where the flags + are defined. + + Raises: + ValueError: Raised when multiple FlagValues are used in the same + invocation. This can occur when FlagHolders have different `_flagvalues` + or when str-type flag_names entries are present and the `flag_values` + argument does not match that of provided FlagHolder(s). + """ + flag_names, flag_values = _flagvalues.resolve_flag_refs( + flag_names, flag_values) + for flag_name in flag_names: + if not flag_values[flag_name].boolean: + raise _exceptions.ValidationError( + 'Flag --{} is not Boolean, which is required for flags used in ' + 'mark_bool_flags_as_mutual_exclusive.'.format(flag_name)) + + def validate_boolean_mutual_exclusion(flags_dict): + flag_count = sum(bool(val) for val in flags_dict.values()) + if flag_count == 1 or (not required and flag_count == 0): + return True + raise _exceptions.ValidationError( + '{} one of ({}) must be True.'.format( + 'Exactly' if required else 'At most', ', '.join(flag_names))) + + register_multi_flags_validator( + flag_names, validate_boolean_mutual_exclusion, flag_values=flag_values) + + +def _add_validator(fv, validator_instance): + """Register new flags validator to be checked. + + Args: + fv: flags.FlagValues, the FlagValues instance to add the validator. + validator_instance: validators.Validator, the validator to add. + Raises: + KeyError: Raised when validators work with a non-existing flag. 
+ """ + for flag_name in validator_instance.get_flags_names(): + fv[flag_name].validators.append(validator_instance) diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/_validators_classes.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/_validators_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..59100c8e6dc9528634c1752a8de18983eba26676 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/_validators_classes.py @@ -0,0 +1,172 @@ +# Copyright 2021 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Defines *private* classes used for flag validators. + +Do NOT import this module. DO NOT use anything from this module. They are +private APIs. +""" + +from absl.flags import _exceptions + + +class Validator(object): + """Base class for flags validators. + + Users should NOT overload these classes, and use flags.Register... + methods instead. + """ + + # Used to assign each validator an unique insertion_index + validators_count = 0 + + def __init__(self, checker, message): + """Constructor to create all validators. + + Args: + checker: function to verify the constraint. + Input of this method varies, see SingleFlagValidator and + multi_flags_validator for a detailed description. + message: str, error message to be shown to the user. + """ + self.checker = checker + self.message = message + Validator.validators_count += 1 + # Used to assert validators in the order they were registered. + self.insertion_index = Validator.validators_count + + def verify(self, flag_values): + """Verifies that constraint is satisfied. + + flags library calls this method to verify Validator's constraint. + + Args: + flag_values: flags.FlagValues, the FlagValues instance to get flags from. + Raises: + Error: Raised if constraint is not satisfied. + """ + param = self._get_input_to_checker_function(flag_values) + if not self.checker(param): + raise _exceptions.ValidationError(self.message) + + def get_flags_names(self): + """Returns the names of the flags checked by this validator. + + Returns: + [string], names of the flags. + """ + raise NotImplementedError('This method should be overloaded') + + def print_flags_with_values(self, flag_values): + raise NotImplementedError('This method should be overloaded') + + def _get_input_to_checker_function(self, flag_values): + """Given flag values, returns the input to be given to checker. + + Args: + flag_values: flags.FlagValues, containing all flags. + Returns: + The input to be given to checker. The return type depends on the specific + validator. + """ + raise NotImplementedError('This method should be overloaded') + + +class SingleFlagValidator(Validator): + """Validator behind register_validator() method. + + Validates that a single flag passes its checker function. The checker function + takes the flag value and returns True (if value looks fine) or, if flag value + is not valid, either returns False or raises an Exception. 
+ """ + + def __init__(self, flag_name, checker, message): + """Constructor. + + Args: + flag_name: string, name of the flag. + checker: function to verify the validator. + input - value of the corresponding flag (string, boolean, etc). + output - bool, True if validator constraint is satisfied. + If constraint is not satisfied, it should either return False or + raise flags.ValidationError(desired_error_message). + message: str, error message to be shown to the user if validator's + condition is not satisfied. + """ + super(SingleFlagValidator, self).__init__(checker, message) + self.flag_name = flag_name + + def get_flags_names(self): + return [self.flag_name] + + def print_flags_with_values(self, flag_values): + return 'flag --%s=%s' % (self.flag_name, flag_values[self.flag_name].value) + + def _get_input_to_checker_function(self, flag_values): + """Given flag values, returns the input to be given to checker. + + Args: + flag_values: flags.FlagValues, the FlagValues instance to get flags from. + Returns: + object, the input to be given to checker. + """ + return flag_values[self.flag_name].value + + +class MultiFlagsValidator(Validator): + """Validator behind register_multi_flags_validator method. + + Validates that flag values pass their common checker function. The checker + function takes flag values and returns True (if values look fine) or, + if values are not valid, either returns False or raises an Exception. + """ + + def __init__(self, flag_names, checker, message): + """Constructor. + + Args: + flag_names: [str], containing names of the flags used by checker. + checker: function to verify the validator. + input - dict, with keys() being flag_names, and value for each + key being the value of the corresponding flag (string, boolean, + etc). + output - bool, True if validator constraint is satisfied. + If constraint is not satisfied, it should either return False or + raise flags.ValidationError(desired_error_message). + message: str, error message to be shown to the user if validator's + condition is not satisfied + """ + super(MultiFlagsValidator, self).__init__(checker, message) + self.flag_names = flag_names + + def _get_input_to_checker_function(self, flag_values): + """Given flag values, returns the input to be given to checker. + + Args: + flag_values: flags.FlagValues, the FlagValues instance to get flags from. + Returns: + dict, with keys() being self.flag_names, and value for each key + being the value of the corresponding flag (string, boolean, etc). + """ + return dict([key, flag_values[key].value] for key in self.flag_names) + + def print_flags_with_values(self, flag_values): + prefix = 'flags ' + flags_with_values = [] + for key in self.flag_names: + flags_with_values.append('%s=%s' % (key, flag_values[key].value)) + return prefix + ', '.join(flags_with_values) + + def get_flags_names(self): + return self.flag_names diff --git a/env-llmeval/lib/python3.10/site-packages/absl/flags/argparse_flags.py b/env-llmeval/lib/python3.10/site-packages/absl/flags/argparse_flags.py new file mode 100644 index 0000000000000000000000000000000000000000..f05c7943ec53b5364817675ef4fc94bbd9976df4 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/flags/argparse_flags.py @@ -0,0 +1,388 @@ +# Copyright 2018 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module provides argparse integration with absl.flags. + +``argparse_flags.ArgumentParser`` is a drop-in replacement for +:class:`argparse.ArgumentParser`. It takes care of collecting and defining absl +flags in :mod:`argparse`. + +Here is a simple example:: + + # Assume the following absl.flags is defined in another module: + # + # from absl import flags + # flags.DEFINE_string('echo', None, 'The echo message.') + # + parser = argparse_flags.ArgumentParser( + description='A demo of absl.flags and argparse integration.') + parser.add_argument('--header', help='Header message to print.') + + # The parser will also accept the absl flag `--echo`. + # The `header` value is available as `args.header` just like a regular + # argparse flag. The absl flag `--echo` continues to be available via + # `absl.flags.FLAGS` if you want to access it. + args = parser.parse_args() + + # Example usages: + # ./program --echo='A message.' --header='A header' + # ./program --header 'A header' --echo 'A message.' + + +Here is another example demonstrates subparsers:: + + parser = argparse_flags.ArgumentParser(description='A subcommands demo.') + parser.add_argument('--header', help='The header message to print.') + + subparsers = parser.add_subparsers(help='The command to execute.') + + roll_dice_parser = subparsers.add_parser( + 'roll_dice', help='Roll a dice.', + # By default, absl flags can also be specified after the sub-command. + # To only allow them before sub-command, pass + # `inherited_absl_flags=None`. + inherited_absl_flags=None) + roll_dice_parser.add_argument('--num_faces', type=int, default=6) + roll_dice_parser.set_defaults(command=roll_dice) + + shuffle_parser = subparsers.add_parser('shuffle', help='Shuffle inputs.') + shuffle_parser.add_argument( + 'inputs', metavar='I', nargs='+', help='Inputs to shuffle.') + shuffle_parser.set_defaults(command=shuffle) + + args = parser.parse_args(argv[1:]) + args.command(args) + + # Example usages: + # ./program --echo='A message.' roll_dice --num_faces=6 + # ./program shuffle --echo='A message.' 1 2 3 4 + + +There are several differences between :mod:`absl.flags` and +:mod:`~absl.flags.argparse_flags`: + +1. Flags defined with absl.flags are parsed differently when using the + argparse parser. Notably: + + 1) absl.flags allows both single-dash and double-dash for any flag, and + doesn't distinguish them; argparse_flags only allows double-dash for + flag's regular name, and single-dash for flag's ``short_name``. + 2) Boolean flags in absl.flags can be specified with ``--bool``, + ``--nobool``, as well as ``--bool=true/false`` (though not recommended); + in argparse_flags, it only allows ``--bool``, ``--nobool``. + +2. Help related flag differences: + + 1) absl.flags does not define help flags, absl.app does that; argparse_flags + defines help flags unless passed with ``add_help=False``. + 2) absl.app supports ``--helpxml``; argparse_flags does not. + 3) argparse_flags supports ``-h``; absl.app does not. 
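+
+A minimal end-to-end sketch (the ``verbose`` and ``count`` names below are
+illustrative, not part of any existing program)::
+
+    from absl import flags
+    from absl.flags import argparse_flags
+
+    flags.DEFINE_boolean('verbose', False, 'Verbose output.')
+
+    parser = argparse_flags.ArgumentParser(description='Demo.')
+    parser.add_argument('--count', type=int, default=1)
+    args = parser.parse_args(['--count', '3', '--verbose'])
+    # args.count == 3; the absl flag is read via flags.FLAGS.verbose.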
+""" + +import argparse +import sys + +from absl import flags + + +_BUILT_IN_FLAGS = frozenset({ + 'help', + 'helpshort', + 'helpfull', + 'helpxml', + 'flagfile', + 'undefok', +}) + + +class ArgumentParser(argparse.ArgumentParser): + """Custom ArgumentParser class to support special absl flags.""" + + def __init__(self, **kwargs): + """Initializes ArgumentParser. + + Args: + **kwargs: same as argparse.ArgumentParser, except: + 1. It also accepts `inherited_absl_flags`: the absl flags to inherit. + The default is the global absl.flags.FLAGS instance. Pass None to + ignore absl flags. + 2. The `prefix_chars` argument must be the default value '-'. + + Raises: + ValueError: Raised when prefix_chars is not '-'. + """ + prefix_chars = kwargs.get('prefix_chars', '-') + if prefix_chars != '-': + raise ValueError( + 'argparse_flags.ArgumentParser only supports "-" as the prefix ' + 'character, found "{}".'.format(prefix_chars)) + + # Remove inherited_absl_flags before calling super. + self._inherited_absl_flags = kwargs.pop('inherited_absl_flags', flags.FLAGS) + # Now call super to initialize argparse.ArgumentParser before calling + # add_argument in _define_absl_flags. + super(ArgumentParser, self).__init__(**kwargs) + + if self.add_help: + # -h and --help are defined in super. + # Also add the --helpshort and --helpfull flags. + self.add_argument( + # Action 'help' defines a similar flag to -h/--help. + '--helpshort', action='help', + default=argparse.SUPPRESS, help=argparse.SUPPRESS) + self.add_argument( + '--helpfull', action=_HelpFullAction, + default=argparse.SUPPRESS, help='show full help message and exit') + + if self._inherited_absl_flags is not None: + self.add_argument( + '--undefok', default=argparse.SUPPRESS, help=argparse.SUPPRESS) + self._define_absl_flags(self._inherited_absl_flags) + + def parse_known_args(self, args=None, namespace=None): + if args is None: + args = sys.argv[1:] + if self._inherited_absl_flags is not None: + # Handle --flagfile. + # Explicitly specify force_gnu=True, since argparse behaves like + # gnu_getopt: flags can be specified after positional arguments. + args = self._inherited_absl_flags.read_flags_from_files( + args, force_gnu=True) + + undefok_missing = object() + undefok = getattr(namespace, 'undefok', undefok_missing) + + namespace, args = super(ArgumentParser, self).parse_known_args( + args, namespace) + + # For Python <= 2.7.8: https://bugs.python.org/issue9351, a bug where + # sub-parsers don't preserve existing namespace attributes. + # Restore the undefok attribute if a sub-parser dropped it. + if undefok is not undefok_missing: + namespace.undefok = undefok + + if self._inherited_absl_flags is not None: + # Handle --undefok. At this point, `args` only contains unknown flags, + # so it won't strip defined flags that are also specified with --undefok. + # For Python <= 2.7.8: https://bugs.python.org/issue9351, a bug where + # sub-parsers don't preserve existing namespace attributes. The undefok + # attribute might not exist because a subparser dropped it. + if hasattr(namespace, 'undefok'): + args = _strip_undefok_args(namespace.undefok, args) + # absl flags are not exposed in the Namespace object. See Namespace: + # https://docs.python.org/3/library/argparse.html#argparse.Namespace. 
+ del namespace.undefok + self._inherited_absl_flags.mark_as_parsed() + try: + self._inherited_absl_flags.validate_all_flags() + except flags.IllegalFlagValueError as e: + self.error(str(e)) + + return namespace, args + + def _define_absl_flags(self, absl_flags): + """Defines flags from absl_flags.""" + key_flags = set(absl_flags.get_key_flags_for_module(sys.argv[0])) + for name in absl_flags: + if name in _BUILT_IN_FLAGS: + # Do not inherit built-in flags. + continue + flag_instance = absl_flags[name] + # Each flags with short_name appears in FLAGS twice, so only define + # when the dictionary key is equal to the regular name. + if name == flag_instance.name: + # Suppress the flag in the help short message if it's not a main + # module's key flag. + suppress = flag_instance not in key_flags + self._define_absl_flag(flag_instance, suppress) + + def _define_absl_flag(self, flag_instance, suppress): + """Defines a flag from the flag_instance.""" + flag_name = flag_instance.name + short_name = flag_instance.short_name + argument_names = ['--' + flag_name] + if short_name: + argument_names.insert(0, '-' + short_name) + if suppress: + helptext = argparse.SUPPRESS + else: + # argparse help string uses %-formatting. Escape the literal %'s. + helptext = flag_instance.help.replace('%', '%%') + if flag_instance.boolean: + # Only add the `no` form to the long name. + argument_names.append('--no' + flag_name) + self.add_argument( + *argument_names, action=_BooleanFlagAction, help=helptext, + metavar=flag_instance.name.upper(), + flag_instance=flag_instance) + else: + self.add_argument( + *argument_names, action=_FlagAction, help=helptext, + metavar=flag_instance.name.upper(), + flag_instance=flag_instance) + + +class _FlagAction(argparse.Action): + """Action class for Abseil non-boolean flags.""" + + def __init__( + self, + option_strings, + dest, + help, # pylint: disable=redefined-builtin + metavar, + flag_instance, + default=argparse.SUPPRESS): + """Initializes _FlagAction. + + Args: + option_strings: See argparse.Action. + dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS. + help: See argparse.Action. + metavar: See argparse.Action. + flag_instance: absl.flags.Flag, the absl flag instance. + default: Ignored. The flag always uses dest=argparse.SUPPRESS so it + doesn't affect the parsing result. + """ + del dest + self._flag_instance = flag_instance + super(_FlagAction, self).__init__( + option_strings=option_strings, + dest=argparse.SUPPRESS, + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + """See https://docs.python.org/3/library/argparse.html#action-classes.""" + self._flag_instance.parse(values) + self._flag_instance.using_default_value = False + + +class _BooleanFlagAction(argparse.Action): + """Action class for Abseil boolean flags.""" + + def __init__( + self, + option_strings, + dest, + help, # pylint: disable=redefined-builtin + metavar, + flag_instance, + default=argparse.SUPPRESS): + """Initializes _BooleanFlagAction. + + Args: + option_strings: See argparse.Action. + dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS. + help: See argparse.Action. + metavar: See argparse.Action. + flag_instance: absl.flags.Flag, the absl flag instance. + default: Ignored. The flag always uses dest=argparse.SUPPRESS so it + doesn't affect the parsing result. 
+ """ + del dest, default + self._flag_instance = flag_instance + flag_names = [self._flag_instance.name] + if self._flag_instance.short_name: + flag_names.append(self._flag_instance.short_name) + self._flag_names = frozenset(flag_names) + super(_BooleanFlagAction, self).__init__( + option_strings=option_strings, + dest=argparse.SUPPRESS, + nargs=0, # Does not accept values, only `--bool` or `--nobool`. + help=help, + metavar=metavar) + + def __call__(self, parser, namespace, values, option_string=None): + """See https://docs.python.org/3/library/argparse.html#action-classes.""" + if not isinstance(values, list) or values: + raise ValueError('values must be an empty list.') + if option_string.startswith('--'): + option = option_string[2:] + else: + option = option_string[1:] + if option in self._flag_names: + self._flag_instance.parse('true') + else: + if not option.startswith('no') or option[2:] not in self._flag_names: + raise ValueError('invalid option_string: ' + option_string) + self._flag_instance.parse('false') + self._flag_instance.using_default_value = False + + +class _HelpFullAction(argparse.Action): + """Action class for --helpfull flag.""" + + def __init__(self, option_strings, dest, default, help): # pylint: disable=redefined-builtin + """Initializes _HelpFullAction. + + Args: + option_strings: See argparse.Action. + dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS. + default: Ignored. + help: See argparse.Action. + """ + del dest, default + super(_HelpFullAction, self).__init__( + option_strings=option_strings, + dest=argparse.SUPPRESS, + default=argparse.SUPPRESS, + nargs=0, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + """See https://docs.python.org/3/library/argparse.html#action-classes.""" + # This only prints flags when help is not argparse.SUPPRESS. + # It includes user defined argparse flags, as well as main module's + # key absl flags. Other absl flags use argparse.SUPPRESS, so they aren't + # printed here. + parser.print_help() + + absl_flags = parser._inherited_absl_flags # pylint: disable=protected-access + if absl_flags is not None: + modules = sorted(absl_flags.flags_by_module_dict()) + main_module = sys.argv[0] + if main_module in modules: + # The main module flags are already printed in parser.print_help(). + modules.remove(main_module) + print(absl_flags._get_help_for_modules( # pylint: disable=protected-access + modules, prefix='', include_special_flags=True)) + parser.exit() + + +def _strip_undefok_args(undefok, args): + """Returns a new list of args after removing flags in --undefok.""" + if undefok: + undefok_names = set(name.strip() for name in undefok.split(',')) + undefok_names |= set('no' + name for name in undefok_names) + # Remove undefok flags. 
+ args = [arg for arg in args if not _is_undefok(arg, undefok_names)] + return args + + +def _is_undefok(arg, undefok_names): + """Returns whether we can ignore arg based on a set of undefok flag names.""" + if not arg.startswith('-'): + return False + if arg.startswith('--'): + arg_without_dash = arg[2:] + else: + arg_without_dash = arg[1:] + if '=' in arg_without_dash: + name, _ = arg_without_dash.split('=', 1) + else: + name = arg_without_dash + if name in undefok_names: + return True + return False diff --git a/env-llmeval/lib/python3.10/site-packages/absl/logging/__init__.py b/env-llmeval/lib/python3.10/site-packages/absl/logging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..42166cd892f99c34f637dd661576897196a357f4 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/logging/__init__.py @@ -0,0 +1,1281 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abseil Python logging module implemented on top of standard logging. + +Simple usage:: + + from absl import logging + + logging.info('Interesting Stuff') + logging.info('Interesting Stuff with Arguments: %d', 42) + + logging.set_verbosity(logging.INFO) + logging.log(logging.DEBUG, 'This will *not* be printed') + logging.set_verbosity(logging.DEBUG) + logging.log(logging.DEBUG, 'This will be printed') + + logging.warning('Worrying Stuff') + logging.error('Alarming Stuff') + logging.fatal('AAAAHHHHH!!!!') # Process exits. + +Usage note: Do not pre-format the strings in your program code. +Instead, let the logging module perform argument interpolation. +This saves cycles because strings that don't need to be printed +are never formatted. Note that this module does not attempt to +interpolate arguments when no arguments are given. In other words:: + + logging.info('Interesting Stuff: %s') + +does not raise an exception because logging.info() has only one +argument, the message string. + +"Lazy" evaluation for debugging +------------------------------- + +If you do something like this:: + + logging.debug('Thing: %s', thing.ExpensiveOp()) + +then the ExpensiveOp will be evaluated even if nothing +is printed to the log. To avoid this, use the level_debug() function:: + + if logging.level_debug(): + logging.debug('Thing: %s', thing.ExpensiveOp()) + +Per file level logging is supported by logging.vlog() and +logging.vlog_is_on(). For example:: + + if logging.vlog_is_on(2): + logging.vlog(2, very_expensive_debug_message()) + +Notes on Unicode +---------------- + +The log output is encoded as UTF-8. Don't pass data in other encodings in +bytes() instances -- instead pass unicode string instances when you need to +(for both the format string and arguments). + +Note on critical and fatal: +Standard logging module defines fatal as an alias to critical, but it's not +documented, and it does NOT actually terminate the program. +This module only defines fatal but not critical, and it DOES terminate the +program. 
+ +The differences in behavior are historical and unfortunate. +""" + +import collections +from collections import abc +import getpass +import io +import itertools +import logging +import os +import socket +import struct +import sys +import tempfile +import threading +import time +import timeit +import traceback +import types +import warnings + +from absl import flags +from absl.logging import converter + +# pylint: disable=g-import-not-at-top +try: + from typing import NoReturn +except ImportError: + pass + +# pylint: enable=g-import-not-at-top + +FLAGS = flags.FLAGS + + +# Logging levels. +FATAL = converter.ABSL_FATAL +ERROR = converter.ABSL_ERROR +WARNING = converter.ABSL_WARNING +WARN = converter.ABSL_WARNING # Deprecated name. +INFO = converter.ABSL_INFO +DEBUG = converter.ABSL_DEBUG + +# Regex to match/parse log line prefixes. +ABSL_LOGGING_PREFIX_REGEX = ( + r'^(?P[IWEF])' + r'(?P\d\d)(?P\d\d) ' + r'(?P\d\d):(?P\d\d):(?P\d\d)' + r'\.(?P\d\d\d\d\d\d) +' + r'(?P-?\d+) ' + r'(?P[a-zA-Z<][\w._<>-]+):(?P\d+)') + + +# Mask to convert integer thread ids to unsigned quantities for logging purposes +_THREAD_ID_MASK = 2 ** (struct.calcsize('L') * 8) - 1 + +# Extra property set on the LogRecord created by ABSLLogger when its level is +# CRITICAL/FATAL. +_ABSL_LOG_FATAL = '_absl_log_fatal' +# Extra prefix added to the log message when a non-absl logger logs a +# CRITICAL/FATAL message. +_CRITICAL_PREFIX = 'CRITICAL - ' + +# Used by findCaller to skip callers from */logging/__init__.py. +_LOGGING_FILE_PREFIX = os.path.join('logging', '__init__.') + +# The ABSL logger instance, initialized in _initialize(). +_absl_logger = None +# The ABSL handler instance, initialized in _initialize(). +_absl_handler = None + + +_CPP_NAME_TO_LEVELS = { + 'debug': '0', # Abseil C++ has no DEBUG level, mapping it to INFO here. + 'info': '0', + 'warning': '1', + 'warn': '1', + 'error': '2', + 'fatal': '3' +} + +_CPP_LEVEL_TO_NAMES = { + '0': 'info', + '1': 'warning', + '2': 'error', + '3': 'fatal', +} + + +class _VerbosityFlag(flags.Flag): + """Flag class for -v/--verbosity.""" + + def __init__(self, *args, **kwargs): + super(_VerbosityFlag, self).__init__( + flags.IntegerParser(), + flags.ArgumentSerializer(), + *args, **kwargs) + + @property + def value(self): + return self._value + + @value.setter + def value(self, v): + self._value = v + self._update_logging_levels() + + def _update_logging_levels(self): + """Updates absl logging levels to the current verbosity. + + Visibility: module-private + """ + if not _absl_logger: + return + + if self._value <= converter.ABSL_DEBUG: + standard_verbosity = converter.absl_to_standard(self._value) + else: + # --verbosity is set to higher than 1 for vlog. + standard_verbosity = logging.DEBUG - (self._value - 1) + + # Also update root level when absl_handler is used. + if _absl_handler in logging.root.handlers: + # Make absl logger inherit from the root logger. absl logger might have + # a non-NOTSET value if logging.set_verbosity() is called at import time. + _absl_logger.setLevel(logging.NOTSET) + logging.root.setLevel(standard_verbosity) + else: + _absl_logger.setLevel(standard_verbosity) + + +class _LoggerLevelsFlag(flags.Flag): + """Flag class for --logger_levels.""" + + def __init__(self, *args, **kwargs): + super(_LoggerLevelsFlag, self).__init__( + _LoggerLevelsParser(), + _LoggerLevelsSerializer(), + *args, **kwargs) + + @property + def value(self): + # For lack of an immutable type, be defensive and return a copy. 
+ # Modifications to the dict aren't supported and won't have any affect. + # While Py3 could use MappingProxyType, that isn't deepcopy friendly, so + # just return a copy. + return self._value.copy() + + @value.setter + def value(self, v): + self._value = {} if v is None else v + self._update_logger_levels() + + def _update_logger_levels(self): + # Visibility: module-private. + # This is called by absl.app.run() during initialization. + for name, level in self._value.items(): + logging.getLogger(name).setLevel(level) + + +class _LoggerLevelsParser(flags.ArgumentParser): + """Parser for --logger_levels flag.""" + + def parse(self, value): + if isinstance(value, abc.Mapping): + return value + + pairs = [pair.strip() for pair in value.split(',') if pair.strip()] + + # Preserve the order so that serialization is deterministic. + levels = collections.OrderedDict() + for name_level in pairs: + name, level = name_level.split(':', 1) + name = name.strip() + level = level.strip() + levels[name] = level + return levels + + +class _LoggerLevelsSerializer(object): + """Serializer for --logger_levels flag.""" + + def serialize(self, value): + if isinstance(value, str): + return value + return ','.join( + '{}:{}'.format(name, level) for name, level in value.items()) + + +class _StderrthresholdFlag(flags.Flag): + """Flag class for --stderrthreshold.""" + + def __init__(self, *args, **kwargs): + super(_StderrthresholdFlag, self).__init__( + flags.ArgumentParser(), + flags.ArgumentSerializer(), + *args, **kwargs) + + @property + def value(self): + return self._value + + @value.setter + def value(self, v): + if v in _CPP_LEVEL_TO_NAMES: + # --stderrthreshold also accepts numeric strings whose values are + # Abseil C++ log levels. + cpp_value = int(v) + v = _CPP_LEVEL_TO_NAMES[v] # Normalize to strings. + elif v.lower() in _CPP_NAME_TO_LEVELS: + v = v.lower() + if v == 'warn': + v = 'warning' # Use 'warning' as the canonical name. + cpp_value = int(_CPP_NAME_TO_LEVELS[v]) + else: + raise ValueError( + '--stderrthreshold must be one of (case-insensitive) ' + "'debug', 'info', 'warning', 'error', 'fatal', " + "or '0', '1', '2', '3', not '%s'" % v) + + self._value = v + + +LOGTOSTDERR = flags.DEFINE_boolean( + 'logtostderr', + False, + 'Should only log to stderr?', + allow_override_cpp=True, +) +ALSOLOGTOSTDERR = flags.DEFINE_boolean( + 'alsologtostderr', + False, + 'also log to stderr?', + allow_override_cpp=True, +) +LOG_DIR = flags.DEFINE_string( + 'log_dir', + os.getenv('TEST_TMPDIR', ''), + 'directory to write logfiles into', + allow_override_cpp=True, +) +VERBOSITY = flags.DEFINE_flag( + _VerbosityFlag( + 'verbosity', + -1, + ( + 'Logging verbosity level. Messages logged at this level or lower' + ' will be included. Set to 1 for debug logging. If the flag was not' + ' set or supplied, the value will be changed from the default of -1' + ' (warning) to 0 (info) after flags are parsed.' + ), + short_name='v', + allow_hide_cpp=True, + ) +) +LOGGER_LEVELS = flags.DEFINE_flag( + _LoggerLevelsFlag( + 'logger_levels', + {}, + ( + 'Specify log level of loggers. The format is a CSV list of ' + '`name:level`. Where `name` is the logger name used with ' + '`logging.getLogger()`, and `level` is a level name (INFO, DEBUG, ' + 'etc). e.g. `myapp.foo:INFO,other.logger:DEBUG`' + ), + ) +) +STDERRTHRESHOLD = flags.DEFINE_flag( + _StderrthresholdFlag( + 'stderrthreshold', + 'fatal', + ( + 'log messages at this level, or more severe, to stderr in ' + 'addition to the logfile. 
Possible values are ' + "'debug', 'info', 'warning', 'error', and 'fatal'. " + 'Obsoletes --alsologtostderr. Using --alsologtostderr ' + 'cancels the effect of this flag. Please also note that ' + 'this flag is subject to --verbosity and requires logfile ' + 'not be stderr.' + ), + allow_hide_cpp=True, + ) +) +SHOWPREFIXFORINFO = flags.DEFINE_boolean( + 'showprefixforinfo', + True, + ( + 'If False, do not prepend prefix to info messages ' + "when it's logged to stderr, " + '--verbosity is set to INFO level, ' + 'and python logging is used.' + ), +) + + +def get_verbosity(): + """Returns the logging verbosity.""" + return FLAGS['verbosity'].value + + +def set_verbosity(v): + """Sets the logging verbosity. + + Causes all messages of level <= v to be logged, + and all messages of level > v to be silently discarded. + + Args: + v: int|str, the verbosity level as an integer or string. Legal string values + are those that can be coerced to an integer as well as case-insensitive + 'debug', 'info', 'warning', 'error', and 'fatal'. + """ + try: + new_level = int(v) + except ValueError: + new_level = converter.ABSL_NAMES[v.upper()] + FLAGS.verbosity = new_level + + +def set_stderrthreshold(s): + """Sets the stderr threshold to the value passed in. + + Args: + s: str|int, valid strings values are case-insensitive 'debug', + 'info', 'warning', 'error', and 'fatal'; valid integer values are + logging.DEBUG|INFO|WARNING|ERROR|FATAL. + + Raises: + ValueError: Raised when s is an invalid value. + """ + if s in converter.ABSL_LEVELS: + FLAGS.stderrthreshold = converter.ABSL_LEVELS[s] + elif isinstance(s, str) and s.upper() in converter.ABSL_NAMES: + FLAGS.stderrthreshold = s + else: + raise ValueError( + 'set_stderrthreshold only accepts integer absl logging level ' + 'from -3 to 1, or case-insensitive string values ' + "'debug', 'info', 'warning', 'error', and 'fatal'. " + 'But found "{}" ({}).'.format(s, type(s))) + + +def fatal(msg, *args, **kwargs): + # type: (Any, Any, Any) -> NoReturn + """Logs a fatal message.""" + log(FATAL, msg, *args, **kwargs) + + +def error(msg, *args, **kwargs): + """Logs an error message.""" + log(ERROR, msg, *args, **kwargs) + + +def warning(msg, *args, **kwargs): + """Logs a warning message.""" + log(WARNING, msg, *args, **kwargs) + + +def warn(msg, *args, **kwargs): + """Deprecated, use 'warning' instead.""" + warnings.warn("The 'warn' function is deprecated, use 'warning' instead", + DeprecationWarning, 2) + log(WARNING, msg, *args, **kwargs) + + +def info(msg, *args, **kwargs): + """Logs an info message.""" + log(INFO, msg, *args, **kwargs) + + +def debug(msg, *args, **kwargs): + """Logs a debug message.""" + log(DEBUG, msg, *args, **kwargs) + + +def exception(msg, *args, exc_info=True, **kwargs): + """Logs an exception, with traceback and message.""" + error(msg, *args, exc_info=exc_info, **kwargs) + + +# Counter to keep track of number of log entries per token. +_log_counter_per_token = {} + + +def _get_next_log_count_per_token(token): + """Wrapper for _log_counter_per_token. Thread-safe. + + Args: + token: The token for which to look up the count. + + Returns: + The number of times this function has been called with + *token* as an argument (starting at 0). + """ + # Can't use a defaultdict because defaultdict isn't atomic, whereas + # setdefault is. + return next(_log_counter_per_token.setdefault(token, itertools.count())) + + +def log_every_n(level, msg, n, *args): + """Logs ``msg % args`` at level 'level' once per 'n' times. 
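The setters above accept either absl integer levels or case-insensitive level names; an illustrative, hypothetical sketch of the accepted argument forms:

from absl import logging

logging.set_verbosity(logging.DEBUG)        # same effect as set_verbosity(1)
logging.set_verbosity('warning')            # name resolved via converter.ABSL_NAMES
logging.set_stderrthreshold('error')        # level names work here too
logging.set_stderrthreshold(logging.FATAL)  # as do absl integer levels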
+ + Logs the 1st call, (N+1)st call, (2N+1)st call, etc. + Not threadsafe. + + Args: + level: int, the absl logging level at which to log. + msg: str, the message to be logged. + n: int, the number of times this should be called before it is logged. + *args: The args to be substituted into the msg. + """ + count = _get_next_log_count_per_token(get_absl_logger().findCaller()) + log_if(level, msg, not (count % n), *args) + + +# Keeps track of the last log time of the given token. +# Note: must be a dict since set/get is atomic in CPython. +# Note: entries are never released as their number is expected to be low. +_log_timer_per_token = {} + + +def _seconds_have_elapsed(token, num_seconds): + """Tests if 'num_seconds' have passed since 'token' was requested. + + Not strictly thread-safe - may log with the wrong frequency if called + concurrently from multiple threads. Accuracy depends on resolution of + 'timeit.default_timer()'. + + Always returns True on the first call for a given 'token'. + + Args: + token: The token for which to look up the count. + num_seconds: The number of seconds to test for. + + Returns: + Whether it has been >= 'num_seconds' since 'token' was last requested. + """ + now = timeit.default_timer() + then = _log_timer_per_token.get(token, None) + if then is None or (now - then) >= num_seconds: + _log_timer_per_token[token] = now + return True + else: + return False + + +def log_every_n_seconds(level, msg, n_seconds, *args): + """Logs ``msg % args`` at level ``level`` iff ``n_seconds`` elapsed since last call. + + Logs the first call, logs subsequent calls if 'n' seconds have elapsed since + the last logging call from the same call site (file + line). Not thread-safe. + + Args: + level: int, the absl logging level at which to log. + msg: str, the message to be logged. + n_seconds: float or int, seconds which should elapse before logging again. + *args: The args to be substituted into the msg. + """ + should_log = _seconds_have_elapsed(get_absl_logger().findCaller(), n_seconds) + log_if(level, msg, should_log, *args) + + +def log_first_n(level, msg, n, *args): + """Logs ``msg % args`` at level ``level`` only first ``n`` times. + + Not threadsafe. + + Args: + level: int, the absl logging level at which to log. + msg: str, the message to be logged. + n: int, the maximal number of times the message is logged. + *args: The args to be substituted into the msg. + """ + count = _get_next_log_count_per_token(get_absl_logger().findCaller()) + log_if(level, msg, count < n, *args) + + +def log_if(level, msg, condition, *args): + """Logs ``msg % args`` at level ``level`` only if condition is fulfilled.""" + if condition: + log(level, msg, *args) + + +def log(level, msg, *args, **kwargs): + """Logs ``msg % args`` at absl logging level ``level``. + + If no args are given just print msg, ignoring any interpolation specifiers. + + Args: + level: int, the absl logging level at which to log the message + (logging.DEBUG|INFO|WARNING|ERROR|FATAL). While some C++ verbose logging + level constants are also supported, callers should prefer explicit + logging.vlog() calls for such purpose. + + msg: str, the message to be logged. + *args: The args to be substituted into the msg. + **kwargs: May contain exc_info to add exception traceback to message. + """ + if level > converter.ABSL_DEBUG: + # Even though this function supports level that is greater than 1, users + # should use logging.vlog instead for such cases. + # Treat this as vlog, 1 is equivalent to DEBUG. 
+ standard_level = converter.STANDARD_DEBUG - (level - 1) + else: + if level < converter.ABSL_FATAL: + level = converter.ABSL_FATAL + standard_level = converter.absl_to_standard(level) + + # Match standard logging's behavior. Before use_absl_handler() and + # logging is configured, there is no handler attached on _absl_logger nor + # logging.root. So logs go no where. + if not logging.root.handlers: + logging.basicConfig() + + _absl_logger.log(standard_level, msg, *args, **kwargs) + + +def vlog(level, msg, *args, **kwargs): + """Log ``msg % args`` at C++ vlog level ``level``. + + Args: + level: int, the C++ verbose logging level at which to log the message, + e.g. 1, 2, 3, 4... While absl level constants are also supported, + callers should prefer logging.log|debug|info|... calls for such purpose. + msg: str, the message to be logged. + *args: The args to be substituted into the msg. + **kwargs: May contain exc_info to add exception traceback to message. + """ + log(level, msg, *args, **kwargs) + + +def vlog_is_on(level): + """Checks if vlog is enabled for the given level in caller's source file. + + Args: + level: int, the C++ verbose logging level at which to log the message, + e.g. 1, 2, 3, 4... While absl level constants are also supported, + callers should prefer level_debug|level_info|... calls for + checking those. + + Returns: + True if logging is turned on for that level. + """ + + if level > converter.ABSL_DEBUG: + # Even though this function supports level that is greater than 1, users + # should use logging.vlog instead for such cases. + # Treat this as vlog, 1 is equivalent to DEBUG. + standard_level = converter.STANDARD_DEBUG - (level - 1) + else: + if level < converter.ABSL_FATAL: + level = converter.ABSL_FATAL + standard_level = converter.absl_to_standard(level) + return _absl_logger.isEnabledFor(standard_level) + + +def flush(): + """Flushes all log files.""" + get_absl_handler().flush() + + +def level_debug(): + """Returns True if debug logging is turned on.""" + return get_verbosity() >= DEBUG + + +def level_info(): + """Returns True if info logging is turned on.""" + return get_verbosity() >= INFO + + +def level_warning(): + """Returns True if warning logging is turned on.""" + return get_verbosity() >= WARNING + + +level_warn = level_warning # Deprecated function. + + +def level_error(): + """Returns True if error logging is turned on.""" + return get_verbosity() >= ERROR + + +def get_log_file_name(level=INFO): + """Returns the name of the log file. + + For Python logging, only one file is used and level is ignored. And it returns + empty string if it logs to stderr/stdout or the log stream has no `name` + attribute. + + Args: + level: int, the absl.logging level. + + Raises: + ValueError: Raised when `level` has an invalid value. + """ + if level not in converter.ABSL_LEVELS: + raise ValueError('Invalid absl.logging level {}'.format(level)) + stream = get_absl_handler().python_handler.stream + if (stream == sys.stderr or stream == sys.stdout or + not hasattr(stream, 'name')): + return '' + else: + return stream.name + + +def find_log_dir_and_names(program_name=None, log_dir=None): + """Computes the directory and filename prefix for log file. + + Args: + program_name: str|None, the filename part of the path to the program that + is running without its extension. 
e.g: if your program is called + ``usr/bin/foobar.py`` this method should probably be called with + ``program_name='foobar`` However, this is just a convention, you can + pass in any string you want, and it will be used as part of the + log filename. If you don't pass in anything, the default behavior + is as described in the example. In python standard logging mode, + the program_name will be prepended with ``py_`` if it is the + ``program_name`` argument is omitted. + log_dir: str|None, the desired log directory. + + Returns: + (log_dir, file_prefix, symlink_prefix) + + Raises: + FileNotFoundError: raised in Python 3 when it cannot find a log directory. + OSError: raised in Python 2 when it cannot find a log directory. + """ + if not program_name: + # Strip the extension (foobar.par becomes foobar, and + # fubar.py becomes fubar). We do this so that the log + # file names are similar to C++ log file names. + program_name = os.path.splitext(os.path.basename(sys.argv[0]))[0] + + # Prepend py_ to files so that python code gets a unique file, and + # so that C++ libraries do not try to write to the same log files as us. + program_name = 'py_%s' % program_name + + actual_log_dir = find_log_dir(log_dir=log_dir) + + try: + username = getpass.getuser() + except KeyError: + # This can happen, e.g. when running under docker w/o passwd file. + if hasattr(os, 'getuid'): + # Windows doesn't have os.getuid + username = str(os.getuid()) + else: + username = 'unknown' + hostname = socket.gethostname() + file_prefix = '%s.%s.%s.log' % (program_name, hostname, username) + + return actual_log_dir, file_prefix, program_name + + +def find_log_dir(log_dir=None): + """Returns the most suitable directory to put log files into. + + Args: + log_dir: str|None, if specified, the logfile(s) will be created in that + directory. Otherwise if the --log_dir command-line flag is provided, + the logfile will be created in that directory. Otherwise the logfile + will be created in a standard location. + + Raises: + FileNotFoundError: raised in Python 3 when it cannot find a log directory. + OSError: raised in Python 2 when it cannot find a log directory. + """ + # Get a list of possible log dirs (will try to use them in order). + # NOTE: Google's internal implementation has a special handling for Google + # machines, which uses a list of directories. Hence the following uses `dirs` + # instead of a single directory. + if log_dir: + # log_dir was explicitly specified as an arg, so use it and it alone. + dirs = [log_dir] + elif FLAGS['log_dir'].value: + # log_dir flag was provided, so use it and it alone (this mimics the + # behavior of the same flag in logging.cc). + dirs = [FLAGS['log_dir'].value] + else: + dirs = [tempfile.gettempdir()] + + # Find the first usable log dir. + for d in dirs: + if os.path.isdir(d) and os.access(d, os.W_OK): + return d + raise FileNotFoundError( + "Can't find a writable directory for logs, tried %s" % dirs) + + +def get_absl_log_prefix(record): + """Returns the absl log prefix for the log record. + + Args: + record: logging.LogRecord, the record to get prefix for. + """ + created_tuple = time.localtime(record.created) + created_microsecond = int(record.created % 1.0 * 1e6) + + critical_prefix = '' + level = record.levelno + if _is_non_absl_fatal_record(record): + # When the level is FATAL, but not logged from absl, lower the level so + # it's treated as ERROR. 
+ level = logging.ERROR + critical_prefix = _CRITICAL_PREFIX + severity = converter.get_initial_for_level(level) + + return '%c%02d%02d %02d:%02d:%02d.%06d %5d %s:%d] %s' % ( + severity, + created_tuple.tm_mon, + created_tuple.tm_mday, + created_tuple.tm_hour, + created_tuple.tm_min, + created_tuple.tm_sec, + created_microsecond, + _get_thread_id(), + record.filename, + record.lineno, + critical_prefix) + + +def skip_log_prefix(func): + """Skips reporting the prefix of a given function or name by :class:`~absl.logging.ABSLLogger`. + + This is a convenience wrapper function / decorator for + :meth:`~absl.logging.ABSLLogger.register_frame_to_skip`. + + If a callable function is provided, only that function will be skipped. + If a function name is provided, all functions with the same name in the + file that this is called in will be skipped. + + This can be used as a decorator of the intended function to be skipped. + + Args: + func: Callable function or its name as a string. + + Returns: + func (the input, unchanged). + + Raises: + ValueError: The input is callable but does not have a function code object. + TypeError: The input is neither callable nor a string. + """ + if callable(func): + func_code = getattr(func, '__code__', None) + if func_code is None: + raise ValueError('Input callable does not have a function code object.') + file_name = func_code.co_filename + func_name = func_code.co_name + func_lineno = func_code.co_firstlineno + elif isinstance(func, str): + file_name = get_absl_logger().findCaller()[0] + func_name = func + func_lineno = None + else: + raise TypeError('Input is neither callable nor a string.') + ABSLLogger.register_frame_to_skip(file_name, func_name, func_lineno) + return func + + +def _is_non_absl_fatal_record(log_record): + return (log_record.levelno >= logging.FATAL and + not log_record.__dict__.get(_ABSL_LOG_FATAL, False)) + + +def _is_absl_fatal_record(log_record): + return (log_record.levelno >= logging.FATAL and + log_record.__dict__.get(_ABSL_LOG_FATAL, False)) + + +# Indicates if we still need to warn about pre-init logs going to stderr. +_warn_preinit_stderr = True + + +class PythonHandler(logging.StreamHandler): + """The handler class used by Abseil Python logging implementation.""" + + def __init__(self, stream=None, formatter=None): + super(PythonHandler, self).__init__(stream) + self.setFormatter(formatter or PythonFormatter()) + + def start_logging_to_file(self, program_name=None, log_dir=None): + """Starts logging messages to files instead of standard error.""" + FLAGS.logtostderr = False + + actual_log_dir, file_prefix, symlink_prefix = find_log_dir_and_names( + program_name=program_name, log_dir=log_dir) + + basename = '%s.INFO.%s.%d' % ( + file_prefix, + time.strftime('%Y%m%d-%H%M%S', time.localtime(time.time())), + os.getpid()) + filename = os.path.join(actual_log_dir, basename) + + self.stream = open(filename, 'a', encoding='utf-8') + + # os.symlink is not available on Windows Python 2. + if getattr(os, 'symlink', None): + # Create a symlink to the log file with a canonical name. + symlink = os.path.join(actual_log_dir, symlink_prefix + '.INFO') + try: + if os.path.islink(symlink): + os.unlink(symlink) + os.symlink(os.path.basename(filename), symlink) + except EnvironmentError: + # If it fails, we're sad but it's no error. 
Commonly, this + # fails because the symlink was created by another user and so + # we can't modify it + pass + + def use_absl_log_file(self, program_name=None, log_dir=None): + """Conditionally logs to files, based on --logtostderr.""" + if FLAGS['logtostderr'].value: + self.stream = sys.stderr + else: + self.start_logging_to_file(program_name=program_name, log_dir=log_dir) + + def flush(self): + """Flushes all log files.""" + self.acquire() + try: + if self.stream and hasattr(self.stream, 'flush'): + self.stream.flush() + except (EnvironmentError, ValueError): + # A ValueError is thrown if we try to flush a closed file. + pass + finally: + self.release() + + def _log_to_stderr(self, record): + """Emits the record to stderr. + + This temporarily sets the handler stream to stderr, calls + StreamHandler.emit, then reverts the stream back. + + Args: + record: logging.LogRecord, the record to log. + """ + # emit() is protected by a lock in logging.Handler, so we don't need to + # protect here again. + old_stream = self.stream + self.stream = sys.stderr + try: + super(PythonHandler, self).emit(record) + finally: + self.stream = old_stream + + def emit(self, record): + """Prints a record out to some streams. + + 1. If ``FLAGS.logtostderr`` is set, it will print to ``sys.stderr`` ONLY. + 2. If ``FLAGS.alsologtostderr`` is set, it will print to ``sys.stderr``. + 3. If ``FLAGS.logtostderr`` is not set, it will log to the stream + associated with the current thread. + + Args: + record: :class:`logging.LogRecord`, the record to emit. + """ + # People occasionally call logging functions at import time before + # our flags may have even been defined yet, let alone even parsed, as we + # rely on the C++ side to define some flags for us and app init to + # deal with parsing. Match the C++ library behavior of notify and emit + # such messages to stderr. It encourages people to clean-up and does + # not hide the message. + level = record.levelno + if not FLAGS.is_parsed(): # Also implies "before flag has been defined". + global _warn_preinit_stderr + if _warn_preinit_stderr: + sys.stderr.write( + 'WARNING: Logging before flag parsing goes to stderr.\n') + _warn_preinit_stderr = False + self._log_to_stderr(record) + elif FLAGS['logtostderr'].value: + self._log_to_stderr(record) + else: + super(PythonHandler, self).emit(record) + stderr_threshold = converter.string_to_standard( + FLAGS['stderrthreshold'].value) + if ((FLAGS['alsologtostderr'].value or level >= stderr_threshold) and + self.stream != sys.stderr): + self._log_to_stderr(record) + # Die when the record is created from ABSLLogger and level is FATAL. + if _is_absl_fatal_record(record): + self.flush() # Flush the log before dying. + + # In threaded python, sys.exit() from a non-main thread only + # exits the thread in question. + os.abort() + + def close(self): + """Closes the stream to which we are writing.""" + self.acquire() + try: + self.flush() + try: + # Do not close the stream if it's sys.stderr|stdout. They may be + # redirected or overridden to files, which should be managed by users + # explicitly. + user_managed = sys.stderr, sys.stdout, sys.__stderr__, sys.__stdout__ + if self.stream not in user_managed and ( + not hasattr(self.stream, 'isatty') or not self.stream.isatty()): + self.stream.close() + except ValueError: + # A ValueError is thrown if we try to run isatty() on a closed file. 
+ pass + super(PythonHandler, self).close() + finally: + self.release() + + +class ABSLHandler(logging.Handler): + """Abseil Python logging module's log handler.""" + + def __init__(self, python_logging_formatter): + super(ABSLHandler, self).__init__() + + self._python_handler = PythonHandler(formatter=python_logging_formatter) + self.activate_python_handler() + + def format(self, record): + return self._current_handler.format(record) + + def setFormatter(self, fmt): + self._current_handler.setFormatter(fmt) + + def emit(self, record): + self._current_handler.emit(record) + + def flush(self): + self._current_handler.flush() + + def close(self): + super(ABSLHandler, self).close() + self._current_handler.close() + + def handle(self, record): + rv = self.filter(record) + if rv: + return self._current_handler.handle(record) + return rv + + @property + def python_handler(self): + return self._python_handler + + def activate_python_handler(self): + """Uses the Python logging handler as the current logging handler.""" + self._current_handler = self._python_handler + + def use_absl_log_file(self, program_name=None, log_dir=None): + self._current_handler.use_absl_log_file(program_name, log_dir) + + def start_logging_to_file(self, program_name=None, log_dir=None): + self._current_handler.start_logging_to_file(program_name, log_dir) + + +class PythonFormatter(logging.Formatter): + """Formatter class used by :class:`~absl.logging.PythonHandler`.""" + + def format(self, record): + """Appends the message from the record to the results of the prefix. + + Args: + record: logging.LogRecord, the record to be formatted. + + Returns: + The formatted string representing the record. + """ + if (not FLAGS['showprefixforinfo'].value and + FLAGS['verbosity'].value == converter.ABSL_INFO and + record.levelno == logging.INFO and + _absl_handler.python_handler.stream == sys.stderr): + prefix = '' + else: + prefix = get_absl_log_prefix(record) + return prefix + super(PythonFormatter, self).format(record) + + +class ABSLLogger(logging.getLoggerClass()): + """A logger that will create LogRecords while skipping some stack frames. + + This class maintains an internal list of filenames and method names + for use when determining who called the currently executing stack + frame. Any method names from specific source files are skipped when + walking backwards through the stack. + + Client code should use the register_frame_to_skip method to let the + ABSLLogger know which method from which file should be + excluded from the walk backwards through the stack. + """ + _frames_to_skip = set() + + def findCaller(self, stack_info=False, stacklevel=1): + """Finds the frame of the calling method on the stack. + + This method skips any frames registered with the + ABSLLogger and any methods from this file, and whatever + method is currently being used to generate the prefix for the log + line. Then it returns the file name, line number, and method name + of the calling method. An optional fourth item may be returned, + callers who only need things from the first three are advised to + always slice or index the result rather than using direct unpacking + assignment. + + Args: + stack_info: bool, when True, include the stack trace as a fourth item + returned. On Python 3 there are always four items returned - the + fourth will be None when this is False. On Python 2 the stdlib + base class API only returns three items. We do the same when this + new parameter is unspecified or False for compatibility. 
+ + Returns: + (filename, lineno, methodname[, sinfo]) of the calling method. + """ + f_to_skip = ABSLLogger._frames_to_skip + # Use sys._getframe(2) instead of logging.currentframe(), it's slightly + # faster because there is one less frame to traverse. + frame = sys._getframe(2) # pylint: disable=protected-access + + while frame: + code = frame.f_code + if (_LOGGING_FILE_PREFIX not in code.co_filename and + (code.co_filename, code.co_name, + code.co_firstlineno) not in f_to_skip and + (code.co_filename, code.co_name) not in f_to_skip): + sinfo = None + if stack_info: + out = io.StringIO() + out.write(u'Stack (most recent call last):\n') + traceback.print_stack(frame, file=out) + sinfo = out.getvalue().rstrip(u'\n') + return (code.co_filename, frame.f_lineno, code.co_name, sinfo) + frame = frame.f_back + + def critical(self, msg, *args, **kwargs): + """Logs ``msg % args`` with severity ``CRITICAL``.""" + self.log(logging.CRITICAL, msg, *args, **kwargs) + + def fatal(self, msg, *args, **kwargs): + """Logs ``msg % args`` with severity ``FATAL``.""" + self.log(logging.FATAL, msg, *args, **kwargs) + + def error(self, msg, *args, **kwargs): + """Logs ``msg % args`` with severity ``ERROR``.""" + self.log(logging.ERROR, msg, *args, **kwargs) + + def warn(self, msg, *args, **kwargs): + """Logs ``msg % args`` with severity ``WARN``.""" + warnings.warn("The 'warn' method is deprecated, use 'warning' instead", + DeprecationWarning, 2) + self.log(logging.WARN, msg, *args, **kwargs) + + def warning(self, msg, *args, **kwargs): + """Logs ``msg % args`` with severity ``WARNING``.""" + self.log(logging.WARNING, msg, *args, **kwargs) + + def info(self, msg, *args, **kwargs): + """Logs ``msg % args`` with severity ``INFO``.""" + self.log(logging.INFO, msg, *args, **kwargs) + + def debug(self, msg, *args, **kwargs): + """Logs ``msg % args`` with severity ``DEBUG``.""" + self.log(logging.DEBUG, msg, *args, **kwargs) + + def log(self, level, msg, *args, **kwargs): + """Logs a message at a cetain level substituting in the supplied arguments. + + This method behaves differently in python and c++ modes. + + Args: + level: int, the standard logging level at which to log the message. + msg: str, the text of the message to log. + *args: The arguments to substitute in the message. + **kwargs: The keyword arguments to substitute in the message. + """ + if level >= logging.FATAL: + # Add property to the LogRecord created by this logger. + # This will be used by the ABSLHandler to determine whether it should + # treat CRITICAL/FATAL logs as really FATAL. + extra = kwargs.setdefault('extra', {}) + extra[_ABSL_LOG_FATAL] = True + super(ABSLLogger, self).log(level, msg, *args, **kwargs) + + def handle(self, record): + """Calls handlers without checking ``Logger.disabled``. + + Non-root loggers are set to disabled after setup with :func:`logging.config` + if it's not explicitly specified. Historically, absl logging will not be + disabled by that. To maintaining this behavior, this function skips + checking the ``Logger.disabled`` bit. + + This logger can still be disabled by adding a filter that filters out + everything. + + Args: + record: logging.LogRecord, the record to handle. + """ + if self.filter(record): + self.callHandlers(record) + + @classmethod + def register_frame_to_skip(cls, file_name, function_name, line_number=None): + """Registers a function name to skip when walking the stack. 
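A hedged sketch of how the frame-skipping machinery above is meant to be used: decorating a small logging helper with skip_log_prefix (which calls register_frame_to_skip) makes the emitted file:line prefix point at the helper's caller rather than at the helper itself. The helper name below is hypothetical.

from absl import logging

@logging.skip_log_prefix
def log_rpc_error(method, error):
  # This wrapper is registered as a frame to skip, so the log line's source
  # location is attributed to whoever called log_rpc_error().
  logging.error('RPC %s failed: %s', method, error)

log_rpc_error('GetUser', 'deadline exceeded')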
+ + The :class:`~absl.logging.ABSLLogger` sometimes skips method calls on the + stack to make the log messages meaningful in their appropriate context. + This method registers a function from a particular file as one + which should be skipped. + + Args: + file_name: str, the name of the file that contains the function. + function_name: str, the name of the function to skip. + line_number: int, if provided, only the function with this starting line + number will be skipped. Otherwise, all functions with the same name + in the file will be skipped. + """ + if line_number is not None: + cls._frames_to_skip.add((file_name, function_name, line_number)) + else: + cls._frames_to_skip.add((file_name, function_name)) + + +def _get_thread_id(): + """Gets id of current thread, suitable for logging as an unsigned quantity. + + If pywrapbase is linked, returns GetTID() for the thread ID to be + consistent with C++ logging. Otherwise, returns the numeric thread id. + The quantities are made unsigned by masking with 2*sys.maxint + 1. + + Returns: + Thread ID unique to this process (unsigned) + """ + thread_id = threading.get_ident() + return thread_id & _THREAD_ID_MASK + + +def get_absl_logger(): + """Returns the absl logger instance.""" + assert _absl_logger is not None + return _absl_logger + + +def get_absl_handler(): + """Returns the absl handler instance.""" + assert _absl_handler is not None + return _absl_handler + + +def use_python_logging(quiet=False): + """Uses the python implementation of the logging code. + + Args: + quiet: No logging message about switching logging type. + """ + get_absl_handler().activate_python_handler() + if not quiet: + info('Restoring pure python logging') + + +_attempted_to_remove_stderr_stream_handlers = False + + +def use_absl_handler(): + """Uses the ABSL logging handler for logging. + + This method is called in :func:`app.run()` so the absl handler + is used in absl apps. + """ + global _attempted_to_remove_stderr_stream_handlers + if not _attempted_to_remove_stderr_stream_handlers: + # The absl handler logs to stderr by default. To prevent double logging to + # stderr, the following code tries its best to remove other handlers that + # emit to stderr. Those handlers are most commonly added when + # logging.info/debug is called before calling use_absl_handler(). 
+ handlers = [ + h for h in logging.root.handlers + if isinstance(h, logging.StreamHandler) and h.stream == sys.stderr] + for h in handlers: + logging.root.removeHandler(h) + _attempted_to_remove_stderr_stream_handlers = True + + absl_handler = get_absl_handler() + if absl_handler not in logging.root.handlers: + logging.root.addHandler(absl_handler) + FLAGS['verbosity']._update_logging_levels() # pylint: disable=protected-access + FLAGS['logger_levels']._update_logger_levels() # pylint: disable=protected-access + + +def _initialize(): + """Initializes loggers and handlers.""" + global _absl_logger, _absl_handler + + if _absl_logger: + return + + original_logger_class = logging.getLoggerClass() + logging.setLoggerClass(ABSLLogger) + _absl_logger = logging.getLogger('absl') + logging.setLoggerClass(original_logger_class) + + python_logging_formatter = PythonFormatter() + _absl_handler = ABSLHandler(python_logging_formatter) + + +_initialize() diff --git a/env-llmeval/lib/python3.10/site-packages/absl/logging/__init__.pyi b/env-llmeval/lib/python3.10/site-packages/absl/logging/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..5d5bb69d59d8e0c8fdaafaab7b975361031be898 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/logging/__init__.pyi @@ -0,0 +1,290 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from typing import Any, Callable, Dict, NoReturn, Optional, Tuple, TypeVar, Union + +from absl import flags + +# Logging levels. +FATAL: int +ERROR: int +WARNING: int +WARN: int # Deprecated name. +INFO: int +DEBUG: int + +ABSL_LOGGING_PREFIX_REGEX: str + +LOGTOSTDERR: flags.FlagHolder[bool] +ALSOLOGTOSTDERR: flags.FlagHolder[bool] +LOG_DIR: flags.FlagHolder[str] +VERBOSITY: flags.FlagHolder[int] +LOGGER_LEVELS: flags.FlagHolder[Dict[str, str]] +STDERRTHRESHOLD: flags.FlagHolder[str] +SHOWPREFIXFORINFO: flags.FlagHolder[bool] + + +def get_verbosity() -> int: + ... + + +def set_verbosity(v: Union[int, str]) -> None: + ... + + +def set_stderrthreshold(s: Union[int, str]) -> None: + ... + + +# TODO(b/277607978): Provide actual args+kwargs shadowing stdlib's logging functions. +def fatal(msg: Any, *args: Any, **kwargs: Any) -> NoReturn: + ... + + +def error(msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def warning(msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def warn(msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def info(msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def debug(msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def exception(msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def log_every_n(level: int, msg: Any, n: int, *args: Any) -> None: + ... + + +def log_every_n_seconds( + level: int, msg: Any, n_seconds: float, *args: Any +) -> None: + ... + + +def log_first_n(level: int, msg: Any, n: int, *args: Any) -> None: + ... + + +def log_if(level: int, msg: Any, condition: Any, *args: Any) -> None: + ... 
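For programs that do not go through absl.app.run(), the handler installation and flag-driven verbosity described earlier have to be wired up by hand. A minimal sketch, assuming command-line flags are parsed from sys.argv, that also exercises the rate-limited helpers:

import sys

from absl import flags
from absl import logging

flags.FLAGS(sys.argv)        # parses --verbosity, --stderrthreshold, --log_dir, ...
logging.use_absl_handler()   # routes the root logger through the absl handler
logging.set_verbosity(logging.INFO)

for i in range(1000):
  # Emitted on calls 1, 101, 201, ... from this call site.
  logging.log_every_n(logging.INFO, 'processed %d items', 100, i)
  # Emitted at most once every 30 seconds from this call site.
  logging.log_every_n_seconds(logging.INFO, 'still processing at item %d', 30, i)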
+ + +def log(level: int, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def vlog(level: int, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + +def vlog_is_on(level: int) -> bool: + ... + + +def flush() -> None: + ... + + +def level_debug() -> bool: + ... + + +def level_info() -> bool: + ... + + +def level_warning() -> bool: + ... + + +level_warn = level_warning # Deprecated function. + + +def level_error() -> bool: + ... + + +def get_log_file_name(level: int = ...) -> str: + ... + + +def find_log_dir_and_names( + program_name: Optional[str] = ..., log_dir: Optional[str] = ... +) -> Tuple[str, str, str]: + ... + + +def find_log_dir(log_dir: Optional[str] = ...) -> str: + ... + + +def get_absl_log_prefix(record: logging.LogRecord) -> str: + ... + + +_SkipLogT = TypeVar('_SkipLogT', str, Callable[..., Any]) + +def skip_log_prefix(func: _SkipLogT) -> _SkipLogT: + ... + + +_StreamT = TypeVar("_StreamT") + + +class PythonHandler(logging.StreamHandler[_StreamT]): + + def __init__( + self, + stream: Optional[_StreamT] = ..., + formatter: Optional[logging.Formatter] = ..., + ) -> None: + ... + + def start_logging_to_file( + self, program_name: Optional[str] = ..., log_dir: Optional[str] = ... + ) -> None: + ... + + def use_absl_log_file( + self, program_name: Optional[str] = ..., log_dir: Optional[str] = ... + ) -> None: + ... + + def flush(self) -> None: + ... + + def emit(self, record: logging.LogRecord) -> None: + ... + + def close(self) -> None: + ... + + +class ABSLHandler(logging.Handler): + + def __init__(self, python_logging_formatter: PythonFormatter) -> None: + ... + + def format(self, record: logging.LogRecord) -> str: + ... + + def setFormatter(self, fmt) -> None: + ... + + def emit(self, record: logging.LogRecord) -> None: + ... + + def flush(self) -> None: + ... + + def close(self) -> None: + ... + + def handle(self, record: logging.LogRecord) -> bool: + ... + + @property + def python_handler(self) -> PythonHandler: + ... + + def activate_python_handler(self) -> None: + ... + + def use_absl_log_file( + self, program_name: Optional[str] = ..., log_dir: Optional[str] = ... + ) -> None: + ... + + def start_logging_to_file(self, program_name=None, log_dir=None) -> None: + ... + + +class PythonFormatter(logging.Formatter): + + def format(self, record: logging.LogRecord) -> str: + ... + + +class ABSLLogger(logging.Logger): + + def findCaller( + self, stack_info: bool = ..., stacklevel: int = ... + ) -> Tuple[str, int, str, Optional[str]]: + ... + + def critical(self, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + def fatal(self, msg: Any, *args: Any, **kwargs: Any) -> NoReturn: + ... + + def error(self, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + def warn(self, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + def warning(self, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + def info(self, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + def debug(self, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + def log(self, level: int, msg: Any, *args: Any, **kwargs: Any) -> None: + ... + + def handle(self, record: logging.LogRecord) -> None: + ... + + @classmethod + def register_frame_to_skip( + cls, file_name: str, function_name: str, line_number: Optional[int] = ... + ) -> None: + ... + + +# NOTE: Returns None before _initialize called but shouldn't occur after import. +def get_absl_logger() -> ABSLLogger: + ... + + +# NOTE: Returns None before _initialize called but shouldn't occur after import. 
+def get_absl_handler() -> ABSLHandler: + ... + + +def use_python_logging(quiet: bool = ...) -> None: + ... + + +def use_absl_handler() -> None: + ... diff --git a/env-llmeval/lib/python3.10/site-packages/absl/logging/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/logging/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6fa883ba9a6d383642fdc08433ae940c7e9ea37a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/logging/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/logging/__pycache__/converter.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/logging/__pycache__/converter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..691f796e3a0028d79340366ad602ce6f86e64219 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/logging/__pycache__/converter.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/logging/converter.py b/env-llmeval/lib/python3.10/site-packages/absl/logging/converter.py new file mode 100644 index 0000000000000000000000000000000000000000..0239ab4556458b995f9cbca796281cc44acaf476 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/logging/converter.py @@ -0,0 +1,214 @@ +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Module to convert log levels between Abseil Python, C++, and Python standard. + +This converter has to convert (best effort) between three different +logging level schemes: + + * **cpp**: The C++ logging level scheme used in Abseil C++. + * **absl**: The absl.logging level scheme used in Abseil Python. + * **standard**: The python standard library logging level scheme. + +Here is a handy ascii chart for easy mental mapping:: + + LEVEL | cpp | absl | standard | + ---------+-----+--------+----------+ + DEBUG | 0 | 1 | 10 | + INFO | 0 | 0 | 20 | + WARNING | 1 | -1 | 30 | + ERROR | 2 | -2 | 40 | + CRITICAL | 3 | -3 | 50 | + FATAL | 3 | -3 | 50 | + +Note: standard logging ``CRITICAL`` is mapped to absl/cpp ``FATAL``. +However, only ``CRITICAL`` logs from the absl logger (or absl.logging.fatal) +will terminate the program. ``CRITICAL`` logs from non-absl loggers are treated +as error logs with a message prefix ``"CRITICAL - "``. + +Converting from standard to absl or cpp is a lossy conversion. +Converting back to standard will lose granularity. For this reason, +users should always try to convert to standard, the richest +representation, before manipulating the levels, and then only to cpp +or absl if those level schemes are absolutely necessary. 
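The chart above can be spot-checked against the helpers defined in the rest of this module; the following assertions are an illustrative sketch whose expected values follow directly from the table:

import logging as std_logging

from absl.logging import converter

assert converter.absl_to_standard(converter.ABSL_WARNING) == std_logging.WARNING  # -1 -> 30
assert converter.standard_to_absl(std_logging.ERROR) == converter.ABSL_ERROR      # 40 -> -2
assert converter.absl_to_cpp(converter.ABSL_FATAL) == 3                           # -3 -> 3
assert converter.string_to_standard('debug') == std_logging.DEBUG                 # 'debug' -> 10
assert converter.get_initial_for_level(std_logging.INFO) == 'I'
# Verbose levels above absl DEBUG map into the gap below standard DEBUG:
assert converter.absl_to_standard(2) == std_logging.DEBUG - 1                     # vlog(2) -> 9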
+""" + +import logging + +STANDARD_CRITICAL = logging.CRITICAL +STANDARD_ERROR = logging.ERROR +STANDARD_WARNING = logging.WARNING +STANDARD_INFO = logging.INFO +STANDARD_DEBUG = logging.DEBUG + +# These levels are also used to define the constants +# FATAL, ERROR, WARNING, INFO, and DEBUG in the +# absl.logging module. +ABSL_FATAL = -3 +ABSL_ERROR = -2 +ABSL_WARNING = -1 +ABSL_WARN = -1 # Deprecated name. +ABSL_INFO = 0 +ABSL_DEBUG = 1 + +ABSL_LEVELS = {ABSL_FATAL: 'FATAL', + ABSL_ERROR: 'ERROR', + ABSL_WARNING: 'WARNING', + ABSL_INFO: 'INFO', + ABSL_DEBUG: 'DEBUG'} + +# Inverts the ABSL_LEVELS dictionary +ABSL_NAMES = {'FATAL': ABSL_FATAL, + 'ERROR': ABSL_ERROR, + 'WARNING': ABSL_WARNING, + 'WARN': ABSL_WARNING, # Deprecated name. + 'INFO': ABSL_INFO, + 'DEBUG': ABSL_DEBUG} + +ABSL_TO_STANDARD = {ABSL_FATAL: STANDARD_CRITICAL, + ABSL_ERROR: STANDARD_ERROR, + ABSL_WARNING: STANDARD_WARNING, + ABSL_INFO: STANDARD_INFO, + ABSL_DEBUG: STANDARD_DEBUG} + +# Inverts the ABSL_TO_STANDARD +STANDARD_TO_ABSL = dict((v, k) for (k, v) in ABSL_TO_STANDARD.items()) + + +def get_initial_for_level(level): + """Gets the initial that should start the log line for the given level. + + It returns: + + * ``'I'`` when: ``level < STANDARD_WARNING``. + * ``'W'`` when: ``STANDARD_WARNING <= level < STANDARD_ERROR``. + * ``'E'`` when: ``STANDARD_ERROR <= level < STANDARD_CRITICAL``. + * ``'F'`` when: ``level >= STANDARD_CRITICAL``. + + Args: + level: int, a Python standard logging level. + + Returns: + The first initial as it would be logged by the C++ logging module. + """ + if level < STANDARD_WARNING: + return 'I' + elif level < STANDARD_ERROR: + return 'W' + elif level < STANDARD_CRITICAL: + return 'E' + else: + return 'F' + + +def absl_to_cpp(level): + """Converts an absl log level to a cpp log level. + + Args: + level: int, an absl.logging level. + + Raises: + TypeError: Raised when level is not an integer. + + Returns: + The corresponding integer level for use in Abseil C++. + """ + if not isinstance(level, int): + raise TypeError('Expect an int level, found {}'.format(type(level))) + if level >= 0: + # C++ log levels must be >= 0 + return 0 + else: + return -level + + +def absl_to_standard(level): + """Converts an integer level from the absl value to the standard value. + + Args: + level: int, an absl.logging level. + + Raises: + TypeError: Raised when level is not an integer. + + Returns: + The corresponding integer level for use in standard logging. + """ + if not isinstance(level, int): + raise TypeError('Expect an int level, found {}'.format(type(level))) + if level < ABSL_FATAL: + level = ABSL_FATAL + if level <= ABSL_DEBUG: + return ABSL_TO_STANDARD[level] + # Maps to vlog levels. + return STANDARD_DEBUG - level + 1 + + +def string_to_standard(level): + """Converts a string level to standard logging level value. + + Args: + level: str, case-insensitive ``'debug'``, ``'info'``, ``'warning'``, + ``'error'``, ``'fatal'``. + + Returns: + The corresponding integer level for use in standard logging. + """ + return absl_to_standard(ABSL_NAMES.get(level.upper())) + + +def standard_to_absl(level): + """Converts an integer level from the standard value to the absl value. + + Args: + level: int, a Python standard logging level. + + Raises: + TypeError: Raised when level is not an integer. + + Returns: + The corresponding integer level for use in absl logging. 
+ """ + if not isinstance(level, int): + raise TypeError('Expect an int level, found {}'.format(type(level))) + if level < 0: + level = 0 + if level < STANDARD_DEBUG: + # Maps to vlog levels. + return STANDARD_DEBUG - level + 1 + elif level < STANDARD_INFO: + return ABSL_DEBUG + elif level < STANDARD_WARNING: + return ABSL_INFO + elif level < STANDARD_ERROR: + return ABSL_WARNING + elif level < STANDARD_CRITICAL: + return ABSL_ERROR + else: + return ABSL_FATAL + + +def standard_to_cpp(level): + """Converts an integer level from the standard value to the cpp value. + + Args: + level: int, a Python standard logging level. + + Raises: + TypeError: Raised when level is not an integer. + + Returns: + The corresponding integer level for use in cpp logging. + """ + return absl_to_cpp(standard_to_absl(level)) diff --git a/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/_bazelize_command.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/_bazelize_command.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e1a2f57345fa255fbc2c903a68593b4238653191 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/_bazelize_command.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/absltest.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/absltest.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8ccd02e075507283342c62030aa27e07ffe08c05 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/absltest.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/flagsaver.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/flagsaver.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..304a869664a17ad11a56fc64b7adb819d914b4b5 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/absl/testing/__pycache__/flagsaver.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/absl/testing/_pretty_print_reporter.py b/env-llmeval/lib/python3.10/site-packages/absl/testing/_pretty_print_reporter.py new file mode 100644 index 0000000000000000000000000000000000000000..b0dde07e4f5a98abedf1b3c5f6ac72fe256fa716 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/absl/testing/_pretty_print_reporter.py @@ -0,0 +1,91 @@ +# Copyright 2018 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""TestResult implementing default output for test execution status.""" + +import unittest + + +class TextTestResult(unittest.TextTestResult): + """TestResult class that provides the default text result formatting.""" + + def __init__(self, stream, descriptions, verbosity): + # Disable the verbose per-test output from the superclass, since it would + # conflict with our customized output. 
+ super(TextTestResult, self).__init__(stream, descriptions, 0) + self._per_test_output = verbosity > 0 + + def _print_status(self, tag, test): + if self._per_test_output: + test_id = test.id() + if test_id.startswith('__main__.'): + test_id = test_id[len('__main__.'):] + print('[%s] %s' % (tag, test_id), file=self.stream) + self.stream.flush() + + def startTest(self, test): + super(TextTestResult, self).startTest(test) + self._print_status(' RUN ', test) + + def addSuccess(self, test): + super(TextTestResult, self).addSuccess(test) + self._print_status(' OK ', test) + + def addError(self, test, err): + super(TextTestResult, self).addError(test, err) + self._print_status(' FAILED ', test) + + def addFailure(self, test, err): + super(TextTestResult, self).addFailure(test, err) + self._print_status(' FAILED ', test) + + def addSkip(self, test, reason): + super(TextTestResult, self).addSkip(test, reason) + self._print_status(' SKIPPED ', test) + + def addExpectedFailure(self, test, err): + super(TextTestResult, self).addExpectedFailure(test, err) + self._print_status(' OK ', test) + + def addUnexpectedSuccess(self, test): + super(TextTestResult, self).addUnexpectedSuccess(test) + self._print_status(' FAILED ', test) + + +class TextTestRunner(unittest.TextTestRunner): + """A test runner that produces formatted text results.""" + + _TEST_RESULT_CLASS = TextTestResult + + # Set this to true at the class or instance level to run tests using a + # debug-friendly method (e.g, one that doesn't catch exceptions and interacts + # better with debuggers). + # Usually this is set using --pdb_post_mortem. + run_for_debugging = False + + def run(self, test): + # type: (TestCase) -> TestResult + if self.run_for_debugging: + return self._run_debug(test) + else: + return super(TextTestRunner, self).run(test) + + def _run_debug(self, test): + # type: (TestCase) -> TestResult + test.debug() + # Return an empty result to indicate success. + return self._makeResult() + + def _makeResult(self): + return TextTestResult(self.stream, self.descriptions, self.verbosity) diff --git a/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/LICENSE b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..4362b49151d7b34ef83b3067a8f9c9f877d72a0e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/LICENSE @@ -0,0 +1,502 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. 
You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. 
These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. 
You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) 
Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. 
If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. 
You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. 
If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! diff --git a/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..3ec4d797a2de96620acc6c38a67ea3e819bf5f46 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/METADATA @@ -0,0 +1,97 @@ +Metadata-Version: 2.1 +Name: chardet +Version: 5.2.0 +Summary: Universal encoding detector for Python 3 +Home-page: https://github.com/chardet/chardet +Author: Mark Pilgrim +Author-email: mark@diveintomark.org +Maintainer: Daniel Blanchard +Maintainer-email: dan.blanchard@gmail.com +License: LGPL +Project-URL: Documentation, https://chardet.readthedocs.io/ +Project-URL: GitHub Project, https://github.com/chardet/chardet +Project-URL: Issue Tracker, https://github.com/chardet/chardet/issues +Keywords: encoding,i18n,xml +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Linguistic +Requires-Python: >=3.7 +License-File: LICENSE + +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. 
image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR, Johab (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252, MacRoman (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 3.7+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent original chardet port from C, and `Ian Cordasco `_'s +`charade `_ Python 3-compatible fork. + +:maintainer: Dan Blanchard diff --git a/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..7fbe61b3f1db0bf1ec74d3ad49a01ec028b6bbc5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/RECORD @@ -0,0 +1,107 @@ +../../../bin/chardetect,sha256=5zKOa0-Us5NHXrYrlOYaJ8zG-WMwfVXMk37Ldlq4oJA,247 +chardet-5.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +chardet-5.2.0.dist-info/LICENSE,sha256=3GJlINzVOiL3J68-5Cx3DlbJemT-OtsGN5nYqwMv5VE,26530 +chardet-5.2.0.dist-info/METADATA,sha256=PAr2NQ6hQWpjyFnwlI7MoxHt2S_6oRiUsucOKMNhzGw,3418 +chardet-5.2.0.dist-info/RECORD,, +chardet-5.2.0.dist-info/WHEEL,sha256=AtBG6SXL3KF_v0NxLf0ehyVOh0cold-JbJYXNGorC6Q,92 +chardet-5.2.0.dist-info/entry_points.txt,sha256=_cdvYc4jyY68GYfsQAAthNMxO-yodcGkvNC1xOEsLmI,59 +chardet-5.2.0.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 +chardet/__init__.py,sha256=57R-HSxj0PWmILMN0GFmUNqEMfrEVSamXyjD-W6_fbs,4797 +chardet/__main__.py,sha256=puNj2o_QfBRKElEkiVp1zEIL1gGYD2o-JuXLFlqHDC4,123 +chardet/__pycache__/__init__.cpython-310.pyc,, +chardet/__pycache__/__main__.cpython-310.pyc,, +chardet/__pycache__/big5freq.cpython-310.pyc,, +chardet/__pycache__/big5prober.cpython-310.pyc,, +chardet/__pycache__/chardistribution.cpython-310.pyc,, +chardet/__pycache__/charsetgroupprober.cpython-310.pyc,, +chardet/__pycache__/charsetprober.cpython-310.pyc,, +chardet/__pycache__/codingstatemachine.cpython-310.pyc,, +chardet/__pycache__/codingstatemachinedict.cpython-310.pyc,, +chardet/__pycache__/cp949prober.cpython-310.pyc,, +chardet/__pycache__/enums.cpython-310.pyc,, +chardet/__pycache__/escprober.cpython-310.pyc,, +chardet/__pycache__/escsm.cpython-310.pyc,, 
+chardet/__pycache__/eucjpprober.cpython-310.pyc,, +chardet/__pycache__/euckrfreq.cpython-310.pyc,, +chardet/__pycache__/euckrprober.cpython-310.pyc,, +chardet/__pycache__/euctwfreq.cpython-310.pyc,, +chardet/__pycache__/euctwprober.cpython-310.pyc,, +chardet/__pycache__/gb2312freq.cpython-310.pyc,, +chardet/__pycache__/gb2312prober.cpython-310.pyc,, +chardet/__pycache__/hebrewprober.cpython-310.pyc,, +chardet/__pycache__/jisfreq.cpython-310.pyc,, +chardet/__pycache__/johabfreq.cpython-310.pyc,, +chardet/__pycache__/johabprober.cpython-310.pyc,, +chardet/__pycache__/jpcntx.cpython-310.pyc,, +chardet/__pycache__/langbulgarianmodel.cpython-310.pyc,, +chardet/__pycache__/langgreekmodel.cpython-310.pyc,, +chardet/__pycache__/langhebrewmodel.cpython-310.pyc,, +chardet/__pycache__/langhungarianmodel.cpython-310.pyc,, +chardet/__pycache__/langrussianmodel.cpython-310.pyc,, +chardet/__pycache__/langthaimodel.cpython-310.pyc,, +chardet/__pycache__/langturkishmodel.cpython-310.pyc,, +chardet/__pycache__/latin1prober.cpython-310.pyc,, +chardet/__pycache__/macromanprober.cpython-310.pyc,, +chardet/__pycache__/mbcharsetprober.cpython-310.pyc,, +chardet/__pycache__/mbcsgroupprober.cpython-310.pyc,, +chardet/__pycache__/mbcssm.cpython-310.pyc,, +chardet/__pycache__/resultdict.cpython-310.pyc,, +chardet/__pycache__/sbcharsetprober.cpython-310.pyc,, +chardet/__pycache__/sbcsgroupprober.cpython-310.pyc,, +chardet/__pycache__/sjisprober.cpython-310.pyc,, +chardet/__pycache__/universaldetector.cpython-310.pyc,, +chardet/__pycache__/utf1632prober.cpython-310.pyc,, +chardet/__pycache__/utf8prober.cpython-310.pyc,, +chardet/__pycache__/version.cpython-310.pyc,, +chardet/big5freq.py,sha256=ltcfP-3PjlNHCoo5e4a7C4z-2DhBTXRfY6jbMbB7P30,31274 +chardet/big5prober.py,sha256=lPMfwCX6v2AaPgvFh_cSWZcgLDbWiFCHLZ_p9RQ9uxE,1763 +chardet/chardistribution.py,sha256=13B8XUG4oXDuLdXvfbIWwLFeR-ZU21AqTS1zcdON8bU,10032 +chardet/charsetgroupprober.py,sha256=UKK3SaIZB2PCdKSIS0gnvMtLR9JJX62M-fZJu3OlWyg,3915 +chardet/charsetprober.py,sha256=L3t8_wIOov8em-vZWOcbkdsrwe43N6_gqNh5pH7WPd4,5420 +chardet/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +chardet/cli/__pycache__/__init__.cpython-310.pyc,, +chardet/cli/__pycache__/chardetect.cpython-310.pyc,, +chardet/cli/chardetect.py,sha256=zibMVg5RpKb-ME9_7EYG4ZM2Sf07NHcQzZ12U-rYJho,3242 +chardet/codingstatemachine.py,sha256=K7k69sw3jY5DmTXoSJQVsUtFIQKYPQVOSJJhBuGv_yE,3732 +chardet/codingstatemachinedict.py,sha256=0GY3Hi2qIZvDrOOJ3AtqppM1RsYxr_66ER4EHjuMiMc,542 +chardet/cp949prober.py,sha256=0jKRV7fECuWI16rNnks0ZECKA1iZYCIEaP8A1ZvjUSI,1860 +chardet/enums.py,sha256=TzECiZoCKNMqgwU76cPCeKWFBqaWvAdLMev5_bCkhY8,1683 +chardet/escprober.py,sha256=Kho48X65xE0scFylIdeJjM2bcbvRvv0h0WUbMWrJD3A,4006 +chardet/escsm.py,sha256=AqyXpA2FQFD7k-buBty_7itGEYkhmVa8X09NLRul3QM,12176 +chardet/eucjpprober.py,sha256=5KYaM9fsxkRYzw1b5k0fL-j_-ezIw-ij9r97a9MHxLY,3934 +chardet/euckrfreq.py,sha256=3mHuRvXfsq_QcQysDQFb8qSudvTiol71C6Ic2w57tKM,13566 +chardet/euckrprober.py,sha256=hiFT6wM174GIwRvqDsIcuOc-dDsq2uPKMKbyV8-1Xnc,1753 +chardet/euctwfreq.py,sha256=2alILE1Lh5eqiFJZjzRkMQXolNJRHY5oBQd-vmZYFFM,36913 +chardet/euctwprober.py,sha256=NxbpNdBtU0VFI0bKfGfDkpP7S2_8_6FlO87dVH0ogws,1753 +chardet/gb2312freq.py,sha256=49OrdXzD-HXqwavkqjo8Z7gvs58hONNzDhAyMENNkvY,20735 +chardet/gb2312prober.py,sha256=KPEBueaSLSvBpFeINMu0D6TgHcR90e5PaQawifzF4o0,1759 +chardet/hebrewprober.py,sha256=96T_Lj_OmW-fK7JrSHojYjyG3fsGgbzkoTNleZ3kfYE,14537 +chardet/jisfreq.py,sha256=mm8tfrwqhpOd3wzZKS4NJqkYBQVcDfTM2JiQ5aW932E,25796 
+chardet/johabfreq.py,sha256=dBpOYG34GRX6SL8k_LbS9rxZPMjLjoMlgZ03Pz5Hmqc,42498 +chardet/johabprober.py,sha256=O1Qw9nVzRnun7vZp4UZM7wvJSv9W941mEU9uDMnY3DU,1752 +chardet/jpcntx.py,sha256=uhHrYWkLxE_rF5OkHKInm0HUsrjgKHHVQvtt3UcvotA,27055 +chardet/langbulgarianmodel.py,sha256=bGoRpxBYtrbSHa6mX6PkEA26v30pWmhDjemhdxmkew8,104550 +chardet/langgreekmodel.py,sha256=3wMlEzQ8oU2MbrL2xN8lkuOB0dCMLBhW6heekxusoc0,98472 +chardet/langhebrewmodel.py,sha256=ZUTqusxMvR_earWPs5w-rH10xoe5sPjd9FLMu1DUIvE,98184 +chardet/langhungarianmodel.py,sha256=N-YtC2EiswyS7XsUicCPRycrIzRNj47Y048odp9qOoo,101351 +chardet/langrussianmodel.py,sha256=6v7RcZKGj0VH0864BHzizKNceAYbHvGts2p00ifC7w4,128023 +chardet/langthaimodel.py,sha256=Mr673U9U8rkQFfUDtLP01pp-0TOsl2o6sb75YEjvpcs,102762 +chardet/langturkishmodel.py,sha256=LkXCjWhGUEzqKXvfasHN0SFBigwKJ3xeWNVZ0EyI0kA,95360 +chardet/latin1prober.py,sha256=p15EEmFbmQUwbKLC7lOJVGHEZwcG45ubEZYTGu01J5g,5380 +chardet/macromanprober.py,sha256=9anfzmY6TBfUPDyBDOdY07kqmTHpZ1tK0jL-p1JWcOY,6077 +chardet/mbcharsetprober.py,sha256=Wr04WNI4F3X_VxEverNG-H25g7u-MDDKlNt-JGj-_uU,3715 +chardet/mbcsgroupprober.py,sha256=iRpaNBjV0DNwYPu_z6TiHgRpwYahiM7ztI_4kZ4Uz9A,2131 +chardet/mbcssm.py,sha256=hUtPvDYgWDaA2dWdgLsshbwRfm3Q5YRlRogdmeRUNQw,30391 +chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +chardet/metadata/__pycache__/__init__.cpython-310.pyc,, +chardet/metadata/__pycache__/languages.cpython-310.pyc,, +chardet/metadata/languages.py,sha256=FhvBIdZFxRQ-dTwkb_0madRKgVBCaUMQz9I5xqjE5iQ,13560 +chardet/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +chardet/resultdict.py,sha256=ez4FRvN5KaSosJeJ2WzUyKdDdg35HDy_SSLPXKCdt5M,402 +chardet/sbcharsetprober.py,sha256=-nd3F90i7GpXLjehLVHqVBE0KlWzGvQUPETLBNn4o6U,6400 +chardet/sbcsgroupprober.py,sha256=gcgI0fOfgw_3YTClpbra_MNxwyEyJ3eUXraoLHYb59E,4137 +chardet/sjisprober.py,sha256=aqQufMzRw46ZpFlzmYaYeT2-nzmKb-hmcrApppJ862k,4007 +chardet/universaldetector.py,sha256=xYBrg4x0dd9WnT8qclfADVD9ondrUNkqPmvte1pa520,14848 +chardet/utf1632prober.py,sha256=pw1epGdMj1hDGiCu1AHqqzOEfjX8MVdiW7O1BlT8-eQ,8505 +chardet/utf8prober.py,sha256=8m08Ub5490H4jQ6LYXvFysGtgKoKsHUd2zH_i8_TnVw,2812 +chardet/version.py,sha256=jp8ePp1zC63YxruGcHSuKxtf3-fF1LYAMUZD2eDWYok,244 diff --git a/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/top_level.txt b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..79236f25cda563eb57cd7b5838256d9f3fdf18ab --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/chardet-5.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +chardet diff --git a/env-llmeval/lib/python3.10/site-packages/click/__init__.py b/env-llmeval/lib/python3.10/site-packages/click/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9a1dab048917edc420af440c73bd1d689de6b3fa --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/__init__.py @@ -0,0 +1,73 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. 
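For reference, a minimal sketch of the detection API that the chardet 5.2.0 metadata above describes; the sample bytes are illustrative and not taken from this diff:

    # Hypothetical usage of the vendored chardet package (API as described in its metadata).
    import chardet

    raw = "Привет, мир".encode("windows-1251")   # illustrative non-UTF-8 input
    result = chardet.detect(raw)
    # detect() returns a dict with 'encoding', 'confidence' and 'language' keys,
    # mirroring what the chardetect CLI prints for each file.
    print(result["encoding"], result["confidence"])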
+""" +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.1.7" diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3543ea2c61cf6dabcddb56dd60f66ca86abeea2d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc 
b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3fa87d3a3de0ffaac2f250e839ec15073da57126 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/_compat.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a82d054f42da02384df6abdfdb95e0c741bec502 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/_textwrap.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92e8c3c646752eb0c0e1df2055720dad688d4f58 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/globals.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d3a91957d06c87f6c8460116df15c4d5af6be92 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/parser.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..465ad0aa42688fddace91b264901f8296a62bb8c Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/shell_completion.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..766eb2579e5733e3735d27500ff63d125ab9c82a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/testing.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f835d34baab5f2d08eff88faca6bf1fe3787ba7d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/types.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5d7aac184080e7b111555ab9be186f125abc88f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/click/__pycache__/utils.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/click/_compat.py b/env-llmeval/lib/python3.10/site-packages/click/_compat.py new file mode 100644 index 0000000000000000000000000000000000000000..23f8866598b4b4eb836b9d9b210ebd395fd0c557 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/_compat.py @@ -0,0 +1,623 @@ +import codecs 
+import io +import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +WIN = sys.platform.startswith("win") +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO[t.Any]) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
+ """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write("") # type: ignore + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. 
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO[t.Any], bool], bool], + find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, "os.PathLike[str]", int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO[t.Any]: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: "t.Union[str, os.PathLike[str]]", + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO[t.Any], bool]: + binary = "b" in mode + filename = os.fspath(filename) + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO[t.Any], af), True + + +class _AtomicFile: + def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None: + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
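The atomic branch of open_stream above writes to a hidden temporary file in the same directory and renames it over the target when the handle is closed. A minimal sketch of that behaviour, assuming the private click._compat.open_stream helper is called directly (the file name and contents are hypothetical):

    # Illustrative only: _compat is a private click module; this just exercises
    # the atomic-write path implemented above.
    from click._compat import open_stream

    f, should_close = open_stream("settings.txt", mode="w", atomic=True)
    try:
        f.write("color = auto\n")   # written to a .__atomic-write* temp file first
    finally:
        if should_close:
            f.close()               # close() moves the temp file over settings.txt via os.replace()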
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( # noqa: F811 + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO[t.Any]) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.Optional[t.TextIO]], + wrapper_func: t.Callable[[], t.TextIO], +) -> t.Callable[[], t.Optional[t.TextIO]]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.Optional[t.TextIO]: + stream = src_func() + + if stream is None: + return None + + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/env-llmeval/lib/python3.10/site-packages/click/_termui_impl.py b/env-llmeval/lib/python3.10/site-packages/click/_termui_impl.py new file mode 100644 index 0000000000000000000000000000000000000000..f744657753caa6cdef1dcc41a4f0b5e3e9503ab8 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/_termui_impl.py @@ -0,0 +1,739 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ +from io import StringIO +from types import TracebackType + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label: str = label or "" + + if file is None: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + file = StringIO() + + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width: int = width + self.autowidth: bool = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter: t.Iterable[V] = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.last_eta: float + self.start: float + self.start = self.last_eta = time.time() + self.eta_known: bool = False + self.finished: bool = False + self.max_width: t.Optional[int] = None + self.entered: bool = False + self.current_item: t.Optional[V] = None + self.is_hidden: bool = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar[V]": + self.entered = True + self.render_progress() + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
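+        # (Editor's note: comparing against self._last_line means an unchanged
+        # frame is never rewritten, avoiding needless writes and flicker when
+        # updates arrive faster than the bar visibly changes.)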
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> t.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. + if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. 
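+    # (Editor's overview of the dispatch below: no TTY on stdin/stdout ->
+    # _nullpager; $PAGER set -> _pipepager, or _tempfilepager on Windows;
+    # TERM of "dumb"/"emacs" -> _nullpager; plain Windows/OS2 -> `more` via a
+    # temp file; otherwise probe for `less`, then `more`, then fall back to
+    # _nullpager.)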
+ if stdout is None: + stdout = StringIO() + + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get("PAGER", None) or "").strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith("os2"): + return _tempfilepager(generator, "more <", color) + if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: + return _pipepager(generator, "less", color) + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: + return _pipepager(generator, "more", color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit("/", 1)[-1].split() + if color is None and cmd_detail[0] == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) + stdin = t.cast(t.BinaryIO, c.stdin) + encoding = get_best_encoding(stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + stdin.write(text.encode(encoding, "replace")) + except (OSError, KeyboardInterrupt): + pass + else: + stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager( + generator: t.Iterable[str], cmd: str, color: t.Optional[bool] +) -> None: + """Page through text by invoking a program on a temporary file.""" + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + os.system(f'{cmd} "{filename}"') + finally: + os.close(fd) + os.unlink(filename) + + +def _nullpager( + stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] +) -> None: + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system(f"which {editor} >/dev/null 2>&1") == 0: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
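+            # (Editor's note: the timestamp captured below is compared again
+            # after the editor exits; with require_save set, an unchanged
+            # mtime is treated as "not saved" and edit() returns None.)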
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url.replace('"', "")) + args = f'explorer /select,"{url}"' + else: + url = url.replace('"', "") + wait_str = "/WAIT" if wait else "" + args = f'start {wait_str} "" "{url}"' + return os.system(args) + elif CYGWIN: + if locate: + url = os.path.dirname(_unquote_file(url).replace('"', "")) + args = f'cygstart "{url}"' + else: + url = url.replace('"', "") + wait_str = "-w" if wait else "" + args = f'cygstart {wait_str} "{url}"' + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + func: t.Callable[[], str] + + if echo: + func = msvcrt.getwche # type: ignore + else: + func = msvcrt.getwch # type: ignore + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + f: t.Optional[t.TextIO] + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/env-llmeval/lib/python3.10/site-packages/click/_textwrap.py b/env-llmeval/lib/python3.10/site-packages/click/_textwrap.py new file mode 100644 index 0000000000000000000000000000000000000000..b47dcbd4264e86715adfae1c5124c288b67a983e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/_textwrap.py @@ -0,0 +1,49 @@ +import textwrap +import typing as t +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: t.List[str], + cur_line: t.List[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> t.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/env-llmeval/lib/python3.10/site-packages/click/_winconsole.py b/env-llmeval/lib/python3.10/site-packages/click/_winconsole.py new file mode 100644 index 0000000000000000000000000000000000000000..6b20df315b23ecd1e3d0ec32c11c0b5ced577efe --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/_winconsole.py @@ -0,0 +1,279 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. 
+# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. +import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. 
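+    # (Editor's note: callers such as _get_windows_console_stream() guard on
+    # `get_buffer is not None`, so without ctypes.pythonapi the module simply
+    # falls back to the ordinary streams.)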
+    get_buffer = None
+else:
+
+    class Py_buffer(Structure):
+        _fields_ = [
+            ("buf", c_void_p),
+            ("obj", py_object),
+            ("len", c_ssize_t),
+            ("itemsize", c_ssize_t),
+            ("readonly", c_int),
+            ("ndim", c_int),
+            ("format", c_char_p),
+            ("shape", c_ssize_p),
+            ("strides", c_ssize_p),
+            ("suboffsets", c_ssize_p),
+            ("internal", c_void_p),
+        ]
+
+    PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
+    PyBuffer_Release = pythonapi.PyBuffer_Release
+
+    def get_buffer(obj, writable=False):
+        buf = Py_buffer()
+        flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
+        PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+
+        try:
+            buffer_type = c_char * buf.len
+            return buffer_type.from_address(buf.buf)
+        finally:
+            PyBuffer_Release(byref(buf))
+
+
+class _WindowsConsoleRawIOBase(io.RawIOBase):
+    def __init__(self, handle):
+        self.handle = handle
+
+    def isatty(self):
+        super().isatty()
+        return True
+
+
+class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
+    def readable(self):
+        return True
+
+    def readinto(self, b):
+        bytes_to_be_read = len(b)
+        if not bytes_to_be_read:
+            return 0
+        elif bytes_to_be_read % 2:
+            raise ValueError(
+                "cannot read odd number of bytes from UTF-16-LE encoded console"
+            )
+
+        buffer = get_buffer(b, writable=True)
+        code_units_to_be_read = bytes_to_be_read // 2
+        code_units_read = c_ulong()
+
+        rv = ReadConsoleW(
+            HANDLE(self.handle),
+            buffer,
+            code_units_to_be_read,
+            byref(code_units_read),
+            None,
+        )
+        if GetLastError() == ERROR_OPERATION_ABORTED:
+            # wait for KeyboardInterrupt
+            time.sleep(0.1)
+        if not rv:
+            raise OSError(f"Windows error: {GetLastError()}")
+
+        if buffer[0] == EOF:
+            return 0
+        return 2 * code_units_read.value
+
+
+class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
+    def writable(self):
+        return True
+
+    @staticmethod
+    def _get_error_message(errno):
+        if errno == ERROR_SUCCESS:
+            return "ERROR_SUCCESS"
+        elif errno == ERROR_NOT_ENOUGH_MEMORY:
+            return "ERROR_NOT_ENOUGH_MEMORY"
+        return f"Windows error {errno}"
+
+    def write(self, b):
+        bytes_to_be_written = len(b)
+        buf = get_buffer(b)
+        code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
+        code_units_written = c_ulong()
+
+        WriteConsoleW(
+            HANDLE(self.handle),
+            buf,
+            code_units_to_be_written,
+            byref(code_units_written),
+            None,
+        )
+        bytes_written = 2 * code_units_written.value
+
+        if bytes_written == 0 and bytes_to_be_written > 0:
+            raise OSError(self._get_error_message(GetLastError()))
+        return bytes_written
+
+
+class ConsoleStream:
+    def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None:
+        self._text_stream = text_stream
+        self.buffer = byte_stream
+
+    @property
+    def name(self) -> str:
+        return self.buffer.name
+
+    def write(self, x: t.AnyStr) -> int:
+        if isinstance(x, str):
+            return self._text_stream.write(x)
+        try:
+            self.flush()
+        except Exception:
+            pass
+        return self.buffer.write(x)
+
+    def writelines(self, lines: t.Iterable[t.AnyStr]) -> None:
+        for line in lines:
+            self.write(line)
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self._text_stream, name)
+
+    def isatty(self) -> bool:
+        return self.buffer.isatty()
+
+    def __repr__(self):
+        return f"<ConsoleStream name={self.name!r} encoding={self.encoding!r}>"
+
+
+def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO:
+    text_stream = _NonClosingTextIOWrapper(
+        io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
+        "utf-16-le",
+        "strict",
+        line_buffering=True,
+    )
+    return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO:
+    text_stream =
_NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/env-llmeval/lib/python3.10/site-packages/click/core.py b/env-llmeval/lib/python3.10/site-packages/click/core.py new file mode 100644 index 0000000000000000000000000000000000000000..cc65e896bf2d754d74b54a84ac501b80127f83ca --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/core.py @@ -0,0 +1,3042 @@ +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import update_wrapper +from gettext import gettext as _ +from gettext import ngettext +from itertools import repeat +from types import TracebackType + +from . import types +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. 
+ """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. + """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. 
+ :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. + :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: Show the default value for commands. If this + value is not set, it defaults to the value from the parent + context. ``Command.show_default`` overrides this default for the + specific command. + + .. versionchanged:: 8.1 + The ``show_default`` parameter is overridden by + ``Command.show_default``, instead of the other way around. + + .. versionchanged:: 8.0 + The ``show_default`` parameter defaults to the value from the + parent context. + + .. versionchanged:: 7.1 + Added the ``show_default`` parameter. + + .. versionchanged:: 4.0 + Added the ``color``, ``ignore_unknown_options``, and + ``max_content_width`` parameters. + + .. versionchanged:: 3.0 + Added the ``allow_extra_args`` and ``allow_interspersed_args`` + parameters. + + .. versionchanged:: 2.0 + Added the ``resilient_parsing``, ``help_option_names``, and + ``token_normalize_func`` parameters. + """ + + #: The formatter class to create with :meth:`make_formatter`. + #: + #: .. 
versionadded:: 8.0 + formatter_class: t.Type["HelpFormatter"] = HelpFormatter + + def __init__( + self, + command: "Command", + parent: t.Optional["Context"] = None, + info_name: t.Optional[str] = None, + obj: t.Optional[t.Any] = None, + auto_envvar_prefix: t.Optional[str] = None, + default_map: t.Optional[t.MutableMapping[str, t.Any]] = None, + terminal_width: t.Optional[int] = None, + max_content_width: t.Optional[int] = None, + resilient_parsing: bool = False, + allow_extra_args: t.Optional[bool] = None, + allow_interspersed_args: t.Optional[bool] = None, + ignore_unknown_options: t.Optional[bool] = None, + help_option_names: t.Optional[t.List[str]] = None, + token_normalize_func: t.Optional[t.Callable[[str], str]] = None, + color: t.Optional[bool] = None, + show_default: t.Optional[bool] = None, + ) -> None: + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: Map of parameter names to their parsed values. Parameters + #: with ``expose_value=False`` are not stored. + self.params: t.Dict[str, t.Any] = {} + #: the leftover arguments. + self.args: t.List[str] = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args: t.List[str] = [] + #: the collected prefixes of the command's options. + self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() + + if obj is None and parent is not None: + obj = parent.obj + + #: the user object stored. + self.obj: t.Any = obj + self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. + if ( + default_map is None + and info_name is not None + and parent is not None + and parent.default_map is not None + ): + default_map = parent.default_map.get(info_name) + + self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`result_callback`. + self.invoked_subcommand: t.Optional[str] = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + + #: The width of the terminal (None is autodetection). + self.terminal_width: t.Optional[int] = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width: t.Optional[int] = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. 
versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. versionadded:: 3.0 + self.allow_interspersed_args: bool = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options: bool = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ["--help"] + + #: The names for the help options. + self.help_option_names: t.List[str] = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func: t.Optional[ + t.Callable[[str], str] + ] = token_normalize_func + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing: bool = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. + if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: t.Optional[bool] = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: t.Optional[bool] = show_default + + self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: t.Dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. 
versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. 
+ + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. + """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional["Context"] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
+ """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "t.Callable[..., V]", + *args: t.Any, + **kwargs: t.Any, + ) -> V: + ... + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "Command", + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + ... + + def invoke( + __self, # noqa: B902 + __callback: t.Union["Command", "t.Callable[..., V]"], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Union[t.Any, V]: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = t.cast("t.Callable[..., V]", other_cmd.callback) + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward( + __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. 
versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.MutableMapping[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. 
This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, info_name=info_name, parent=parent, **extra # type: ignore + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. + """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invocable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": + ... + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: + ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! 
+ # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt) as e: + echo(file=sys.stderr) + raise Abort() from e + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + + .. versionchanged:: 8.2.0 + Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). + """ + if complete_var is None: + complete_name = prog_name.replace("-", "_").replace(".", "_") + complete_var = f"_{complete_name}_COMPLETE".upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. 
This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params: t.List["Parameter"] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. + """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. 
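A minimal sketch of how the ``get_usage``/``get_help`` helpers defined in this class are typically exercised (assuming the vendored Click 8.x is importable as ``click``; the command shown is illustrative only, not part of the diff):

import click

@click.command()
@click.option("--count", default=1, show_default=True, help="Number of greetings.")
@click.argument("name")
def hello(count, name):
    """Greet NAME a given number of times."""
    for _ in range(count):
        click.echo(f"Hello, {name}!")

# get_usage()/get_help() expect a Context; Command.main() normally builds one.
ctx = click.Context(hello, info_name="hello")   # assumes Click 8.x
click.echo(hello.get_usage(ctx))                # e.g. "Usage: hello [OPTIONS] NAME"
click.echo(hello.get_help(ctx))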
+ """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + def show_help(ctx: Context, param: "Parameter", value: str) -> None: + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help=_("Show this message and exit."), + ) + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. + """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. 
+ + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + if self.help is not None: + # truncate the help text to the first form feed + text = inspect.cleandoc(self.help).partition("\f")[0] + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + :param attrs: Other command arguments described in :class:`Command`. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." 
+ ) + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(__value, *args, **kwargs): # type: ignore + inner = old_callback(__value, *args, **kwargs) + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv + + return decorator + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
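The normalization fallback described in the comment above is driven by ``Context.token_normalize_func``; a small sketch (assuming the vendored Click 8.x is importable as ``click``) that makes subcommand lookup case-insensitive:

import click

# token_normalize_func is a regular Context setting; resolve_command()
# retries get_command() with the normalized token when the raw name is unknown.
@click.group(context_settings={"token_normalize_func": str.lower})
def cli():
    pass

@cli.command()
def sync():
    click.echo("syncing")

# "cli SYNC" then resolves to the "sync" subcommand.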
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError + + def list_commands(self, ctx: Context) -> t.List[str]: + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is + the most common way to implement nesting in Click. + + :param name: The name of the group command. + :param commands: A dict mapping names to :class:`Command` objects. + Can also be a list of :class:`Command`, which will use + :attr:`Command.name` to create the dict. + :param attrs: Other command arguments described in + :class:`MultiCommand`, :class:`Command`, and + :class:`BaseCommand`. + + .. versionchanged:: 8.0 + The ``commands`` argument can be a list of command objects. + """ + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: t.Optional[t.Type[Command]] = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. 
versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[ + t.Union[t.MutableMapping[str, Command], t.Sequence[Command]] + ] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.MutableMapping[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: + ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: + ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'command(**kwargs)(callable)' to provide arguments." + (func,) = args + args = () + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> "Group": + ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: + ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'group(**kwargs)(callable)' to provide arguments." 
+ (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> "Group": + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> t.List[str]: + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + + See :class:`MultiCommand` and :class:`Command` for the description of + ``name`` and ``attrs``. + """ + + def __init__( + self, + name: t.Optional[str] = None, + sources: t.Optional[t.List[MultiCommand]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + #: The list of registered multi commands. + self.sources: t.List[MultiCommand] = sources or [] + + def add_source(self, multi_cmd: MultiCommand) -> None: + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> t.List[str]: + rv: t.Set[str] = set() + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> t.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The latter is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. 
+ :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + required: bool = False, + default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, + callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, + nargs: t.Optional[int] = None, + multiple: bool = False, + metavar: t.Optional[str] = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, + shell_complete: t.Optional[ + t.Callable[ + [Context, "Parameter", str], + t.Union[t.List["CompletionItem"], t.List[str]], + ] + ] = None, + ) -> None: + self.name: t.Optional[str] + self.opts: t.List[str] + self.secondary_opts: t.List[str] + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type: types.ParamType = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." 
+ ) + + # Skip no default or callable default. + check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
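The lookup order documented above (``Context.default_map`` first, then the parameter's own default) in a small sketch, assuming the vendored Click 8.x is on the import path:

import click

@click.command(context_settings={"default_map": {"workers": 8}})
@click.option("--workers", default=2, show_default=True)
@click.option("--retries", default=3, show_default=True)
def run(workers, retries):
    # get_parameter_source() reports where each value came from.
    ctx = click.get_current_context()
    click.echo(f"workers={workers} ({ctx.get_parameter_source('workers').name})")
    click.echo(f"retries={retries} ({ctx.get_parameter_source('retries').name})")

# With no command-line arguments this prints:
#   workers=8 (DEFAULT_MAP)   <- taken from Context.default_map
#   retries=3 (DEFAULT)       <- falls back to the declared default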
+ """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator[t.Any]: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + + def convert(value: t.Any) -> t.Any: + return self.type(value, param=self, ctx=ctx) + + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] 
+ value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. 
Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page. Normally, environment variables are not + shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. + :param hidden: hide this option from help outputs. + :param attrs: Other command arguments described in :class:`Parameter`. + + .. versionchanged:: 8.1.0 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1.0 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1.0 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. 
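A compact sketch of several Option behaviours listed above (prompting, hidden and confirmed input, counting, paired boolean flags), again assuming Click 8.x:

import click

@click.command()
@click.option("--username", prompt=True)                  # prompts if not given
@click.option("--password", prompt=True, hide_input=True,
              confirmation_prompt=True)                   # hidden, asked twice
@click.option("-v", "--verbose", count=True)              # -vvv -> verbose == 3
@click.option("--color/--no-color", default=True, show_default=True)
def login(username, password, verbose, color):
    click.echo(f"user={username} verbose={verbose} color={color}")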
+ """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str, None] = None, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + self.default: t.Union[t.Any, t.Callable[[], t.Any]] + + if is_flag and default_is_missing and not self.required: + if multiple: + self.default = () + else: + self.default = False + + if flag_value is None: + flag_value = not self.default + + self.type: types.ParamType + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. + self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." 
+ ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or (show_default and (default_value is not None)): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. 
+ default_string = split_opt( + (self.opts if self.default else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return t.cast(Option, param).flag_value + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. This is different from None + # to distinguish from the flag not being given at all. 
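+        # Example (illustrative comment, not part of upstream Click): an
+        # option declared roughly as
+        #     @click.option("--name", is_flag=False, flag_value="anonymous", prompt=True)
+        # may be passed as just "--name" with no value; the parser then emits
+        # the sentinel and this method either prompts or falls back to
+        # flag_value, as handled below.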
+ if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the constructor of :class:`Parameter`. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." 
+ ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/env-llmeval/lib/python3.10/site-packages/click/decorators.py b/env-llmeval/lib/python3.10/site-packages/click/decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..d9bba9502ca353bca5136f43c92436ff584f06e1 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/decorators.py @@ -0,0 +1,561 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") +T = t.TypeVar("T") +_AnyCallable = t.Callable[..., t.Any] +FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command]) + + +def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]": + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator( + object_type: t.Type[T], ensure: bool = False +) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]: + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + ctx = get_current_context() + + obj: t.Optional[T] + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." 
+ ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator # type: ignore[return-value] + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R: + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator # type: ignore[return-value] + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +# variant: no call, directly as decorator for a function. +@t.overload +def command(name: _AnyCallable) -> Command: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) +@t.overload +def command( + name: t.Optional[str], + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) +@t.overload +def command( + name: None = None, + *, + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def command( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Command]: + ... + + +def command( + name: t.Union[t.Optional[str], _AnyCallable] = None, + cls: t.Optional[t.Type[CmdType]] = None, + **attrs: t.Any, +) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. 
+ """ + + func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = t.cast(t.Type[CmdType], Command) + + def decorator(f: _AnyCallable) -> CmdType: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + if t.TYPE_CHECKING: + assert cls is not None + assert not callable(name) + + cmd = cls( + name=name or f.__name__.lower().replace("_", "-"), + callback=f, + params=params, + **attrs, + ) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +GrpType = t.TypeVar("GrpType", bound=Group) + + +# variant: no call, directly as decorator for a function. +@t.overload +def group(name: _AnyCallable) -> Group: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) +@t.overload +def group( + name: t.Optional[str], + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) +@t.overload +def group( + name: None = None, + *, + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def group( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Group]: + ... + + +def group( + name: t.Union[str, _AnyCallable, None] = None, + cls: t.Optional[t.Type[GrpType]] = None, + **attrs: t.Any, +) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if cls is None: + cls = t.cast(t.Type[GrpType], Group) + + if callable(name): + return command(cls=cls, **attrs)(name) + + return command(name, cls, **attrs) + + +def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument( + *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default argument class, refer to :class:`Argument` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the argument class to instantiate. 
This defaults to + :class:`Argument`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Argument + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def option( + *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default option class, refer to :class:`Option` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Option + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. 
If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. + """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata # type: ignore + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + message % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--help`` option which immediately prints the help page + and exits the program. + + This is usually unnecessary, as the ``--help`` option is added to + each command automatically unless ``add_help_option=False`` is + passed. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) diff --git a/env-llmeval/lib/python3.10/site-packages/click/exceptions.py b/env-llmeval/lib/python3.10/site-packages/click/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..fe68a3613f74e5e82da4e3eedc7d9451977838dd --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/exceptions.py @@ -0,0 +1,288 @@ +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .utils import echo +from .utils import format_filename + +if t.TYPE_CHECKING: + from .core import Command + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]] +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + + echo(_("Error: {message}").format(message=self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd: t.Optional["Command"] = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. 
+ :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". {msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. 
versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename: str = format_filename(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code: int = code diff --git a/env-llmeval/lib/python3.10/site-packages/click/formatting.py b/env-llmeval/lib/python3.10/site-packages/click/formatting.py new file mode 100644 index 0000000000000000000000000000000000000000..ddd2a2f825f206164eb9efb0a5c41528365beb85 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. 
This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/env-llmeval/lib/python3.10/site-packages/click/globals.py b/env-llmeval/lib/python3.10/site-packages/click/globals.py new file mode 100644 index 0000000000000000000000000000000000000000..480058f10dd6a8205d1bff0b94de7ae347a7629a --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/globals.py @@ -0,0 +1,68 @@ +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": + ... + + +@t.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: + ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/env-llmeval/lib/python3.10/site-packages/click/parser.py b/env-llmeval/lib/python3.10/site-packages/click/parser.py new file mode 100644 index 0000000000000000000000000000000000000000..5fa7adfac842bfa5689fd1a41ae4017be1ebff6f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/parser.py @@ -0,0 +1,529 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. 
+ """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. 
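+        # (Descriptive note added here: shell completion relies on this
+        # leniency, since the partial command line handed over by the shell
+        # often ends inside an open quote and the half-typed token should
+        # still be usable.)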
+ out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes: t.Set[str] = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: t.Any, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List["CoreParameter"] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. 
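+        #: (Illustrative example, not upstream text: with interspersing
+        #: disabled, ``cli --verbose sub --fast`` stops option parsing at
+        #: ``sub`` so that ``--fast`` is left for the subcommand's own
+        #: parser.)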
+ self.allow_interspersed_args: bool = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options: bool = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). 
+ # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we recombine the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. 
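+                # Illustrative case (comment added for exposition): given
+                # "--name --verbose" where "--name" allows omitting its value,
+                # "--verbose" is not consumed as the value; the sentinel is
+                # returned and flag_value / prompting is applied later.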
+ value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/env-llmeval/lib/python3.10/site-packages/click/py.typed b/env-llmeval/lib/python3.10/site-packages/click/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/env-llmeval/lib/python3.10/site-packages/click/shell_completion.py b/env-llmeval/lib/python3.10/site-packages/click/shell_completion.py new file mode 100644 index 0000000000000000000000000000000000000000..dc9e00b9b0c6f4903b674f03343e887bd490b081 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/shell_completion.py @@ -0,0 +1,596 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. 
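+
+    As a purely illustrative sketch, a custom completion callback might
+    build items such as::
+
+        [CompletionItem("alpha", help="first choice"),
+         CompletionItem("beta", help="second choice")]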
+ + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. + :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value: t.Any = value + self.type: str = type + self.help: t.Optional[str] = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. +_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + %(complete_func)s "$@" +else + # eval/source/. command, register function for later + compdef %(complete_func)s %(prog_name)s +fi +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. 
+ :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). + """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. + """ + + def __init__( + self, + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> t.Dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions( + self, args: t.List[str], incomplete: str + ) -> t.List[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. 
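+
+        As a rough illustration, the Bash implementation below would
+        return text such as::
+
+            plain,build
+            plain,deploy
+
+        where each line is ``type,value`` as produced by
+        :meth:`format_completion` (the values here are hypothetical).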
+ """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + @staticmethod + def _check_version() -> None: + import subprocess + + output = subprocess.run( + ["bash", "-c", 'echo "${BASH_VERSION}"'], stdout=subprocess.PIPE + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + echo( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." + ), + err=True, + ) + else: + echo( + _("Couldn't detect Bash version, shell completion is not supported."), + err=True, + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete]) + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: ShellCompleteType, name: t.Optional[str] = None +) -> ShellCompleteType: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + return cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. 
If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. + """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + # Will be None if expose_value is False. + value = ctx.params.get(param.name) + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + args: t.List[str], +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + sub_ctx = ctx + + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. 
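+
+    Roughly speaking: if ``incomplete`` itself looks like an option name,
+    the command is returned so it can complete option names; if the last
+    complete arg is an option such as ``--name`` that is still waiting for
+    its value, that :class:`Option` is returned instead (``--name`` is a
+    hypothetical example).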
+ """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/env-llmeval/lib/python3.10/site-packages/click/termui.py b/env-llmeval/lib/python3.10/site-packages/click/termui.py new file mode 100644 index 0000000000000000000000000000000000000000..db7a4b286174fdf26f3251631a2066eda2fa5bea --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/termui.py @@ -0,0 +1,784 @@ +import inspect +import io +import itertools +import sys +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. 
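+# As a purely illustrative sketch, a test harness could rebind them like:
+#
+#     termui.visible_prompt_func = lambda prompt="": "canned input"
+#
+# (click.testing.CliRunner.isolation() swaps them for its own capturing
+# versions in much the same way.)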
+visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. 
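+            # (f is getpass.getpass when hide_input is set, otherwise the
+            # module-level visible_prompt_func, which defaults to input.)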
+ return f(" ") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 + continue + if not confirmation_prompt: + return result + while True: + value2 = prompt_func(confirmation_prompt) + is_empty = not value and not value2 + if value2 or is_empty: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: t.Optional[bool] = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. 
+ :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. 
By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. 
versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + + # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor + echo("\033[2J\033[1;1H", nl=False) + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. 
versionchanged:: 7.0 + Added support for bright colors. + + .. versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.AnyStr]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. 
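+
+    A minimal illustrative sketch::
+
+        new_text = click.edit("original text")
+
+        if new_text is None:
+            ...  # the editor was closed without saving; nothing to do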
+ + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. 
versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/env-llmeval/lib/python3.10/site-packages/click/testing.py b/env-llmeval/lib/python3.10/site-packages/click/testing.py new file mode 100644 index 0000000000000000000000000000000000000000..e0df0d2a657fe19523957b85964b9956e5c78a30 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/testing.py @@ -0,0 +1,479 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import formatting +from . import termui +from . import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO[t.Any], input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(input) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. 
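+        #: (use :attr:`stdout` for the decoded string)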
+ self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. + self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self) -> str: + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self) -> str: + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self) -> str: + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self) -> str: + exc_str = repr(self.exception) if self.exception else "okay" + return f"<{type(self).__name__} {exc_str}>" + + +class CliRunner: + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__( + self, + charset: str = "utf-8", + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + echo_stdin: bool = False, + mix_stderr: bool = True, + ) -> None: + self.charset = charset + self.env: t.Mapping[str, t.Optional[str]] = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli: "BaseCommand") -> str: + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env( + self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None + ) -> t.Mapping[str, t.Optional[str]]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + color: bool = False, + ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into sys.stdin. 
+ :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + ``stderr`` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: "BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, 
t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
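+
+        A minimal illustrative sketch (``cli`` stands in for some Click
+        command defined elsewhere)::
+
+            runner = CliRunner()
+
+            with runner.isolated_filesystem():
+                with open("hello.txt", "w") as f:
+                    f.write("Hello World!")
+
+                result = runner.invoke(cli, ["hello.txt"])
+                assert result.exit_code == 0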
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) + os.chdir(dt) + + try: + yield dt + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(dt) + except OSError: # noqa: B014 + pass diff --git a/env-llmeval/lib/python3.10/site-packages/click/types.py b/env-llmeval/lib/python3.10/site-packages/click/types.py new file mode 100644 index 0000000000000000000000000000000000000000..2b1d1797f2e115e9bc976bcaf7d8e1884a91e91c --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/types.py @@ -0,0 +1,1089 @@ +import os +import stat +import sys +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import format_filename +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. 
versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name: str = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = sys.getfilesystemencoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. 
+ return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. 
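A hedged sketch of the subclassing contract described for ``ParamType`` above (set ``name``, implement ``convert``, report errors via ``fail``); the ``CommaSeparated`` type is invented for illustration and is not part of Click.

```python
import click


class CommaSeparated(click.ParamType):
    name = "items"

    def convert(self, value, param, ctx):
        # Accept values that are already converted.
        if isinstance(value, (list, tuple)):
            return list(value)
        if not value.strip():
            self.fail("expected at least one comma-separated item", param, ctx)
        return [item.strip() for item in value.split(",")]


# Choice always returns one of the originally declared spellings, even
# when matching case-insensitively.
assert click.Choice(["Red", "Blue"], case_sensitive=False)("red") == "Red"
```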
+ """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats: t.Sequence[str] = formats or [ + "%Y-%m-%d", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S", + ] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type[t.Any]] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. 
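The clamping and open-bound behaviour above is easiest to see through the ``IntRange`` and ``FloatRange`` subclasses defined below; a small sketch, calling ``convert`` directly with ``param`` and ``ctx`` set to ``None``:

```python
import click

# With clamp=True an out-of-range value snaps to the nearest bound
# instead of raising a BadParameter error.
count = click.IntRange(1, 10, clamp=True)
assert count.convert("0", None, None) == 1
assert count.convert("99", None, None) == 10

# min_open / max_open (Click 8.0+) exclude the bound itself.
prob = click.FloatRange(0.0, 1.0, min_open=True)
assert prob.convert("0.5", None, None) == 0.5
```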
+ """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. 
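A short usage sketch for the ``File`` type described above (the ``copy`` command is illustrative, not part of Click):

```python
import click


@click.command()
@click.argument("src", type=click.File("r"))
@click.argument("dst", type=click.File("w", lazy=True, atomic=True))
def copy(src, dst):
    # "-" may be passed for SRC or DST to use stdin or stdout instead.
    # The lazy, atomic destination is only created once it is written to.
    dst.write(src.read())
```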
+ """ + + name = "filename" + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool: + if self.lazy is not None: + return self.lazy + if os.fspath(value) == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, + value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]], + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> t.IO[t.Any]: + if _is_file_like(value): + return value + + value = t.cast("t.Union[str, os.PathLike[str]]", value) + + try: + lazy = self.resolve_lazy_flag(value) + + if lazy: + lf = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + if ctx is not None: + ctx.call_on_close(lf.close_intelligently) + + return t.cast(t.IO[t.Any], lf) + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: # noqa: B014 + self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]": + return hasattr(value, "read") or hasattr(value, "write") + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. 
+ :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``path_type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type[t.Any]] = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name: str = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result( + self, value: "t.Union[str, os.PathLike[str]]" + ) -> "t.Union[str, bytes, os.PathLike[str]]": + if self.type is not None and not isinstance(value, self.type): + if self.type is str: + return os.fsdecode(value) + elif self.type is bytes: + return os.fsencode(value) + else: + return t.cast("os.PathLike[str]", self.type(value)) + + return value + + def convert( + self, + value: "t.Union[str, os.PathLike[str]]", + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> "t.Union[str, bytes, os.PathLike[str]]": + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. 
+ import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} '{filename}' is a directory.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. 
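A hedged sketch of the ``Tuple`` type in use: passing a Python tuple literal as the option type selects it, and the option's arity is taken from the tuple.

```python
import click


@click.command()
@click.option("--point", type=(str, int), help="a NAME INTEGER pair")
def mark(point):
    name, value = point
    click.echo(f"{name} -> {value}")
```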
+ """ + + def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None: + self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
+UUID = UUIDParameterType() diff --git a/env-llmeval/lib/python3.10/site-packages/click/utils.py b/env-llmeval/lib/python3.10/site-packages/click/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d536434f0bd00cd6fd910c506f5b85a8e485b964 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/click/utils.py @@ -0,0 +1,624 @@ +import os +import re +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType +from types import TracebackType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]": + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]: + try: + return func(*args, **kwargs) + except Exception: + pass + return None + + return update_wrapper(wrapper, func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(sys.getfilesystemencoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. + if words[0] == "\b": + words = words[1:] + + total_length = 0 + last_index = len(words) - 1 + + for i, word in enumerate(words): + total_length += len(word) + (i > 0) + + if total_length > max_length: # too long, truncate + break + + if word[-1] == ".": # sentence end, truncate without "..." + return " ".join(words[: i + 1]) + + if total_length == max_length and i != last_index: + break # not at sentence end, truncate with "..." + else: + return " ".join(words) # no truncation needed + + # Account for the length of the suffix. + total_length += len("...") + + # remove words until the length is short enough + while i > 0: + total_length -= len(words[i]) + (i > 0) + + if total_length <= max_length: + break + + i -= 1 + + return " ".join(words[:i]) + "..." + + +class LazyFile: + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. 
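A minimal sketch of the lazy-open behaviour described above (the file name is illustrative):

```python
from click.utils import LazyFile

lf = LazyFile("report.txt", mode="w")  # nothing is created yet
lf.write("done\n")                     # the file is opened on first access
lf.close_intelligently()               # closed only because LazyFile opened it
```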
+ """ + + def __init__( + self, + filename: t.Union[str, "os.PathLike[str]"], + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, + ): + self.name: str = os.fspath(filename) + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + self._f: t.Optional[t.IO[t.Any]] + self.should_close: bool + + if self.name == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.open(), name) + + def __repr__(self) -> str: + if self._f is not None: + return repr(self._f) + return f"" + + def open(self) -> t.IO[t.Any]: + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. + """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except OSError as e: # noqa: E402 + from .exceptions import FileError + + raise FileError(self.name, hint=e.strerror) from e + self._f = rv + return rv + + def close(self) -> None: + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self) -> None: + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self) -> "LazyFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.close_intelligently() + + def __iter__(self) -> t.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO[t.Any]) -> None: + self._file: t.IO[t.Any] = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> "KeepOpenFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> t.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.Any]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. 
+ :param file: The file to write to. Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + return + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. 
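A brief, hedged sketch of ``echo`` and the stream helpers described above:

```python
import click

click.echo("plain text")                # newline added, output flushed
click.echo(b"\x00raw bytes", nl=False)  # bytes are routed to the binary stream
# Styling is stripped automatically when the target is not a terminal.
click.echo(click.style("warning", fg="yellow"), err=True)

stderr = click.get_text_stream("stderr")
stderr.write("written directly to the text wrapper\n")
```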
+ """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO[t.Any]: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast( + t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic) + ) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast(t.IO[t.Any], KeepOpenFile(f)) + + return f + + +def format_filename( + filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]", + shorten: bool = False, +) -> str: + """Format a filename as a string for display. Ensures the filename can be + displayed by replacing any invalid bytes or surrogate escapes in the name + with the replacement character ``�``. + + Invalid bytes or surrogate escapes will raise an error when written to a + stream with ``errors="strict". This will typically happen with ``stdout`` + when the locale is something like ``en_GB.UTF-8``. + + Many scenarios *are* safe to write surrogates though, due to PEP 538 and + PEP 540, including: + + - Writing to ``stderr``, which uses ``errors="backslashreplace"``. + - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens + stdout and stderr with ``errors="surrogateescape"``. + - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. + - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. + Python opens stdout and stderr with ``errors="surrogateescape"``. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + else: + filename = os.fspath(filename) + + if isinstance(filename, bytes): + filename = filename.decode(sys.getfilesystemencoding(), "replace") + else: + filename = filename.encode("utf-8", "surrogateescape").decode( + "utf-8", "replace" + ) + + return filename + + +def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. 
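A hedged sketch of ``open_file`` and ``format_filename`` as described above (the file names are illustrative):

```python
from click.utils import format_filename, open_file

# "-" maps to stdout in write mode and is wrapped so that leaving the
# `with` block does not close the standard stream.
with open_file("-", "w") as out:
    out.write("hello\n")

# lazy + atomic: the target is only created when written, via a temporary
# file that replaces it on close.
with open_file("notes.txt", "w", lazy=True, atomic=True) as f:
    f.write("saved\n")

print(format_filename(b"caf\xc3\xa9.txt"))  # bytes decoded safely for display
```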
+ + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Windows (roaming): + ``C:\Users\\AppData\Roaming\Foo Bar`` + Windows (not roaming): + ``C:\Users\\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no effect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. + """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper: + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped: t.IO[t.Any]) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + # It is set to "" inside a Shiv or PEX zipapp. 
+ if getattr(_main, "__package__", None) in {None, ""} or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. + + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionchanged:: 8.1 + Invalid glob patterns are treated as empty expansions rather + than raising an error. + + .. versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + try: + matches = glob(arg, recursive=glob_recursive) + except re.error: + matches = [] + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/INSTALLER b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/LICENSE b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f2927f5f8147f137783bb5072794999e04655cfd --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2019, threadpoolctl contributors + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..6d7926f7e43d3a7b3b33e5ea3e8d201077c2bfa7 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/METADATA @@ -0,0 +1,383 @@ +Metadata-Version: 2.1 +Name: threadpoolctl +Version: 3.4.0 +Summary: threadpoolctl +Home-page: https://github.com/joblib/threadpoolctl +License: BSD-3-Clause +Author: Thomas Moreau +Author-email: thomas.moreau.2010@gmail.com +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules + +# Thread-pool Controls [![Build Status](https://dev.azure.com/joblib/threadpoolctl/_apis/build/status/joblib.threadpoolctl?branchName=master)](https://dev.azure.com/joblib/threadpoolctl/_build/latest?definitionId=1&branchName=master) [![codecov](https://codecov.io/gh/joblib/threadpoolctl/branch/master/graph/badge.svg)](https://codecov.io/gh/joblib/threadpoolctl) + +Python helpers to limit the number of threads used in the +threadpool-backed of common native libraries used for scientific +computing and data science (e.g. BLAS and OpenMP). + +Fine control of the underlying thread-pool size can be useful in +workloads that involve nested parallelism so as to mitigate +oversubscription issues. 
+ +## Installation + +- For users, install the last published version from PyPI: + + ```bash + pip install threadpoolctl + ``` + +- For contributors, install from the source repository in developer + mode: + + ```bash + pip install -r dev-requirements.txt + flit install --symlink + ``` + + then you run the tests with pytest: + + ```bash + pytest + ``` + +## Usage + +### Command Line Interface + +Get a JSON description of thread-pools initialized when importing python +packages such as numpy or scipy for instance: + +``` +python -m threadpoolctl -i numpy scipy.linalg +[ + { + "filepath": "/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so", + "prefix": "libmkl_rt", + "user_api": "blas", + "internal_api": "mkl", + "version": "2019.0.4", + "num_threads": 2, + "threading_layer": "intel" + }, + { + "filepath": "/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so", + "prefix": "libiomp", + "user_api": "openmp", + "internal_api": "openmp", + "version": null, + "num_threads": 4 + } +] +``` + +The JSON information is written on STDOUT. If some of the packages are missing, +a warning message is displayed on STDERR. + +### Python Runtime Programmatic Introspection + +Introspect the current state of the threadpool-enabled runtime libraries +that are loaded when importing Python packages: + +```python +>>> from threadpoolctl import threadpool_info +>>> from pprint import pprint +>>> pprint(threadpool_info()) +[] + +>>> import numpy +>>> pprint(threadpool_info()) +[{'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so', + 'internal_api': 'mkl', + 'num_threads': 2, + 'prefix': 'libmkl_rt', + 'threading_layer': 'intel', + 'user_api': 'blas', + 'version': '2019.0.4'}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libiomp', + 'user_api': 'openmp', + 'version': None}] + +>>> import xgboost +>>> pprint(threadpool_info()) +[{'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so', + 'internal_api': 'mkl', + 'num_threads': 2, + 'prefix': 'libmkl_rt', + 'threading_layer': 'intel', + 'user_api': 'blas', + 'version': '2019.0.4'}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libiomp', + 'user_api': 'openmp', + 'version': None}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libgomp.so.1.0.0', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libgomp', + 'user_api': 'openmp', + 'version': None}] +``` + +In the above example, `numpy` was installed from the default anaconda channel and comes +with MKL and its Intel OpenMP (`libiomp5`) implementation while `xgboost` was installed +from pypi.org and links against GNU OpenMP (`libgomp`) so both OpenMP runtimes are +loaded in the same Python program. 
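Since both runtimes report the same `user_api`, they can be limited together; a brief sketch (the sections below cover the full API):

```python
from threadpoolctl import threadpool_limits

# Cap every loaded OpenMP runtime (libiomp5 and libgomp alike) at two
# threads for the duration of the block.
with threadpool_limits(limits=2, user_api="openmp"):
    pass  # run the OpenMP-backed workload here
```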
+ +The state of these libraries is also accessible through the object oriented API: + +```python +>>> from threadpoolctl import ThreadpoolController, threadpool_info +>>> from pprint import pprint +>>> import numpy +>>> controller = ThreadpoolController() +>>> pprint(controller.info()) +[{'architecture': 'Haswell', + 'filepath': '/home/jeremie/miniconda/envs/dev/lib/libopenblasp-r0.3.17.so', + 'internal_api': 'openblas', + 'num_threads': 4, + 'prefix': 'libopenblas', + 'threading_layer': 'pthreads', + 'user_api': 'blas', + 'version': '0.3.17'}] + +>>> controller.info() == threadpool_info() +True +``` + +### Setting the Maximum Size of Thread-Pools + +Control the number of threads used by the underlying runtime libraries +in specific sections of your Python program: + +```python +>>> from threadpoolctl import threadpool_limits +>>> import numpy as np + +>>> with threadpool_limits(limits=1, user_api='blas'): +... # In this block, calls to blas implementation (like openblas or MKL) +... # will be limited to use only one thread. They can thus be used jointly +... # with thread-parallelism. +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +``` + +The threadpools can also be controlled via the object oriented API, which is especially +useful to avoid searching through all the loaded shared libraries each time. It will +however not act on libraries loaded after the instantiation of the +`ThreadpoolController`: + +```python +>>> from threadpoolctl import ThreadpoolController +>>> import numpy as np +>>> controller = ThreadpoolController() + +>>> with controller.limit(limits=1, user_api='blas'): +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +``` + +### Restricting the limits to the scope of a function + +`threadpool_limits` and `ThreadpoolController` can also be used as decorators to set +the maximum number of threads used by the supported libraries at a function level. The +decorators are accessible through their `wrap` method: + +```python +>>> from threadpoolctl import ThreadpoolController, threadpool_limits +>>> import numpy as np +>>> controller = ThreadpoolController() + +>>> @controller.wrap(limits=1, user_api='blas') +... # or @threadpool_limits.wrap(limits=1, user_api='blas') +... def my_func(): +... # Inside this function, calls to blas implementation (like openblas or MKL) +... # will be limited to use only one thread. +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +... +``` + +### Switching the FlexiBLAS backend + +`FlexiBLAS` is a BLAS wrapper for which the BLAS backend can be switched at runtime. +`threadpoolctl` exposes python bindings for this feature. 
Here's an example, but note
+that this part of the API is experimental and subject to change without deprecation:
+
+```python
+>>> from threadpoolctl import ThreadpoolController
+>>> import numpy as np
+>>> controller = ThreadpoolController()
+
+>>> controller.info()
+[{'user_api': 'blas',
+  'internal_api': 'flexiblas',
+  'num_threads': 1,
+  'prefix': 'libflexiblas',
+  'filepath': '/usr/local/lib/libflexiblas.so.3.3',
+  'version': '3.3.1',
+  'available_backends': ['NETLIB', 'OPENBLASPTHREAD', 'ATLAS'],
+  'loaded_backends': ['NETLIB'],
+  'current_backend': 'NETLIB'}]
+
+# Retrieve the flexiblas controller
+>>> flexiblas_ct = controller.select(internal_api="flexiblas").lib_controllers[0]
+
+# Switch the backend with one predefined at build time (listed in "available_backends")
+>>> flexiblas_ct.switch_backend("OPENBLASPTHREAD")
+>>> controller.info()
+[{'user_api': 'blas',
+  'internal_api': 'flexiblas',
+  'num_threads': 4,
+  'prefix': 'libflexiblas',
+  'filepath': '/usr/local/lib/libflexiblas.so.3.3',
+  'version': '3.3.1',
+  'available_backends': ['NETLIB', 'OPENBLASPTHREAD', 'ATLAS'],
+  'loaded_backends': ['NETLIB', 'OPENBLASPTHREAD'],
+  'current_backend': 'OPENBLASPTHREAD'},
+ {'user_api': 'blas',
+  'internal_api': 'openblas',
+  'num_threads': 4,
+  'prefix': 'libopenblas',
+  'filepath': '/usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.8.so',
+  'version': '0.3.8',
+  'threading_layer': 'pthreads',
+  'architecture': 'Haswell'}]
+
+# It's also possible to directly give the path to a shared library
+>>> flexiblas_ct.switch_backend("/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so")
+>>> controller.info()
+[{'user_api': 'blas',
+  'internal_api': 'flexiblas',
+  'num_threads': 2,
+  'prefix': 'libflexiblas',
+  'filepath': '/usr/local/lib/libflexiblas.so.3.3',
+  'version': '3.3.1',
+  'available_backends': ['NETLIB', 'OPENBLASPTHREAD', 'ATLAS'],
+  'loaded_backends': ['NETLIB',
+                      'OPENBLASPTHREAD',
+                      '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so'],
+  'current_backend': '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so'},
+ {'user_api': 'openmp',
+  'internal_api': 'openmp',
+  'num_threads': 4,
+  'prefix': 'libomp',
+  'filepath': '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libomp.so',
+  'version': None},
+ {'user_api': 'blas',
+  'internal_api': 'openblas',
+  'num_threads': 4,
+  'prefix': 'libopenblas',
+  'filepath': '/usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.8.so',
+  'version': '0.3.8',
+  'threading_layer': 'pthreads',
+  'architecture': 'Haswell'},
+ {'user_api': 'blas',
+  'internal_api': 'mkl',
+  'num_threads': 2,
+  'prefix': 'libmkl_rt',
+  'filepath': '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so.2',
+  'version': '2024.0-Product',
+  'threading_layer': 'gnu'}]
+```
+
+You can observe that the previously linked OpenBLAS shared object stays loaded by
+the Python program indefinitely, but FlexiBLAS itself no longer delegates BLAS calls
+to OpenBLAS, as indicated by the `current_backend` attribute.
+
+### Writing a custom library controller
+
+Currently, `threadpoolctl` has support for `OpenMP` and the main `BLAS` libraries.
+However, it can also be used to control the threadpool of other native libraries,
+provided that they expose an API to get and set the limit on the number of threads.
+For that, one must implement a controller for this library and register it with
+`threadpoolctl`, as sketched below.
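+
+Here is a minimal, hypothetical sketch of such a controller. The library
+(`my_threaded_lib`) and its C entry points are made up, and the exact attribute and
+method names expected by `threadpoolctl` should be double-checked against the
+`LibController` docstring and the complete example linked below:
+
+```python
+import ctypes
+
+from threadpoolctl import LibController, register
+
+
+class MyThreadedLibController(LibController):
+    # Values reported in the entries returned by threadpool_info().
+    user_api = "my_threaded_lib"
+    internal_api = "my_threaded_lib"
+    # Shared library name prefixes that threadpoolctl looks for among the
+    # libraries loaded in the current process.
+    filename_prefixes = ("libmy_threaded_lib",)
+
+    def get_num_threads(self):
+        # self.dynlib is assumed to be the ctypes handle on the loaded library.
+        return self.dynlib.my_threaded_lib_get_num_threads()
+
+    def set_num_threads(self, num_threads):
+        self.dynlib.my_threaded_lib_set_num_threads(num_threads)
+
+    def get_version(self):
+        get_version = self.dynlib.my_threaded_lib_get_version
+        get_version.restype = ctypes.c_char_p
+        return get_version().decode("utf-8")
+
+
+# Once registered, the library is handled by threadpool_info, threadpool_limits
+# and ThreadpoolController like the built-in OpenMP and BLAS controllers.
+register(MyThreadedLibController)
+```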
+
+A custom controller must be a subclass of the `LibController` class and implement
+the attributes and methods described in the docstring of `LibController`. Then this
+new controller class must be registered using the `threadpoolctl.register` function.
+A complete example can be found [here](
+  https://github.com/joblib/threadpoolctl/blob/master/tests/_pyMylib/__init__.py).
+
+### Sequential BLAS within OpenMP parallel region
+
+When one wants to have sequential BLAS calls within an OpenMP parallel region, it's
+safer to set `limits="sequential_blas_under_openmp"` since setting `limits=1` and
+`user_api="blas"` might not lead to the expected behavior in some configurations
+(e.g. OpenBLAS with the OpenMP threading layer
+https://github.com/xianyi/OpenBLAS/issues/2985).
+
+### Known Limitations
+
+- `threadpool_limits` can fail to limit the number of inner threads when nesting
+  parallel loops managed by distinct OpenMP runtime implementations (for instance
+  libgomp from GCC and libomp from clang/llvm or libiomp from ICC).
+
+  See the `test_openmp_nesting` function in [tests/test_threadpoolctl.py](
+  https://github.com/joblib/threadpoolctl/blob/master/tests/test_threadpoolctl.py)
+  for an example. More information can be found at:
+  https://github.com/jeremiedbb/Nested_OpenMP
+
+  Note however that this problem does not happen when `threadpool_limits` is
+  used to limit the number of threads used internally by BLAS calls that are
+  themselves nested under OpenMP parallel loops. `threadpool_limits` works as
+  expected, even if the inner BLAS implementation relies on a distinct OpenMP
+  implementation.
+
+- Using Intel OpenMP (ICC) and LLVM OpenMP (clang) in the same Python program
+  under Linux is known to cause problems. See the following guide for more details
+  and workarounds:
+  https://github.com/joblib/threadpoolctl/blob/master/multiple_openmp.md
+
+- Setting the maximum number of threads of the OpenMP and BLAS libraries has a global
+  effect and impacts the whole Python process. There is no thread level isolation as
+  these libraries do not offer thread-local APIs to configure the number of threads to
+  use in nested parallel calls.
+
+
+## Maintainers
+
+To make a release:
+
+Bump the version number (`__version__`) in `threadpoolctl.py`.
+
+Build the distribution archives:
+
+```bash
+pip install flit
+flit build
+```
+
+Check the contents of `dist/`.
+
+If everything is fine, make a commit for the release, tag it, push the
+tag to GitHub and then:
+
+```bash
+flit publish
+```
+
+### Credits
+
+The initial dynamic library introspection code was written by @anton-malakhov
+for the smp package available at https://github.com/IntelPython/smp .
+
+threadpoolctl extends this for other operating systems. Contrary to smp,
+threadpoolctl does not attempt to limit the size of Python multiprocessing
+pools (threads or processes) or set operating system-level CPU affinity
+constraints: threadpoolctl only interacts with native libraries via their
+public runtime APIs.
+ diff --git a/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..84ffc3b1ef6fff86f795014525b5acc89061e39f --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/threadpoolctl.cpython-310.pyc,, +threadpoolctl-3.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +threadpoolctl-3.4.0.dist-info/LICENSE,sha256=gaxhkHUkiwblNmC2UtEOSF9GdfXQrg-X6iI3DaH34js,1507 +threadpoolctl-3.4.0.dist-info/METADATA,sha256=WmvFYjEeyrhqDdncJ4DhdmIZVv3MqUNNoHf67cuImGI,13249 +threadpoolctl-3.4.0.dist-info/RECORD,, +threadpoolctl-3.4.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +threadpoolctl.py,sha256=LU6jh8Vejwl17kgLDxenbZXK2asvUc2AjTV-HERwx40,49753 diff --git a/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/WHEEL b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..3b5e64b5e6c4a210201d1676a891fd57b15cda99 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/threadpoolctl-3.4.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/INSTALLER b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/LICENSE b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f26bcf4d2de6eb136e31006ca3ab447d5e488adf --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/LICENSE @@ -0,0 +1,279 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? 
(1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 
+ +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. 
+ + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/METADATA b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..5089b4ddde4835e1dd7dd8ef6ac52f27398c0ae5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/METADATA @@ -0,0 +1,66 @@ +Metadata-Version: 2.1 +Name: typing_extensions +Version: 4.11.0 +Summary: Backported and Experimental Type Hints for Python 3.8+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development +Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues +Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md +Project-URL: Documentation, https://typing-extensions.readthedocs.io/ +Project-URL: Home, https://github.com/python/typing_extensions +Project-URL: Q & A, https://github.com/python/typing/discussions +Project-URL: Repository, https://github.com/python/typing_extensions + +# Typing Extensions + +[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing) + +[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) – +[PyPI](https://pypi.org/project/typing-extensions/) + +## Overview + +The `typing_extensions` module serves two related purposes: + +- Enable use of new type system features on older Python versions. For example, + `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows + users on previous Python versions to use it too. +- Enable experimentation with new type system PEPs before they are accepted and + added to the `typing` module. + +`typing_extensions` is treated specially by static type checkers such as +mypy and pyright. Objects defined in `typing_extensions` are treated the same +way as equivalent forms in `typing`. + +`typing_extensions` uses +[Semantic Versioning](https://semver.org/). The +major version will be incremented only for backwards-incompatible changes. +Therefore, it's safe to depend +on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`, +where `x.y` is the first version that includes all features you need. + +## Included items + +See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a +complete listing of module contents. + +## Contributing + +See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md) +for how to contribute to `typing_extensions`. 
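+
+For example, the `TypeGuard` backport mentioned in the overview above can be used on
+Python 3.8 and 3.9, where `typing.TypeGuard` does not exist yet (the `is_str_list`
+helper below is only an illustration):
+
+```python
+import sys
+from typing import List
+
+if sys.version_info >= (3, 10):
+    from typing import TypeGuard
+else:
+    from typing_extensions import TypeGuard
+
+
+def is_str_list(values: List[object]) -> TypeGuard[List[str]]:
+    """Narrow a list of arbitrary objects down to a list of strings."""
+    return all(isinstance(value, str) for value in values)
+
+
+items: List[object] = ["hello", "world"]
+if is_str_list(items):
+    # A static type checker now treats items as List[str] in this branch.
+    print(", ".join(items))
+```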
+ diff --git a/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/RECORD b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..b06fbb5eeff1415d272c12b9800824a544d3618d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/typing_extensions.cpython-310.pyc,, +typing_extensions-4.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.11.0.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 +typing_extensions-4.11.0.dist-info/METADATA,sha256=rhGXH-iuwULXNZDAtjXcjwU2jcxotMTkFiUg0OqYXzI,2967 +typing_extensions-4.11.0.dist-info/RECORD,, +typing_extensions-4.11.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +typing_extensions.py,sha256=mvJ5zBIuLMsaVGHZjRLsGh8ouwg5Kr3uNbWCHX8-Ao0,122293 diff --git a/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/WHEEL b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..3b5e64b5e6c4a210201d1676a891fd57b15cda99 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/typing_extensions-4.11.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any