diff --git a/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..9d227a0cc43c3268d15722b763bd94ad298645a1 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/METADATA b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..dfe37d52dfbbfdffc5b3181923e51c610046ff12 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/METADATA @@ -0,0 +1,93 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.1.5 +Summary: Safely add untrusted strings to HTML/XML markup. 
+Home-page: https://palletsprojects.com/p/markupsafe/
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/markupsafe/
+Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
+Project-URL: Chat, https://discord.gg/pallets
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+    >>> from markupsafe import Markup, escape
+
+    >>> # escape replaces special characters and wraps in Markup
+    >>> escape("<script>alert(document.cookie);</script>")
+    Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+    >>> # wrap in Markup to mark text "safe" and prevent escaping
+    >>> Markup("<strong>Hello</strong>")
+    Markup('<strong>Hello</strong>')
+
+    >>> escape(Markup("<strong>Hello</strong>"))
+    Markup('<strong>Hello</strong>')
+
+    >>> # Markup is a str subclass
+    >>> # methods and operators escape their arguments
+    >>> template = Markup("Hello <em>{name}</em>")
+    >>> template.format(name='"World"')
+    Markup('Hello <em>&#34;World&#34;</em>')
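+
+A further sketch (not part of the upstream README): since ``Markup`` knows
+about HTML, tags can also be stripped back out of marked-up text:
+
+.. code-block:: pycon
+
+    >>> Markup("<em>Hello</em> <strong>World</strong>").striptags()
+    'Hello World'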
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+- Documentation: https://markupsafe.palletsprojects.com/
+- Changes: https://markupsafe.palletsprojects.com/changes/
+- PyPI Releases: https://pypi.org/project/MarkupSafe/
+- Source Code: https://github.com/pallets/markupsafe/
+- Issue Tracker: https://github.com/pallets/markupsafe/issues/
+- Chat: https://discord.gg/pallets
diff --git a/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/RECORD b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..1a961ed7a4d3a2d38028bbd9c5997c3bef5ad107
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/RECORD
@@ -0,0 +1,14 @@
+MarkupSafe-2.1.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+MarkupSafe-2.1.5.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
+MarkupSafe-2.1.5.dist-info/METADATA,sha256=2dRDPam6OZLfpX0wg1JN5P3u9arqACxVSfdGmsJU7o8,3003
+MarkupSafe-2.1.5.dist-info/RECORD,,
+MarkupSafe-2.1.5.dist-info/WHEEL,sha256=1FEjxEYgybphwh9S0FO9IcZ0B-NIeM2ko8OzhFZeOeQ,152
+MarkupSafe-2.1.5.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+markupsafe/__init__.py,sha256=r7VOTjUq7EMQ4v3p4R1LoVOGJg6ysfYRncLr34laRBs,10958
+markupsafe/__pycache__/__init__.cpython-310.pyc,,
+markupsafe/__pycache__/_native.cpython-310.pyc,,
+markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713
+markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083
+markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so,sha256=kPt-fhZ_RG7PUbDvwmyC26ZvRJ9DvUlF3hszBIB6_xs,44240
+markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
+markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..1d812513305907d2ee59b95d161fdb54d1ab559c
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: false
+Tag: cp310-cp310-manylinux_2_17_x86_64
+Tag: cp310-cp310-manylinux2014_x86_64
+
diff --git a/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..75bf729258f9daef77370b6df1a57940f90fc23f
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt
@@ -0,0 +1 @@
+markupsafe
diff --git a/llmeval-env/lib/python3.10/site-packages/dill/__diff.py b/llmeval-env/lib/python3.10/site-packages/dill/__diff.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a8937534fac7f4791a2883cbc65eee9d45b56e5
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/dill/__diff.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2008-2016 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD. The full license text is available at:
+# - https://github.com/uqfoundation/dill/blob/master/LICENSE
+
+"""
+Module to show if an object has changed since it was memorised
+"""
+
+import builtins
+import os
+import sys
+import types
+try:
+    import numpy
+    HAS_NUMPY = True
+except ImportError:
+    HAS_NUMPY = False
+
+# pypy doesn't use reference counting
+getrefcount = getattr(sys, 'getrefcount', lambda x:0)
+
+# memo of objects indexed by id to a tuple (attributes, sequence items)
+# attributes is a dict indexed by attribute name to attribute id
+# sequence items is either a list of ids, or a dictionary of keys to ids
+memo = {}
+id_to_obj = {}
+# types that cannot have changing attributes
+builtins_types = set((str, list, dict, set, frozenset, int))
+dont_memo = set(id(i) for i in (memo, sys.modules, sys.path_importer_cache,
+                                os.environ, id_to_obj))
+
+
+def get_attrs(obj):
+    """
+    Gets all the attributes of an object through its __dict__ or returns None
+    """
+    if type(obj) in builtins_types \
+       or type(obj) is type and obj in builtins_types:
+        return
+    return getattr(obj, '__dict__', None)
+
+
+def get_seq(obj, cache={str: False, frozenset: False, list: True, set: True,
+                        dict: True, tuple: True, type: False,
+                        types.ModuleType: False, types.FunctionType: False,
+                        types.BuiltinFunctionType: False}):
+    """
+    Gets all the items in a sequence or returns None
+    """
+    try:
+        o_type = obj.__class__
+    except AttributeError:
+        o_type = type(obj)
+    hsattr = hasattr
+    if o_type in cache:
+        if cache[o_type]:
+            if hsattr(obj, "copy"):
+                return obj.copy()
+            return obj
+    elif HAS_NUMPY and o_type in (numpy.ndarray, numpy.ma.core.MaskedConstant):
+        if obj.shape and obj.size:
+            return obj
+        else:
+            return []
+    elif hsattr(obj, "__contains__") and hsattr(obj, "__iter__") \
+       and hsattr(obj, "__len__") and hsattr(o_type, "__contains__") \
+       and hsattr(o_type, "__iter__") and hsattr(o_type, "__len__"):
+        cache[o_type] = True
+        if hsattr(obj, "copy"):
+            return obj.copy()
+        return obj
+    else:
+        cache[o_type] = False
+        return None
+
+
+def memorise(obj, force=False):
+    """
+    Adds an object to the memo, and recursively adds all the object's
+    attributes, and if it is a container, its items. Use force=True to update
+    an object already in the memo. Updating is not done recursively.
+    """
+    obj_id = id(obj)
+    if obj_id in memo and not force or obj_id in dont_memo:
+        return
+    id_ = id
+    g = get_attrs(obj)
+    if g is None:
+        attrs_id = None
+    else:
+        attrs_id = dict((key,id_(value)) for key, value in g.items())
+
+    s = get_seq(obj)
+    if s is None:
+        seq_id = None
+    elif hasattr(s, "items"):
+        seq_id = dict((id_(key),id_(value)) for key, value in s.items())
+    elif not hasattr(s, "__len__"): #XXX: avoid TypeError from unexpected case
+        seq_id = None
+    else:
+        seq_id = [id_(i) for i in s]
+
+    memo[obj_id] = attrs_id, seq_id
+    id_to_obj[obj_id] = obj
+    mem = memorise
+    if g is not None:
+        [mem(value) for key, value in g.items()]
+
+    if s is not None:
+        if hasattr(s, "items"):
+            [(mem(key), mem(item))
+             for key, item in s.items()]
+        else:
+            if hasattr(s, '__len__'):
+                [mem(item) for item in s]
+            else: mem(s)
+
+
+def release_gone():
+    itop, mp, src = id_to_obj.pop, memo.pop, getrefcount
+    [(itop(id_), mp(id_)) for id_, obj in list(id_to_obj.items())
+     if src(obj) < 4] #XXX: correct for pypy?
+
+
+def whats_changed(obj, seen=None, simple=False, first=True):
+    """
+    Check an object against the memo. Returns a pair in the form
+    (attribute changes, container changed).
Attribute changes is a dict of + attribute name to attribute value. container changed is a boolean. + If simple is true, just returns a boolean. None for either item means + that it has not been checked yet + """ + # Special cases + if first: + # ignore the _ variable, which only appears in interactive sessions + if "_" in builtins.__dict__: + del builtins._ + if seen is None: + seen = {} + + obj_id = id(obj) + + if obj_id in seen: + if simple: + return any(seen[obj_id]) + return seen[obj_id] + + # Safety checks + if obj_id in dont_memo: + seen[obj_id] = [{}, False] + if simple: + return False + return seen[obj_id] + elif obj_id not in memo: + if simple: + return True + else: + raise RuntimeError("Object not memorised " + str(obj)) + + seen[obj_id] = ({}, False) + + chngd = whats_changed + id_ = id + + # compare attributes + attrs = get_attrs(obj) + if attrs is None: + changed = {} + else: + obj_attrs = memo[obj_id][0] + obj_get = obj_attrs.get + changed = dict((key,None) for key in obj_attrs if key not in attrs) + for key, o in attrs.items(): + if id_(o) != obj_get(key, None) or chngd(o, seen, True, False): + changed[key] = o + + # compare sequence + items = get_seq(obj) + seq_diff = False + if (items is not None) and (hasattr(items, '__len__')): + obj_seq = memo[obj_id][1] + if (len(items) != len(obj_seq)): + seq_diff = True + elif hasattr(obj, "items"): # dict type obj + obj_get = obj_seq.get + for key, item in items.items(): + if id_(item) != obj_get(id_(key)) \ + or chngd(key, seen, True, False) \ + or chngd(item, seen, True, False): + seq_diff = True + break + else: + for i, j in zip(items, obj_seq): # list type obj + if id_(i) != j or chngd(i, seen, True, False): + seq_diff = True + break + seen[obj_id] = changed, seq_diff + if simple: + return changed or seq_diff + return changed, seq_diff + + +def has_changed(*args, **kwds): + kwds['simple'] = True # ignore simple if passed in + return whats_changed(*args, **kwds) + +__import__ = __import__ + + +def _imp(*args, **kwds): + """ + Replaces the default __import__, to allow a module to be memorised + before the user can change it + """ + before = set(sys.modules.keys()) + mod = __import__(*args, **kwds) + after = set(sys.modules.keys()).difference(before) + for m in after: + memorise(sys.modules[m]) + return mod + +builtins.__import__ = _imp +if hasattr(builtins, "_"): + del builtins._ + +# memorise all already imported modules. This implies that this must be +# imported first for any changes to be recorded +for mod in list(sys.modules.values()): + memorise(mod) +release_gone() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/__info__.py b/llmeval-env/lib/python3.10/site-packages/dill/__info__.py new file mode 100644 index 0000000000000000000000000000000000000000..a4592c4eff9961d8b9a6c14af7f1cfa45bc4d9ab --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/__info__.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +''' +----------------------------- +dill: serialize all of Python +----------------------------- + +About Dill +========== + +``dill`` extends Python's ``pickle`` module for serializing and de-serializing +Python objects to the majority of the built-in Python types. 
Serialization +is the process of converting an object to a byte stream, and the inverse +of which is converting a byte stream back to a Python object hierarchy. + +``dill`` provides the user the same interface as the ``pickle`` module, and +also includes some additional features. In addition to pickling Python +objects, ``dill`` provides the ability to save the state of an interpreter +session in a single command. Hence, it would be feasible to save an +interpreter session, close the interpreter, ship the pickled file to +another computer, open a new interpreter, unpickle the session and +thus continue from the 'saved' state of the original interpreter +session. + +``dill`` can be used to store Python objects to a file, but the primary +usage is to send Python objects across the network as a byte stream. +``dill`` is quite flexible, and allows arbitrary user defined classes +and functions to be serialized. Thus ``dill`` is not intended to be +secure against erroneously or maliciously constructed data. It is +left to the user to decide whether the data they unpickle is from +a trustworthy source. + +``dill`` is part of ``pathos``, a Python framework for heterogeneous computing. +``dill`` is in active development, so any user feedback, bug reports, comments, +or suggestions are highly appreciated. A list of issues is located at +https://github.com/uqfoundation/dill/issues, with a legacy list maintained at +https://uqfoundation.github.io/project/pathos/query. + + +Major Features +============== + +``dill`` can pickle the following standard types: + + - none, type, bool, int, float, complex, bytes, str, + - tuple, list, dict, file, buffer, builtin, + - Python classes, namedtuples, dataclasses, metaclasses, + - instances of classes, + - set, frozenset, array, functions, exceptions + +``dill`` can also pickle more 'exotic' standard types: + + - functions with yields, nested functions, lambdas, + - cell, method, unboundmethod, module, code, methodwrapper, + - methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor, + - dictproxy, slice, notimplemented, ellipsis, quit + +``dill`` cannot yet pickle these standard types: + + - frame, generator, traceback + +``dill`` also provides the capability to: + + - save and load Python interpreter sessions + - save and extract the source code from functions and classes + - interactively diagnose pickling errors + + +Current Release +=============== + +The latest released version of ``dill`` is available from: + + https://pypi.org/project/dill + +``dill`` is distributed under a 3-clause BSD license. + + +Development Version +=================== + +You can get the latest development version with all the shiny new features at: + + https://github.com/uqfoundation + +If you have a new contribution, please submit a pull request. 
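+
+As a quick taste of the above (a sketch; the stdlib ``pickle`` raises
+``PicklingError`` on the same lambda)::
+
+    >>> import dill
+    >>> add_one = lambda x: x + 1
+    >>> dill.loads(dill.dumps(add_one))(2)
+    3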
+
+
+Installation
+============
+
+``dill`` can be installed with ``pip``::
+
+    $ pip install dill
+
+To optionally include the ``objgraph`` diagnostic tool in the install::
+
+    $ pip install dill[graph]
+
+To optionally include the ``gprof2dot`` diagnostic tool in the install::
+
+    $ pip install dill[profile]
+
+For Windows users, to optionally install session history tools::
+
+    $ pip install dill[readline]
+
+
+Requirements
+============
+
+``dill`` requires:
+
+  - ``python`` (or ``pypy``), **>=3.8**
+  - ``setuptools``, **>=42**
+
+Optional requirements:
+
+  - ``objgraph``, **>=1.7.2**
+  - ``gprof2dot``, **>=2022.7.29**
+  - ``pyreadline``, **>=1.7.1** (on Windows)
+
+
+Basic Usage
+===========
+
+``dill`` is a drop-in replacement for ``pickle``. Existing code can be
+updated to allow complete pickling using::
+
+    >>> import dill as pickle
+
+or::
+
+    >>> from dill import dumps, loads
+
+``dumps`` converts the object to a unique byte string, and ``loads`` performs
+the inverse operation::
+
+    >>> squared = lambda x: x**2
+    >>> loads(dumps(squared))(3)
+    9
+
+There are a number of options to control serialization which are provided
+as keyword arguments to several ``dill`` functions:
+
+* with *protocol*, the pickle protocol level can be set. This uses the
+  same value as the ``pickle`` module, *DEFAULT_PROTOCOL*.
+* with *byref=True*, ``dill`` behaves a lot more like pickle, with
+  certain objects (like modules) pickled by reference as opposed to
+  attempting to pickle the object itself.
+* with *recurse=True*, objects referred to in the global dictionary are
+  recursively traced and pickled, instead of the default behavior of
+  attempting to store the entire global dictionary (see the short sketch
+  below).
+* with *fmode*, the contents of the file can be pickled along with the file
+  handle, which is useful if the object is being sent over the wire to a
+  remote system which does not have the original file on disk. Options are
+  *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content
+  and *FILE_FMODE* for content and handle.
+* with *ignore=False*, objects reconstructed with types defined in the
+  top-level script environment use the existing type in the environment
+  rather than a possibly different reconstructed type.
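+
+A minimal sketch of passing these keywords per call (reusing ``squared``
+from above; the exact pickle bytes vary by Python version)::
+
+    >>> data = dumps(squared, recurse=True)
+    >>> loads(data)(4)
+    16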
+
+The default serialization can also be set globally in *dill.settings*.
+Thus, we can modify how ``dill`` handles references to the global dictionary
+locally or globally::
+
+    >>> import dill.settings
+    >>> dumps(absolute) == dumps(absolute, recurse=True)
+    False
+    >>> dill.settings['recurse'] = True
+    >>> dumps(absolute) == dumps(absolute, recurse=True)
+    True
+
+``dill`` also includes source code inspection, as an alternative to pickling::
+
+    >>> import dill.source
+    >>> print(dill.source.getsource(squared))
+    squared = lambda x:x**2
+
+To aid in debugging pickling issues, use *dill.detect* which provides
+tools like pickle tracing::
+
+    >>> import dill.detect
+    >>> with dill.detect.trace():
+    >>>     dumps(squared)
+    ┬ F1: <function <lambda> at 0x7fe074f8c280>
+    ├┬ F2: <function _create_function at 0x...>
+    │└ # F2 [34 B]
+    ├┬ Co: <code object <lambda> at 0x7fe07501eb30, file "<stdin>", line 1>
+    │├┬ F2: <function _create_code at 0x...>
+    ││└ # F2 [19 B]
+    │└ # Co [87 B]
+    ├┬ D1: <dict object at 0x...>
+    │└ # D1 [22 B]
+    ├┬ D2: <dict object at 0x...>
+    │└ # D2 [2 B]
+    ├┬ D2: <dict object at 0x...>
+    │├┬ D2: <dict object at 0x...>
+    ││└ # D2 [2 B]
+    │└ # D2 [23 B]
+    └ # F1 [180 B]
+
+With trace, we see how ``dill`` stored the lambda (``F1``) by first storing
+``_create_function``, the underlying code object (``Co``) and ``_create_code``
+(which is used to handle code objects), then we handle the reference to
+the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that
+save the lambda object's state. A ``#`` marks when the object is actually stored.
+
+
+More Information
+================
+
+Probably the best way to get started is to look at the documentation at
+http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that
+demonstrate how ``dill`` can serialize different Python objects. You can
+run the test suite with ``python -m dill.tests``. The contents of any
+pickle file can be examined with ``undill``. As ``dill`` conforms to
+the ``pickle`` interface, the examples and documentation found at
+http://docs.python.org/library/pickle.html also apply to ``dill``
+if one will ``import dill as pickle``. The source code is also generally
+well documented, so further questions may be resolved by inspecting the
+code itself. Please feel free to submit a ticket on github, or ask a
+question on stackoverflow (**@Mike McKerns**).
+If you would like to share how you use ``dill`` in your work, please send
+an email (to **mmckerns at uqfoundation dot org**).
+
+
+Citation
+========
+
+If you use ``dill`` to do research that leads to publication, we ask that you
+acknowledge use of ``dill`` by citing the following in your publication::
+
+    M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis,
+    "Building a framework for predictive science", Proceedings of
+    the 10th Python in Science Conference, 2011;
+    http://arxiv.org/pdf/1202.1056
+
+    Michael McKerns and Michael Aivazis,
+    "pathos: a framework for heterogeneous computing", 2010- ;
+    https://uqfoundation.github.io/project/pathos
+
+Please see https://uqfoundation.github.io/project/pathos or
+http://arxiv.org/pdf/1202.1056 for further information.
+
+'''
+
+__version__ = '0.3.8'
+__author__ = 'Mike McKerns'
+
+__license__ = '''
+Copyright (c) 2004-2016 California Institute of Technology.
+Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+All rights reserved.
+
+This software is available subject to the conditions and terms laid
+out below. By downloading and using this software you are agreeing
+to the following conditions.
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + - Neither the names of the copyright holders nor the names of any of + the contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +''' diff --git a/llmeval-env/lib/python3.10/site-packages/dill/__init__.py b/llmeval-env/lib/python3.10/site-packages/dill/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..549048a438d42547473768f914d13dba3a4a7b71 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/__init__.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +# author, version, license, and long description +try: # the package is installed + from .__info__ import __version__, __author__, __doc__, __license__ +except: # pragma: no cover + import os + import sys + parent = os.path.dirname(os.path.abspath(os.path.dirname(__file__))) + sys.path.append(parent) + # get distribution meta info + from version import (__version__, __author__, + get_license_text, get_readme_as_rst) + __license__ = get_license_text(os.path.join(parent, 'LICENSE')) + __license__ = "\n%s" % __license__ + __doc__ = get_readme_as_rst(os.path.join(parent, 'README.md')) + del os, sys, parent, get_license_text, get_readme_as_rst + + +from ._dill import ( + dump, dumps, load, loads, copy, + Pickler, Unpickler, register, pickle, pickles, check, + DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, HANDLE_FMODE, CONTENTS_FMODE, FILE_FMODE, + PickleError, PickleWarning, PicklingError, PicklingWarning, UnpicklingError, + UnpicklingWarning, +) +from .session import ( + dump_module, load_module, load_module_asdict, + dump_session, load_session # backward compatibility +) +from . import detect, logger, session, source, temp + +# get global settings +from .settings import settings + +# make sure "trace" is turned off +logger.trace(False) + +objects = {} +# local import of dill._objects +#from . 
import _objects
+#objects.update(_objects.succeeds)
+#del _objects
+
+# local import of dill.objtypes
+from . import objtypes as types
+
+def load_types(pickleable=True, unpickleable=True):
+    """load pickleable and/or unpickleable types to ``dill.types``
+
+    ``dill.types`` is meant to mimic the ``types`` module, providing a
+    registry of object types. By default, the module is empty (for import
+    speed purposes). Use the ``load_types`` function to load selected object
+    types to the ``dill.types`` module.
+
+    Args:
+        pickleable (bool, default=True): if True, load pickleable types.
+        unpickleable (bool, default=True): if True, load unpickleable types.
+
+    Returns:
+        None
+    """
+    from importlib import reload
+    # local import of dill.objects
+    from . import _objects
+    if pickleable:
+        objects.update(_objects.succeeds)
+    else:
+        [objects.pop(obj,None) for obj in _objects.succeeds]
+    if unpickleable:
+        objects.update(_objects.failures)
+    else:
+        [objects.pop(obj,None) for obj in _objects.failures]
+    objects.update(_objects.registered)
+    del _objects
+    # reset contents of types to 'empty'
+    [types.__dict__.pop(obj) for obj in list(types.__dict__.keys()) \
+     if obj.find('Type') != -1]
+    # add corresponding types from objects to types
+    reload(types)
+
+def extend(use_dill=True):
+    '''add (or remove) dill types to/from the pickle registry
+
+    by default, ``dill`` populates its types to ``pickle.Pickler.dispatch``.
+    Thus, all ``dill`` types are available upon calling ``'import pickle'``.
+    To drop all ``dill`` types from the ``pickle`` dispatch, *use_dill=False*.
+
+    Args:
+        use_dill (bool, default=True): if True, extend the dispatch table.
+
+    Returns:
+        None
+    '''
+    from ._dill import _revert_extension, _extend
+    if use_dill: _extend()
+    else: _revert_extension()
+    return
+
+extend()
+
+
+def license():
+    """print license"""
+    print (__license__)
+    return
+
+def citation():
+    """print citation"""
+    print (__doc__[-491:-118])
+    return
+
+# end of file
diff --git a/llmeval-env/lib/python3.10/site-packages/dill/_dill.py b/llmeval-env/lib/python3.10/site-packages/dill/_dill.py
new file mode 100644
index 0000000000000000000000000000000000000000..d42432ff3703f3f49fd760340b77bf84ce3e3a95
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/dill/_dill.py
@@ -0,0 +1,2198 @@
+# -*- coding: utf-8 -*-
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2008-2015 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD. The full license text is available at:
+# - https://github.com/uqfoundation/dill/blob/master/LICENSE
+"""
+dill: a utility for serialization of python objects
+
+The primary functions in `dill` are :func:`dump` and
+:func:`dumps` for serialization ("pickling") to a
+file or to a string, respectively, and :func:`load`
+and :func:`loads` for deserialization ("unpickling"),
+similarly, from a file or from a string. Other notable
+functions are :func:`~dill.dump_module` and
+:func:`~dill.load_module`, which are used to save and
+restore module objects, including an interpreter session.
+
+Based on code written by Oren Tirosh and Armin Ronacher.
+Extended to a (near) full set of the builtin types (in types module),
+and coded to the pickle interface, by <mmckerns@caltech.edu>.
+Initial port to python3 by Jonathan Dobson, continued by mmckerns.
+Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns.
+Tested against CH16+ Std. Lib. ... TBD.
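+
+A minimal round-trip sketch of the primary interface::
+
+    >>> import dill
+    >>> pickled = dill.dumps({'a': 1, 'b': 2})
+    >>> dill.loads(pickled)
+    {'a': 1, 'b': 2}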
+""" + +from __future__ import annotations + +__all__ = [ + 'dump','dumps','load','loads','copy', + 'Pickler','Unpickler','register','pickle','pickles','check', + 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE', + 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError', + 'UnpicklingWarning', +] + +__module__ = 'dill' + +import warnings +from .logger import adapter as logger +from .logger import trace as _trace +log = logger # backward compatibility (see issue #582) + +import os +import sys +diff = None +_use_diff = False +OLD38 = (sys.hexversion < 0x3080000) +OLD39 = (sys.hexversion < 0x3090000) +OLD310 = (sys.hexversion < 0x30a0000) +OLD312a7 = (sys.hexversion < 0x30c00a7) +#XXX: get types from .objtypes ? +import builtins as __builtin__ +from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler +from pickle import GLOBAL, POP +from _thread import LockType +from _thread import RLock as RLockType +#from io import IOBase +from types import CodeType, FunctionType, MethodType, GeneratorType, \ + TracebackType, FrameType, ModuleType, BuiltinMethodType +BufferType = memoryview #XXX: unregistered +ClassType = type # no 'old-style' classes +EllipsisType = type(Ellipsis) +#FileType = IOBase +NotImplementedType = type(NotImplemented) +SliceType = slice +TypeType = type # 'new-style' classes #XXX: unregistered +XRangeType = range +from types import MappingProxyType as DictProxyType, new_class +from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError +import __main__ as _main_module +import marshal +import gc +# import zlib +import abc +import dataclasses +from weakref import ReferenceType, ProxyType, CallableProxyType +from collections import OrderedDict +from enum import Enum, EnumMeta +from functools import partial +from operator import itemgetter, attrgetter +GENERATOR_FAIL = False +import importlib.machinery +EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES) +try: + import ctypes + HAS_CTYPES = True + # if using `pypy`, pythonapi is not found + IS_PYPY = not hasattr(ctypes, 'pythonapi') +except ImportError: + HAS_CTYPES = False + IS_PYPY = False +NumpyUfuncType = None +NumpyDType = None +NumpyArrayType = None +try: + if not importlib.machinery.PathFinder().find_spec('numpy'): + raise ImportError("No module named 'numpy'") + NumpyUfuncType = True + NumpyDType = True + NumpyArrayType = True +except ImportError: + pass +def __hook__(): + global NumpyArrayType, NumpyDType, NumpyUfuncType + from numpy import ufunc as NumpyUfuncType + from numpy import ndarray as NumpyArrayType + from numpy import dtype as NumpyDType + return True +if NumpyArrayType: # then has numpy + def ndarraysubclassinstance(obj_type): + if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__): + return False + # anything below here is a numpy array (or subclass) instance + __hook__() # import numpy (so the following works!!!) + # verify that __reduce__ has not been overridden + if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \ + or obj_type.__reduce__ is not NumpyArrayType.__reduce__: + return False + return True + def numpyufunc(obj_type): + return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__) + def numpydtype(obj_type): + if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__): + return False + # anything below here is a numpy dtype + __hook__() # import numpy (so the following works!!!) 
+ return obj_type is type(NumpyDType) # handles subclasses +else: + def ndarraysubclassinstance(obj): return False + def numpyufunc(obj): return False + def numpydtype(obj): return False + +from types import GetSetDescriptorType, ClassMethodDescriptorType, \ + WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \ + MethodWrapperType #XXX: unused + +# make sure to add these 'hand-built' types to _typemap +CellType = type((lambda x: lambda y: x)(0).__closure__[0]) +PartialType = type(partial(int, base=2)) +SuperType = type(super(Exception, TypeError())) +ItemGetterType = type(itemgetter(0)) +AttrGetterType = type(attrgetter('__repr__')) + +try: + from functools import _lru_cache_wrapper as LRUCacheType +except ImportError: + LRUCacheType = None + +if not isinstance(LRUCacheType, type): + LRUCacheType = None + +def get_file_type(*args, **kwargs): + open = kwargs.pop("open", __builtin__.open) + f = open(os.devnull, *args, **kwargs) + t = type(f) + f.close() + return t + +IS_PYODIDE = sys.platform == 'emscripten' + +FileType = get_file_type('rb', buffering=0) +TextWrapperType = get_file_type('r', buffering=-1) +BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1) +BufferedReaderType = get_file_type('rb', buffering=-1) +BufferedWriterType = get_file_type('wb', buffering=-1) +try: + from _pyio import open as _open + PyTextWrapperType = get_file_type('r', buffering=-1, open=_open) + PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open) + PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open) + PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open) +except ImportError: + PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None +from io import BytesIO as StringIO +InputType = OutputType = None +from socket import socket as SocketType +#FIXME: additionally calls ForkingPickler.register several times +from multiprocessing.reduction import _reduce_socket as reduce_socket +try: #pragma: no cover + IS_IPYTHON = __IPYTHON__ # is True + ExitType = None # IPython.core.autocall.ExitAutocall + IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython') +except NameError: + IS_IPYTHON = False + try: ExitType = type(exit) # apparently 'exit' can be removed + except NameError: ExitType = None + IPYTHON_SINGLETONS = () + +import inspect +import typing + + +### Shims for different versions of Python and dill +class Sentinel(object): + """ + Create a unique sentinel object that is pickled as a constant. + """ + def __init__(self, name, module_name=None): + self.name = name + if module_name is None: + # Use the calling frame's module + self.__module__ = inspect.currentframe().f_back.f_globals['__name__'] + else: + self.__module__ = module_name # pragma: no cover + def __repr__(self): + return self.__module__ + '.' + self.name # pragma: no cover + def __copy__(self): + return self # pragma: no cover + def __deepcopy__(self, memo): + return self # pragma: no cover + def __reduce__(self): + return self.name + def __reduce_ex__(self, protocol): + return self.name + +from . import _shims +from ._shims import Reduce, Getattr + +### File modes +#: Pickles the file handle, preserving mode. The position of the unpickled +#: object is as for a new file handle. +HANDLE_FMODE = 0 +#: Pickles the file contents, creating a new file if on load the file does +#: not exist. 
The position = min(pickled position, EOF) and mode is chosen +#: as such that "best" preserves behavior of the original file. +CONTENTS_FMODE = 1 +#: Pickles the entire file (handle and contents), preserving mode and position. +FILE_FMODE = 2 + +### Shorthands (modified from python2.5/lib/pickle.py) +def copy(obj, *args, **kwds): + """ + Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`). + + See :func:`dumps` and :func:`loads` for keyword arguments. + """ + ignore = kwds.pop('ignore', Unpickler.settings['ignore']) + return loads(dumps(obj, *args, **kwds), ignore=ignore) + +def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None): + """ + Pickle an object to a file. + + See :func:`dumps` for keyword arguments. + """ + from .settings import settings + protocol = settings['protocol'] if protocol is None else int(protocol) + _kwds = kwds.copy() + _kwds.update(dict(byref=byref, fmode=fmode, recurse=recurse)) + Pickler(file, protocol, **_kwds).dump(obj) + return + +def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None): + """ + Pickle an object to a string. + + *protocol* is the pickler protocol, as defined for Python *pickle*. + + If *byref=True*, then dill behaves a lot more like pickle as certain + objects (like modules) are pickled by reference as opposed to attempting + to pickle the object itself. + + If *recurse=True*, then objects referred to in the global dictionary + are recursively traced and pickled, instead of the default behavior + of attempting to store the entire global dictionary. This is needed for + functions defined via *exec()*. + + *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`, + or :const:`FILE_FMODE`) indicates how file handles will be pickled. + For example, when pickling a data file handle for transfer to a remote + compute service, *FILE_FMODE* will include the file contents in the + pickle and cursor position so that a remote method can operate + transparently on an object with an open file handle. + + Default values for keyword arguments can be set in :mod:`dill.settings`. + """ + file = StringIO() + dump(obj, file, protocol, byref, fmode, recurse, **kwds)#, strictio) + return file.getvalue() + +def load(file, ignore=None, **kwds): + """ + Unpickle an object from a file. + + See :func:`loads` for keyword arguments. + """ + return Unpickler(file, ignore=ignore, **kwds).load() + +def loads(str, ignore=None, **kwds): + """ + Unpickle an object from a string. + + If *ignore=False* then objects whose class is defined in the module + *__main__* are updated to reference the existing class in *__main__*, + otherwise they are left to refer to the reconstructed type, which may + be different. + + Default values for keyword arguments can be set in :mod:`dill.settings`. 
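+
+    A short sketch of the round trip (any picklable object works)::
+
+        >>> from dill import dumps, loads
+        >>> loads(dumps([1, 2, 3]))
+        [1, 2, 3]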
+ """ + file = StringIO(str) + return load(file, ignore, **kwds) + +# def dumpzs(obj, protocol=None): +# """pickle an object to a compressed string""" +# return zlib.compress(dumps(obj, protocol)) + +# def loadzs(str): +# """unpickle an object from a compressed string""" +# return loads(zlib.decompress(str)) + +### End: Shorthands ### + +class MetaCatchingDict(dict): + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def __missing__(self, key): + if issubclass(key, type): + return save_type + else: + raise KeyError() + +class PickleWarning(Warning, PickleError): + pass + +class PicklingWarning(PickleWarning, PicklingError): + pass + +class UnpicklingWarning(PickleWarning, UnpicklingError): + pass + +### Extend the Picklers +class Pickler(StockPickler): + """python's Pickler extended to interpreter sessions""" + dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \ + = MetaCatchingDict(StockPickler.dispatch.copy()) + """The dispatch table, a dictionary of serializing functions used + by Pickler to save objects of specific types. Use :func:`pickle` + or :func:`register` to associate types to custom functions. + + :meta hide-value: + """ + _session = False + from .settings import settings + + def __init__(self, file, *args, **kwds): + settings = Pickler.settings + _byref = kwds.pop('byref', None) + #_strictio = kwds.pop('strictio', None) + _fmode = kwds.pop('fmode', None) + _recurse = kwds.pop('recurse', None) + StockPickler.__init__(self, file, *args, **kwds) + self._main = _main_module + self._diff_cache = {} + self._byref = settings['byref'] if _byref is None else _byref + self._strictio = False #_strictio + self._fmode = settings['fmode'] if _fmode is None else _fmode + self._recurse = settings['recurse'] if _recurse is None else _recurse + self._postproc = OrderedDict() + self._file = file + + def save(self, obj, save_persistent_id=True): + # numpy hack + obj_type = type(obj) + if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch): + # register if the object is a numpy ufunc + # thanks to Paul Kienzle for pointing out ufuncs didn't pickle + if numpyufunc(obj_type): + @register(obj_type) + def save_numpy_ufunc(pickler, obj): + logger.trace(pickler, "Nu: %s", obj) + name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) + StockPickler.save_global(pickler, obj, name=name) + logger.trace(pickler, "# Nu") + return + # NOTE: the above 'save' performs like: + # import copy_reg + # def udump(f): return f.__name__ + # def uload(name): return getattr(numpy, name) + # copy_reg.pickle(NumpyUfuncType, udump, uload) + # register if the object is a numpy dtype + if numpydtype(obj_type): + @register(obj_type) + def save_numpy_dtype(pickler, obj): + logger.trace(pickler, "Dt: %s", obj) + pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj) + logger.trace(pickler, "# Dt") + return + # NOTE: the above 'save' performs like: + # import copy_reg + # def uload(name): return type(NumpyDType(name)) + # def udump(f): return uload, (f.type,) + # copy_reg.pickle(NumpyDTypeType, udump, uload) + # register if the object is a subclassed numpy array instance + if ndarraysubclassinstance(obj_type): + @register(obj_type) + def save_numpy_array(pickler, obj): + logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype) + npdict = getattr(obj, '__dict__', None) + f, args, state = obj.__reduce__() + pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj) + logger.trace(pickler, "# Nu") + return + 
# end numpy hack + + if GENERATOR_FAIL and obj_type is GeneratorType: + msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType + raise PicklingError(msg) + StockPickler.save(self, obj, save_persistent_id) + + save.__doc__ = StockPickler.save.__doc__ + + def dump(self, obj): #NOTE: if settings change, need to update attributes + logger.trace_setup(self) + StockPickler.dump(self, obj) + dump.__doc__ = StockPickler.dump.__doc__ + +class Unpickler(StockUnpickler): + """python's Unpickler extended to interpreter sessions and more types""" + from .settings import settings + _session = False + + def find_class(self, module, name): + if (module, name) == ('__builtin__', '__main__'): + return self._main.__dict__ #XXX: above set w/save_module_dict + elif (module, name) == ('__builtin__', 'NoneType'): + return type(None) #XXX: special case: NoneType missing + if module == 'dill.dill': module = 'dill._dill' + return StockUnpickler.find_class(self, module, name) + + def __init__(self, *args, **kwds): + settings = Pickler.settings + _ignore = kwds.pop('ignore', None) + StockUnpickler.__init__(self, *args, **kwds) + self._main = _main_module + self._ignore = settings['ignore'] if _ignore is None else _ignore + + def load(self): #NOTE: if settings change, need to update attributes + obj = StockUnpickler.load(self) + if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'): + if not self._ignore: + # point obj class to main + try: obj.__class__ = getattr(self._main, type(obj).__name__) + except (AttributeError,TypeError): pass # defined in a file + #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ? + return obj + load.__doc__ = StockUnpickler.load.__doc__ + pass + +''' +def dispatch_table(): + """get the dispatch table of registered types""" + return Pickler.dispatch +''' + +pickle_dispatch_copy = StockPickler.dispatch.copy() + +def pickle(t, func): + """expose :attr:`~Pickler.dispatch` table for user-created extensions""" + Pickler.dispatch[t] = func + return + +def register(t): + """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table""" + def proxy(func): + Pickler.dispatch[t] = func + return func + return proxy + +def _revert_extension(): + """drop dill-registered types from pickle's dispatch table""" + for type, func in list(StockPickler.dispatch.items()): + if func.__module__ == __name__: + del StockPickler.dispatch[type] + if type in pickle_dispatch_copy: + StockPickler.dispatch[type] = pickle_dispatch_copy[type] + +def use_diff(on=True): + """ + Reduces size of pickles by only including object which have changed. + + Decreases pickle size but increases CPU time needed. + Also helps avoid some unpickleable objects. + MUST be called at start of script, otherwise changes will not be recorded. + """ + global _use_diff, diff + _use_diff = on + if _use_diff and diff is None: + try: + from . 
import diff as d + except ImportError: + import diff as d + diff = d + +def _create_typemap(): + import types + d = dict(list(__builtin__.__dict__.items()) + \ + list(types.__dict__.items())).items() + for key, value in d: + if getattr(value, '__module__', None) == 'builtins' \ + and type(value) is type: + yield key, value + return +_reverse_typemap = dict(_create_typemap()) +_reverse_typemap.update({ + 'PartialType': PartialType, + 'SuperType': SuperType, + 'ItemGetterType': ItemGetterType, + 'AttrGetterType': AttrGetterType, +}) +if sys.hexversion < 0x30800a2: + _reverse_typemap.update({ + 'CellType': CellType, + }) + +# "Incidental" implementation specific types. Unpickling these types in another +# implementation of Python (PyPy -> CPython) is not guaranteed to work + +# This dictionary should contain all types that appear in Python implementations +# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types +x=OrderedDict() +_incedental_reverse_typemap = { + 'FileType': FileType, + 'BufferedRandomType': BufferedRandomType, + 'BufferedReaderType': BufferedReaderType, + 'BufferedWriterType': BufferedWriterType, + 'TextWrapperType': TextWrapperType, + 'PyBufferedRandomType': PyBufferedRandomType, + 'PyBufferedReaderType': PyBufferedReaderType, + 'PyBufferedWriterType': PyBufferedWriterType, + 'PyTextWrapperType': PyTextWrapperType, +} + +_incedental_reverse_typemap.update({ + "DictKeysType": type({}.keys()), + "DictValuesType": type({}.values()), + "DictItemsType": type({}.items()), + + "OdictKeysType": type(x.keys()), + "OdictValuesType": type(x.values()), + "OdictItemsType": type(x.items()), +}) + +if ExitType: + _incedental_reverse_typemap['ExitType'] = ExitType +if InputType: + _incedental_reverse_typemap['InputType'] = InputType + _incedental_reverse_typemap['OutputType'] = OutputType + +''' +try: + import symtable + _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table) +except: #FIXME: fails to pickle + pass + +if sys.hexversion >= 0x30a00a0: + _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines()) +''' + +if sys.hexversion >= 0x30b00b0: + from types import GenericAlias + _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,)))) + ''' + _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions()) + ''' + +try: + import winreg + _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType +except ImportError: + pass + +_reverse_typemap.update(_incedental_reverse_typemap) +_incedental_types = set(_incedental_reverse_typemap.values()) + +del x + +_typemap = dict((v, k) for k, v in _reverse_typemap.items()) + +def _unmarshal(string): + return marshal.loads(string) + +def _load_type(name): + return _reverse_typemap[name] + +def _create_type(typeobj, *args): + return typeobj(*args) + +def _create_function(fcode, fglobals, fname=None, fdefaults=None, + fclosure=None, fdict=None, fkwdefaults=None): + # same as FunctionType, but enable passing __dict__ to new function, + # __dict__ is the storehouse for attributes added after function creation + func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure) + if fdict is not None: + func.__dict__.update(fdict) #XXX: better copy? option to copy? 
+ if fkwdefaults is not None: + func.__kwdefaults__ = fkwdefaults + # 'recurse' only stores referenced modules/objects in fglobals, + # thus we need to make sure that we have __builtins__ as well + if "__builtins__" not in func.__globals__: + func.__globals__["__builtins__"] = globals()["__builtins__"] + # assert id(fglobals) == id(func.__globals__) + return func + +class match: + """ + Make avaialable a limited structural pattern matching-like syntax for Python < 3.10 + + Patterns can be only tuples (without types) currently. + Inspired by the package pattern-matching-PEP634. + + Usage: + >>> with match(args) as m: + >>> if m.case(('x', 'y')): + >>> # use m.x and m.y + >>> elif m.case(('x', 'y', 'z')): + >>> # use m.x, m.y and m.z + + Equivalent native code for Python >= 3.10: + >>> match args: + >>> case (x, y): + >>> # use x and y + >>> case (x, y, z): + >>> # use x, y and z + """ + def __init__(self, value): + self.value = value + self._fields = None + def __enter__(self): + return self + def __exit__(self, *exc_info): + return False + def case(self, args): # *args, **kwargs): + """just handles tuple patterns""" + if len(self.value) != len(args): # + len(kwargs): + return False + #if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())): + # return False + self.args = args # (*args, *kwargs) + return True + @property + def fields(self): + # Only bind names to values if necessary. + if self._fields is None: + self._fields = dict(zip(self.args, self.value)) + return self._fields + def __getattr__(self, item): + return self.fields[item] + +ALL_CODE_PARAMS = [ + # Version New attribute CodeType parameters + ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'), + ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'), + ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'), + ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'), + ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'), + ] +for version, new_attr, params in ALL_CODE_PARAMS: + if hasattr(CodeType, new_attr): + CODE_VERSION = version + CODE_PARAMS = params.split() + break +ENCODE_PARAMS = set(CODE_PARAMS).intersection( + ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable']) + +def _create_code(*args): + if not isinstance(args[0], int): # co_lnotab stored from >= 3.10 + LNOTAB, *args = args + else: # from < 3.10 (or pre-LNOTAB storage) + LNOTAB = b'' + + with match(args) as m: + # Python 3.11/3.12a (18 members) + if m.case(( + 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6] + 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14] + 'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:] + )): + if CODE_VERSION == (3,11): + return CodeType( + *args[:6], + args[6].encode() if hasattr(args[6], 'encode') else 
args[6], # code + *args[7:14], + args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable + args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable + args[16], + args[17], + ) + fields = m.fields + # Python 3.10 or 3.8/3.9 (16 members) + elif m.case(( + 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6] + 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13] + 'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:] + )): + if CODE_VERSION == (3,10) or CODE_VERSION == (3,8): + return CodeType( + *args[:6], + args[6].encode() if hasattr(args[6], 'encode') else args[6], # code + *args[7:13], + args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable + args[14], + args[15], + ) + fields = m.fields + if CODE_VERSION >= (3,10): + fields['linetable'] = m.LNOTAB_OR_LINETABLE + else: + fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE + # Python 3.7 (15 args) + elif m.case(( + 'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5] + 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12] + 'lnotab', 'freevars', 'cellvars' # args[12:] + )): + if CODE_VERSION == (3,7): + return CodeType( + *args[:5], + args[5].encode() if hasattr(args[5], 'encode') else args[5], # code + *args[6:12], + args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab + args[13], + args[14], + ) + fields = m.fields + # Python 3.11a (20 members) + elif m.case(( + 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6] + 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14] + 'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:] + )): + if CODE_VERSION == (3,11,'a'): + return CodeType( + *args[:6], + args[6].encode() if hasattr(args[6], 'encode') else args[6], # code + *args[7:14], + *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable + args[18], + args[19], + ) + fields = m.fields + else: + raise UnpicklingError("pattern match for code object failed") + + # The args format doesn't match this version. 
+    fields.setdefault('posonlyargcount', 0)        # from python <= 3.7
+    fields.setdefault('lnotab', LNOTAB)            # from python >= 3.10
+    fields.setdefault('linetable', b'')            # from python <= 3.9
+    fields.setdefault('qualname', fields['name'])  # from python <= 3.10
+    fields.setdefault('exceptiontable', b'')       # from python <= 3.10
+    fields.setdefault('endlinetable', None)        # from python != 3.11a
+    fields.setdefault('columntable', None)         # from python != 3.11a
+
+    args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
+            for k in CODE_PARAMS)
+    return CodeType(*args)
+
+def _create_ftype(ftypeobj, func, args, kwds):
+    if kwds is None:
+        kwds = {}
+    if args is None:
+        args = ()
+    return ftypeobj(func, *args, **kwds)
+
+def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
+    if not argz:
+        return typing.Tuple[()].copy_with(())
+    if argz == ((),):
+        return typing.Tuple[()]
+    return typing.Tuple[argz]
+
+def _create_lock(locked, *args): #XXX: ignores 'blocking'
+    from threading import Lock
+    lock = Lock()
+    if locked:
+        if not lock.acquire(False):
+            raise UnpicklingError("Cannot acquire lock")
+    return lock
+
+def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
+    lock = RLockType()
+    if owner is not None:
+        lock._acquire_restore((count, owner))
+    if owner and not lock._is_owned():
+        raise UnpicklingError("Cannot acquire lock")
+    return lock
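+
+# NOTE: _create_filehandle below rebuilds a pickled file handle according to
+# the fmode used when pickling: HANDLE_FMODE restores just the handle (the
+# position resets as for a new handle), CONTENTS_FMODE reopens the file that
+# already exists on disk (clamping the position to the current size), and
+# FILE_FMODE first rewrites the file from the pickled contents, then seeks.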
+ else: + current_size = os.path.getsize(name) + + if position > current_size: + if strictio: + raise ValueError("invalid buffer size") + elif fmode == CONTENTS_FMODE: + position = current_size + # try to open the file by name + # NOTE: has different fileno + try: + #FIXME: missing: *buffering*, encoding, softspace + if fmode == FILE_FMODE: + f = open(name, mode if "w" in mode else "w") + f.write(fdata) + if "w" not in mode: + f.close() + f = open(name, mode) + elif name == '': # file did not exist + import tempfile + f = tempfile.TemporaryFile(mode) + # treat x mode as w mode + elif fmode == CONTENTS_FMODE \ + and ("w" in mode or "x" in mode): + # stop truncation when opening + flags = os.O_CREAT + if "+" in mode: + flags |= os.O_RDWR + else: + flags |= os.O_WRONLY + f = os.fdopen(os.open(name, flags), mode) + # set name to the correct value + r = getattr(f, "buffer", f) + r = getattr(r, "raw", r) + r.name = name + assert f.name == name + else: + f = open(name, mode) + except (IOError, FileNotFoundError): + err = sys.exc_info()[1] + raise UnpicklingError(err) + if closed: + f.close() + elif position >= 0 and fmode != HANDLE_FMODE: + f.seek(position) + return f + +def _create_stringi(value, position, closed): + f = StringIO(value) + if closed: f.close() + else: f.seek(position) + return f + +def _create_stringo(value, position, closed): + f = StringIO() + if closed: f.close() + else: + f.write(value) + f.seek(position) + return f + +class _itemgetter_helper(object): + def __init__(self): + self.items = [] + def __getitem__(self, item): + self.items.append(item) + return + +class _attrgetter_helper(object): + def __init__(self, attrs, index=None): + self.attrs = attrs + self.index = index + def __getattribute__(self, attr): + attrs = object.__getattribute__(self, "attrs") + index = object.__getattribute__(self, "index") + if index is None: + index = len(attrs) + attrs.append(attr) + else: + attrs[index] = ".".join([attrs[index], attr]) + return type(self)(attrs, index) + +class _dictproxy_helper(dict): + def __ror__(self, a): + return a + +_dictproxy_helper_instance = _dictproxy_helper() + +__d = {} +try: + # In CPython 3.9 and later, this trick can be used to exploit the + # implementation of the __or__ function of MappingProxyType to get the true + # mapping referenced by the proxy. It may work for other implementations, + # but is not guaranteed. + MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance) +except Exception: + MAPPING_PROXY_TRICK = False +del __d + +# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill +# whose _create_cell functions do not have a default value. +# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls +# to _create_cell) once breaking changes are allowed. 
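+# A minimal sketch of the reconstruction done below (assuming CPython cell
+# semantics):
+#   >>> cell = _create_cell(42)
+#   >>> cell.cell_contents
+#   42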
+_CELL_REF = None +_CELL_EMPTY = Sentinel('_CELL_EMPTY') + +def _create_cell(contents=None): + if contents is not _CELL_EMPTY: + value = contents + return (lambda: value).__closure__[0] + +def _create_weakref(obj, *args): + from weakref import ref + if obj is None: # it's dead + from collections import UserDict + return ref(UserDict(), *args) + return ref(obj, *args) + +def _create_weakproxy(obj, callable=False, *args): + from weakref import proxy + if obj is None: # it's dead + if callable: return proxy(lambda x:x, *args) + from collections import UserDict + return proxy(UserDict(), *args) + return proxy(obj, *args) + +def _eval_repr(repr_str): + return eval(repr_str) + +def _create_array(f, args, state, npdict=None): + #array = numpy.core.multiarray._reconstruct(*args) + array = f(*args) + array.__setstate__(state) + if npdict is not None: # we also have saved state in __dict__ + array.__dict__.update(npdict) + return array + +def _create_dtypemeta(scalar_type): + if NumpyDType is True: __hook__() # a bit hacky I think + if scalar_type is None: + return NumpyDType + return type(NumpyDType(scalar_type)) + +def _create_namedtuple(name, fieldnames, modulename, defaults=None): + class_ = _import_module(modulename + '.' + name, safe=True) + if class_ is not None: + return class_ + import collections + t = collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename) + return t + +def _create_capsule(pointer, name, context, destructor): + attr_found = False + try: + # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231 + uname = name.decode('utf8') + for i in range(1, uname.count('.')+1): + names = uname.rsplit('.', i) + try: + module = __import__(names[0]) + except ImportError: + pass + obj = module + for attr in names[1:]: + obj = getattr(obj, attr) + capsule = obj + attr_found = True + break + except Exception: + pass + + if attr_found: + if _PyCapsule_IsValid(capsule, name): + return capsule + raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name)) + else: + #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning) + capsule = _PyCapsule_New(pointer, name, destructor) + _PyCapsule_SetContext(capsule, context) + return capsule + +def _getattr(objclass, name, repr_str): + # hack to grab the reference directly + try: #XXX: works only for __builtin__ ? + attr = repr_str.split("'")[3] + return eval(attr+'.__dict__["'+name+'"]') + except Exception: + try: + attr = objclass.__dict__ + if type(attr) is DictProxyType: + attr = attr[name] + else: + attr = getattr(objclass,name) + except (AttributeError, KeyError): + attr = getattr(objclass,name) + return attr + +def _get_attr(self, name): + # stop recursive pickling + return getattr(self, name, None) or getattr(__builtin__, name) + +def _import_module(import_name, safe=False): + try: + if import_name.startswith('__runtime__.'): + return sys.modules[import_name] + elif '.' 
in import_name: + items = import_name.split('.') + module = '.'.join(items[:-1]) + obj = items[-1] + submodule = getattr(__import__(module, None, None, [obj]), obj) + if isinstance(submodule, (ModuleType, type)): + return submodule + return __import__(import_name, None, None, [obj]) + else: + return __import__(import_name) + except (ImportError, AttributeError, KeyError): + if safe: + return None + raise + +# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333 +def _getattribute(obj, name): + for subpath in name.split('.'): + if subpath == '': + raise AttributeError("Can't get local attribute {!r} on {!r}" + .format(name, obj)) + try: + parent = obj + obj = getattr(obj, subpath) + except AttributeError: + raise AttributeError("Can't get attribute {!r} on {!r}" + .format(name, obj)) + return obj, parent + +def _locate_function(obj, pickler=None): + module_name = getattr(obj, '__module__', None) + if module_name in ['__main__', None] or \ + pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__: + return False + if hasattr(obj, '__qualname__'): + module = _import_module(module_name, safe=True) + try: + found, _ = _getattribute(module, obj.__qualname__) + return found is obj + except AttributeError: + return False + else: + found = _import_module(module_name + '.' + obj.__name__, safe=True) + return found is obj + + +def _setitems(dest, source): + for k, v in source.items(): + dest[k] = v + + +def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None): + if obj is Getattr.NO_DEFAULT: + obj = Reduce(reduction) # pragma: no cover + + if is_pickler_dill is None: + is_pickler_dill = is_dill(pickler, child=True) + if is_pickler_dill: + # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!' + # if not hasattr(pickler, 'x'): pickler.x = 0 + # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse) + # pickler.x += 1 + if postproc_list is None: + postproc_list = [] + + # Recursive object not supported. Default to a global instead. + if id(obj) in pickler._postproc: + name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else '' + warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning) + pickler.save_global(obj) + return + pickler._postproc[id(obj)] = postproc_list + + # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations + pickler.save_reduce(*reduction, obj=obj) + + if is_pickler_dill: + # pickler.x -= 1 + # print(pickler.x*' ', 'pop', obj, id(obj)) + postproc = pickler._postproc.pop(id(obj)) + # assert postproc_list == postproc, 'Stack tampered!' + for reduction in reversed(postproc): + if reduction[0] is _setitems: + # use the internal machinery of pickle.py to speedup when + # updating a dictionary in postproc + dest, source = reduction[1] + if source: + pickler.write(pickler.get(pickler.memo[id(dest)][0])) + pickler._batch_setitems(iter(source.items())) + else: + # Updating with an empty dictionary. Same as doing nothing. 
+ continue + else: + pickler.save_reduce(*reduction) + # pop None created by calling preprocessing step off stack + pickler.write(POP) + +#@register(CodeType) +#def save_code(pickler, obj): +# logger.trace(pickler, "Co: %s", obj) +# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj) +# logger.trace(pickler, "# Co") +# return + +# The following function is based on 'save_codeobject' from 'cloudpickle' +# Copyright (c) 2012, Regents of the University of California. +# Copyright (c) 2009 `PiCloud, Inc. `_. +# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE +@register(CodeType) +def save_code(pickler, obj): + logger.trace(pickler, "Co: %s", obj) + if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args) + args = ( + obj.co_lnotab, # for < python 3.10 [not counted in args] + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, + obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable, + obj.co_columntable, obj.co_exceptiontable, obj.co_freevars, + obj.co_cellvars + ) + elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args) + with warnings.catch_warnings(): + if not OLD312a7: # issue 597 + warnings.filterwarnings('ignore', category=DeprecationWarning) + args = ( + obj.co_lnotab, # for < python 3.10 [not counted in args] + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, + obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable, + obj.co_freevars, obj.co_cellvars + ) + elif hasattr(obj, "co_linetable"): # python 3.10 (16 args) + args = ( + obj.co_lnotab, # for < python 3.10 [not counted in args] + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, + obj.co_firstlineno, obj.co_linetable, obj.co_freevars, + obj.co_cellvars + ) + elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args) + args = ( + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, + obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, + obj.co_cellvars + ) + else: # python 3.7 (15 args) + args = ( + obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals, + obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, + obj.co_names, obj.co_varnames, obj.co_filename, + obj.co_name, obj.co_firstlineno, obj.co_lnotab, + obj.co_freevars, obj.co_cellvars + ) + + pickler.save_reduce(_create_code, args, obj=obj) + logger.trace(pickler, "# Co") + return + +def _repr_dict(obj): + """Make a short string representation of a dictionary.""" + return "<%s object at %#012x>" % (type(obj).__name__, id(obj)) + +@register(dict) +def save_module_dict(pickler, obj): + if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \ + not (pickler._session and pickler._first_pass): + logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj + pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8')) + logger.trace(pickler, "# D1") + elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__): + logger.trace(pickler, "D3: %s", 
_repr_dict(obj)) # obj + pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general? + logger.trace(pickler, "# D3") + elif '__name__' in obj and obj != _main_module.__dict__ \ + and type(obj['__name__']) is str \ + and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None): + logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj + pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8')) + logger.trace(pickler, "# D4") + else: + logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj + if is_dill(pickler, child=False) and pickler._session: + # we only care about session the first pass thru + pickler._first_pass = False + StockPickler.save_dict(pickler, obj) + logger.trace(pickler, "# D2") + return + + +if not OLD310 and MAPPING_PROXY_TRICK: + def save_dict_view(dicttype): + def save_dict_view_for_function(func): + def _save_dict_view(pickler, obj): + logger.trace(pickler, "Dkvi: <%s>", obj) + mapping = obj.mapping | _dictproxy_helper_instance + pickler.save_reduce(func, (mapping,), obj=obj) + logger.trace(pickler, "# Dkvi") + return _save_dict_view + return [ + (funcname, save_dict_view_for_function(getattr(dicttype, funcname))) + for funcname in ('keys', 'values', 'items') + ] +else: + # The following functions are based on 'cloudpickle' + # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940 + # Copyright (c) 2012, Regents of the University of California. + # Copyright (c) 2009 `PiCloud, Inc. `_. + # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE + def save_dict_view(dicttype): + def save_dict_keys(pickler, obj): + logger.trace(pickler, "Dk: <%s>", obj) + dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),)) + pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj) + logger.trace(pickler, "# Dk") + + def save_dict_values(pickler, obj): + logger.trace(pickler, "Dv: <%s>", obj) + dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),)) + pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj) + logger.trace(pickler, "# Dv") + + def save_dict_items(pickler, obj): + logger.trace(pickler, "Di: <%s>", obj) + pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj) + logger.trace(pickler, "# Di") + + return ( + ('keys', save_dict_keys), + ('values', save_dict_values), + ('items', save_dict_items) + ) + +for __dicttype in ( + dict, + OrderedDict +): + __obj = __dicttype() + for __funcname, __savefunc in save_dict_view(__dicttype): + __tview = type(getattr(__obj, __funcname)()) + if __tview not in Pickler.dispatch: + Pickler.dispatch[__tview] = __savefunc +del __dicttype, __obj, __funcname, __tview, __savefunc + + +@register(ClassType) +def save_classobj(pickler, obj): #FIXME: enable pickler._byref + if not _locate_function(obj, pickler): + logger.trace(pickler, "C1: %s", obj) + pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__, + obj.__dict__), obj=obj) + #XXX: or obj.__dict__.copy()), obj=obj) ? 
+ logger.trace(pickler, "# C1") + else: + logger.trace(pickler, "C2: %s", obj) + name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) + StockPickler.save_global(pickler, obj, name=name) + logger.trace(pickler, "# C2") + return + +@register(typing._GenericAlias) +def save_generic_alias(pickler, obj): + args = obj.__args__ + if type(obj.__reduce__()) is str: + logger.trace(pickler, "Ga0: %s", obj) + StockPickler.save_global(pickler, obj, name=obj.__reduce__()) + logger.trace(pickler, "# Ga0") + elif obj.__origin__ is tuple and (not args or args == ((),)): + logger.trace(pickler, "Ga1: %s", obj) + pickler.save_reduce(_create_typing_tuple, (args,), obj=obj) + logger.trace(pickler, "# Ga1") + else: + logger.trace(pickler, "Ga2: %s", obj) + StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj) + logger.trace(pickler, "# Ga2") + return + +@register(LockType) +def save_lock(pickler, obj): + logger.trace(pickler, "Lo: %s", obj) + pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj) + logger.trace(pickler, "# Lo") + return + +@register(RLockType) +def save_rlock(pickler, obj): + logger.trace(pickler, "RL: %s", obj) + r = obj.__repr__() # don't use _release_save as it unlocks the lock + count = int(r.split('count=')[1].split()[0].rstrip('>')) + owner = int(r.split('owner=')[1].split()[0]) + pickler.save_reduce(_create_rlock, (count,owner,), obj=obj) + logger.trace(pickler, "# RL") + return + +#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL +def save_socket(pickler, obj): + logger.trace(pickler, "So: %s", obj) + pickler.save_reduce(*reduce_socket(obj)) + logger.trace(pickler, "# So") + return + +def _save_file(pickler, obj, open_): + if obj.closed: + position = 0 + else: + obj.flush() + if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__): + position = -1 + else: + position = obj.tell() + if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE: + f = open_(obj.name, "r") + fdata = f.read() + f.close() + else: + fdata = "" + if is_dill(pickler, child=True): + strictio = pickler._strictio + fmode = pickler._fmode + else: + strictio = False + fmode = 0 # HANDLE_FMODE + pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position, + obj.closed, open_, strictio, + fmode, fdata), obj=obj) + return + + +@register(FileType) #XXX: in 3.x has buffer=0, needs different _create? 
+@register(BufferedReaderType) +@register(BufferedWriterType) +@register(TextWrapperType) +def save_file(pickler, obj): + logger.trace(pickler, "Fi: %s", obj) + f = _save_file(pickler, obj, open) + logger.trace(pickler, "# Fi") + return f + +if BufferedRandomType: + @register(BufferedRandomType) + def save_file(pickler, obj): + logger.trace(pickler, "Fi: %s", obj) + f = _save_file(pickler, obj, open) + logger.trace(pickler, "# Fi") + return f + +if PyTextWrapperType: + @register(PyBufferedReaderType) + @register(PyBufferedWriterType) + @register(PyTextWrapperType) + def save_file(pickler, obj): + logger.trace(pickler, "Fi: %s", obj) + f = _save_file(pickler, obj, _open) + logger.trace(pickler, "# Fi") + return f + + if PyBufferedRandomType: + @register(PyBufferedRandomType) + def save_file(pickler, obj): + logger.trace(pickler, "Fi: %s", obj) + f = _save_file(pickler, obj, _open) + logger.trace(pickler, "# Fi") + return f + + +# The following two functions are based on 'saveCStringIoInput' +# and 'saveCStringIoOutput' from spickle +# Copyright (c) 2011 by science+computing ag +# License: http://www.apache.org/licenses/LICENSE-2.0 +if InputType: + @register(InputType) + def save_stringi(pickler, obj): + logger.trace(pickler, "Io: %s", obj) + if obj.closed: + value = ''; position = 0 + else: + value = obj.getvalue(); position = obj.tell() + pickler.save_reduce(_create_stringi, (value, position, \ + obj.closed), obj=obj) + logger.trace(pickler, "# Io") + return + + @register(OutputType) + def save_stringo(pickler, obj): + logger.trace(pickler, "Io: %s", obj) + if obj.closed: + value = ''; position = 0 + else: + value = obj.getvalue(); position = obj.tell() + pickler.save_reduce(_create_stringo, (value, position, \ + obj.closed), obj=obj) + logger.trace(pickler, "# Io") + return + +if LRUCacheType is not None: + from functools import lru_cache + @register(LRUCacheType) + def save_lru_cache(pickler, obj): + logger.trace(pickler, "LRU: %s", obj) + if OLD39: + kwargs = obj.cache_info() + args = (kwargs.maxsize,) + else: + kwargs = obj.cache_parameters() + args = (kwargs['maxsize'], kwargs['typed']) + if args != lru_cache.__defaults__: + wrapper = Reduce(lru_cache, args, is_callable=True) + else: + wrapper = lru_cache + pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj) + logger.trace(pickler, "# LRU") + return + +@register(SuperType) +def save_super(pickler, obj): + logger.trace(pickler, "Su: %s", obj) + pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj) + logger.trace(pickler, "# Su") + return + +if IS_PYPY: + @register(MethodType) + def save_instancemethod0(pickler, obj): + code = getattr(obj.__func__, '__code__', None) + if code is not None and type(code) is not CodeType \ + and getattr(obj.__self__, obj.__name__) == obj: + # Some PyPy builtin functions have no module name + logger.trace(pickler, "Me2: %s", obj) + # TODO: verify that this works for all PyPy builtin methods + pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj) + logger.trace(pickler, "# Me2") + return + + logger.trace(pickler, "Me1: %s", obj) + pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj) + logger.trace(pickler, "# Me1") + return +else: + @register(MethodType) + def save_instancemethod0(pickler, obj): + logger.trace(pickler, "Me1: %s", obj) + pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj) + logger.trace(pickler, "# Me1") + return + +if not IS_PYPY: + @register(MemberDescriptorType) + @register(GetSetDescriptorType) + 
@register(MethodDescriptorType)
+    @register(WrapperDescriptorType)
+    @register(ClassMethodDescriptorType)
+    def save_wrapper_descriptor(pickler, obj):
+        logger.trace(pickler, "Wr: %s", obj)
+        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
+                                       obj.__repr__()), obj=obj)
+        logger.trace(pickler, "# Wr")
+        return
+else:
+    @register(MemberDescriptorType)
+    @register(GetSetDescriptorType)
+    def save_wrapper_descriptor(pickler, obj):
+        logger.trace(pickler, "Wr: %s", obj)
+        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
+                                       obj.__repr__()), obj=obj)
+        logger.trace(pickler, "# Wr")
+        return
+
+@register(CellType)
+def save_cell(pickler, obj):
+    try:
+        f = obj.cell_contents
+    except ValueError: # cell is empty
+        logger.trace(pickler, "Ce3: %s", obj)
+        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
+        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
+        # _shims.py. This object is not present in Python 3 because the cell's
+        # contents can be deleted in newer versions of Python. The reduce object
+        # will instead unpickle to None if unpickled in Python 3.
+
+        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
+        # be replaced by () OR the delattr function can be removed, depending
+        # on whichever is more convenient.
+        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
+        # Call the function _delattr on the cell's cell_contents attribute.
+        # The result of this function call will be None.
+        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
+        # pop None created by calling _delattr off stack
+        pickler.write(POP)
+        logger.trace(pickler, "# Ce3")
+        return
+    if is_dill(pickler, child=True):
+        if id(f) in pickler._postproc:
+            # Already seen. Add to its postprocessing.
+            postproc = pickler._postproc[id(f)]
+        else:
+            # Haven't seen it. Add to the highest possible object and set its
+            # value as late as possible to prevent cycle.
+            postproc = next(iter(pickler._postproc.values()), None)
+        if postproc is not None:
+            logger.trace(pickler, "Ce2: %s", obj)
+            # _CELL_REF is defined in _shims.py to support older versions of
+            # dill.
When breaking changes are made to dill, (_CELL_REF,) can + # be replaced by () + pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj) + postproc.append((_shims._setattr, (obj, 'cell_contents', f))) + logger.trace(pickler, "# Ce2") + return + logger.trace(pickler, "Ce1: %s", obj) + pickler.save_reduce(_create_cell, (f,), obj=obj) + logger.trace(pickler, "# Ce1") + return + +if MAPPING_PROXY_TRICK: + @register(DictProxyType) + def save_dictproxy(pickler, obj): + logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj + mapping = obj | _dictproxy_helper_instance + pickler.save_reduce(DictProxyType, (mapping,), obj=obj) + logger.trace(pickler, "# Mp") + return +else: + @register(DictProxyType) + def save_dictproxy(pickler, obj): + logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj + pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj) + logger.trace(pickler, "# Mp") + return + +@register(SliceType) +def save_slice(pickler, obj): + logger.trace(pickler, "Sl: %s", obj) + pickler.save_reduce(slice, (obj.start, obj.stop, obj.step), obj=obj) + logger.trace(pickler, "# Sl") + return + +@register(XRangeType) +@register(EllipsisType) +@register(NotImplementedType) +def save_singleton(pickler, obj): + logger.trace(pickler, "Si: %s", obj) + pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj) + logger.trace(pickler, "# Si") + return + +def _proxy_helper(obj): # a dead proxy returns a reference to None + """get memory address of proxy's reference object""" + _repr = repr(obj) + try: _str = str(obj) + except ReferenceError: # it's a dead proxy + return id(None) + if _str == _repr: return id(obj) # it's a repr + try: # either way, it's a proxy from here + address = int(_str.rstrip('>').split(' at ')[-1], base=16) + except ValueError: # special case: proxy of a 'type' + if not IS_PYPY: + address = int(_repr.rstrip('>').split(' at ')[-1], base=16) + else: + objects = iter(gc.get_objects()) + for _obj in objects: + if repr(_obj) == _str: return id(_obj) + # all bad below... nothing found so throw ReferenceError + msg = "Cannot reference object for proxy at '%s'" % id(obj) + raise ReferenceError(msg) + return address + +def _locate_object(address, module=None): + """get object located at the given memory address (inverse of id(obj))""" + special = [None, True, False] #XXX: more...? + for obj in special: + if address == id(obj): return obj + if module: + objects = iter(module.__dict__.values()) + else: objects = iter(gc.get_objects()) + for obj in objects: + if address == id(obj): return obj + # all bad below... nothing found so throw ReferenceError or TypeError + try: address = hex(address) + except TypeError: + raise TypeError("'%s' is not a valid memory address" % str(address)) + raise ReferenceError("Cannot reference object at '%s'" % address) + +@register(ReferenceType) +def save_weakref(pickler, obj): + refobj = obj() + logger.trace(pickler, "R1: %s", obj) + #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None" + pickler.save_reduce(_create_weakref, (refobj,), obj=obj) + logger.trace(pickler, "# R1") + return + +@register(ProxyType) +@register(CallableProxyType) +def save_weakproxy(pickler, obj): + # Must do string substitution here and use %r to avoid ReferenceError. 
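+    # (str() on a dead proxy raises ReferenceError while repr() still works;
+    # _proxy_helper above relies on the same asymmetry)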
+ logger.trace(pickler, "R2: %r" % obj) + refobj = _locate_object(_proxy_helper(obj)) + pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj) + logger.trace(pickler, "# R2") + return + +def _is_builtin_module(module): + if not hasattr(module, "__file__"): return True + if module.__file__ is None: return False + # If a module file name starts with prefix, it should be a builtin + # module, so should always be pickled as a reference. + names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"] + rp = os.path.realpath + # See https://github.com/uqfoundation/dill/issues/566 + return ( + any( + module.__file__.startswith(getattr(sys, name)) + or rp(module.__file__).startswith(rp(getattr(sys, name))) + for name in names + if hasattr(sys, name) + ) + or module.__file__.endswith(EXTENSION_SUFFIXES) + or 'site-packages' in module.__file__ + ) + +def _is_imported_module(module): + return getattr(module, '__loader__', None) is not None or module in sys.modules.values() + +@register(ModuleType) +def save_module(pickler, obj): + if False: #_use_diff: + if obj.__name__.split('.', 1)[0] != "dill": + try: + changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0] + except RuntimeError: # not memorised module, probably part of dill + pass + else: + logger.trace(pickler, "M2: %s with diff", obj) + logger.info("Diff: %s", changed.keys()) + pickler.save_reduce(_import_module, (obj.__name__,), obj=obj, + state=changed) + logger.trace(pickler, "# M2") + return + + logger.trace(pickler, "M1: %s", obj) + pickler.save_reduce(_import_module, (obj.__name__,), obj=obj) + logger.trace(pickler, "# M1") + else: + builtin_mod = _is_builtin_module(obj) + is_session_main = is_dill(pickler, child=True) and obj is pickler._main + if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod + or is_session_main): + logger.trace(pickler, "M1: %s", obj) + # Hack for handling module-type objects in load_module(). + mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__ + # Second references are saved as __builtin__.__main__ in save_module_dict(). + main_dict = obj.__dict__.copy() + for item in ('__builtins__', '__loader__'): + main_dict.pop(item, None) + for item in IPYTHON_SINGLETONS: #pragma: no cover + if getattr(main_dict.get(item), '__module__', '').startswith('IPython'): + del main_dict[item] + pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict) + logger.trace(pickler, "# M1") + elif obj.__name__ == "dill._dill": + logger.trace(pickler, "M2: %s", obj) + pickler.save_global(obj, name="_dill") + logger.trace(pickler, "# M2") + else: + logger.trace(pickler, "M2: %s", obj) + pickler.save_reduce(_import_module, (obj.__name__,), obj=obj) + logger.trace(pickler, "# M2") + return + +# The following function is based on '_extract_class_dict' from 'cloudpickle' +# Copyright (c) 2012, Regents of the University of California. +# Copyright (c) 2009 `PiCloud, Inc. `_. 
+# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE +def _get_typedict_type(cls, clsdict, attrs, postproc_list): + """Retrieve a copy of the dict of a class without the inherited methods""" + if len(cls.__bases__) == 1: + inherited_dict = cls.__bases__[0].__dict__ + else: + inherited_dict = {} + for base in reversed(cls.__bases__): + inherited_dict.update(base.__dict__) + to_remove = [] + for name, value in dict.items(clsdict): + try: + base_value = inherited_dict[name] + if value is base_value and hasattr(value, '__qualname__'): + to_remove.append(name) + except KeyError: + pass + for name in to_remove: + dict.pop(clsdict, name) + + if issubclass(type(cls), type): + clsdict.pop('__dict__', None) + clsdict.pop('__weakref__', None) + # clsdict.pop('__prepare__', None) + return clsdict, attrs + +def _get_typedict_abc(obj, _dict, attrs, postproc_list): + if hasattr(abc, '_get_dump'): + (registry, _, _, _) = abc._get_dump(obj) + register = obj.register + postproc_list.extend((register, (reg(),)) for reg in registry) + elif hasattr(obj, '_abc_registry'): + registry = obj._abc_registry + register = obj.register + postproc_list.extend((register, (reg,)) for reg in registry) + else: + raise PicklingError("Cannot find registry of ABC %s", obj) + + if '_abc_registry' in _dict: + _dict.pop('_abc_registry', None) + _dict.pop('_abc_cache', None) + _dict.pop('_abc_negative_cache', None) + # _dict.pop('_abc_negative_cache_version', None) + else: + _dict.pop('_abc_impl', None) + return _dict, attrs + +@register(TypeType) +def save_type(pickler, obj, postproc_list=None): + if obj in _typemap: + logger.trace(pickler, "T1: %s", obj) + # if obj in _incedental_types: + # warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' 
% (obj,), PicklingWarning) + pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj) + logger.trace(pickler, "# T1") + elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]): + # special case: namedtuples + logger.trace(pickler, "T6: %s", obj) + + obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) + if obj.__name__ != obj_name: + if postproc_list is None: + postproc_list = [] + postproc_list.append((setattr, (obj, '__qualname__', obj_name))) + + if not obj._field_defaults: + _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list) + else: + defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults] + _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list) + logger.trace(pickler, "# T6") + return + + # special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta + elif obj is type(None): + logger.trace(pickler, "T7: %s", obj) + #XXX: pickler.save_reduce(type, (None,), obj=obj) + pickler.write(GLOBAL + b'__builtin__\nNoneType\n') + logger.trace(pickler, "# T7") + elif obj is NotImplementedType: + logger.trace(pickler, "T7: %s", obj) + pickler.save_reduce(type, (NotImplemented,), obj=obj) + logger.trace(pickler, "# T7") + elif obj is EllipsisType: + logger.trace(pickler, "T7: %s", obj) + pickler.save_reduce(type, (Ellipsis,), obj=obj) + logger.trace(pickler, "# T7") + elif obj is EnumMeta: + logger.trace(pickler, "T7: %s", obj) + pickler.write(GLOBAL + b'enum\nEnumMeta\n') + logger.trace(pickler, "# T7") + + else: + _byref = getattr(pickler, '_byref', None) + obj_recursive = id(obj) in getattr(pickler, '_postproc', ()) + incorrectly_named = not _locate_function(obj, pickler) + if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over + if postproc_list is None: + postproc_list = [] + + # thanks to Tom Stepleton pointing out pickler._session unneeded + logger.trace(pickler, "T2: %s", obj) + _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict + + #print (_dict) + #print ("%s\n%s" % (type(obj), obj.__name__)) + #print ("%s\n%s" % (obj.__bases__, obj.__dict__)) + slots = _dict.get('__slots__', ()) + if type(slots) == str: + # __slots__ accepts a single string + slots = (slots,) + + for name in slots: + _dict.pop(name, None) + + if isinstance(obj, abc.ABCMeta): + logger.trace(pickler, "ABC: %s", obj) + _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list) + logger.trace(pickler, "# ABC") + + qualname = getattr(obj, '__qualname__', None) + if attrs is not None: + for k, v in attrs.items(): + postproc_list.append((setattr, (obj, k, v))) + # TODO: Consider using the state argument to save_reduce? + if qualname is not None: + postproc_list.append((setattr, (obj, '__qualname__', qualname))) + + if not hasattr(obj, '__orig_bases__'): + _save_with_postproc(pickler, (_create_type, ( + type(obj), obj.__name__, obj.__bases__, _dict + )), obj=obj, postproc_list=postproc_list) + else: + # This case will always work, but might be overkill. 
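+                # (__orig_bases__ is present when PEP 560 __mro_entries__
+                # rewrote the bases, e.g. for Generic[T] subclasses, so the
+                # class is rebuilt with types.new_class and the original bases)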
+ _metadict = { + 'metaclass': type(obj) + } + + if _dict: + _dict_update = PartialType(_setitems, source=_dict) + else: + _dict_update = None + + _save_with_postproc(pickler, (new_class, ( + obj.__name__, obj.__orig_bases__, _metadict, _dict_update + )), obj=obj, postproc_list=postproc_list) + logger.trace(pickler, "# T2") + else: + obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) + logger.trace(pickler, "T4: %s", obj) + if incorrectly_named: + warnings.warn( + "Cannot locate reference to %r." % (obj,), + PicklingWarning, + stacklevel=3, + ) + if obj_recursive: + warnings.warn( + "Cannot pickle %r: %s.%s has recursive self-references that " + "trigger a RecursionError." % (obj, obj.__module__, obj_name), + PicklingWarning, + stacklevel=3, + ) + #print (obj.__dict__) + #print ("%s\n%s" % (type(obj), obj.__name__)) + #print ("%s\n%s" % (obj.__bases__, obj.__dict__)) + StockPickler.save_global(pickler, obj, name=obj_name) + logger.trace(pickler, "# T4") + return + +@register(property) +@register(abc.abstractproperty) +def save_property(pickler, obj): + logger.trace(pickler, "Pr: %s", obj) + pickler.save_reduce(type(obj), (obj.fget, obj.fset, obj.fdel, obj.__doc__), + obj=obj) + logger.trace(pickler, "# Pr") + +@register(staticmethod) +@register(classmethod) +@register(abc.abstractstaticmethod) +@register(abc.abstractclassmethod) +def save_classmethod(pickler, obj): + logger.trace(pickler, "Cm: %s", obj) + orig_func = obj.__func__ + + # if type(obj.__dict__) is dict: + # if obj.__dict__: + # state = obj.__dict__ + # else: + # state = None + # else: + # state = (None, {'__dict__', obj.__dict__}) + + pickler.save_reduce(type(obj), (orig_func,), obj=obj) + logger.trace(pickler, "# Cm") + +@register(FunctionType) +def save_function(pickler, obj): + if not _locate_function(obj, pickler): + if type(obj.__code__) is not CodeType: + # Some PyPy builtin functions have no module name, and thus are not + # able to be located + module_name = getattr(obj, '__module__', None) + if module_name is None: + module_name = __builtin__.__name__ + module = _import_module(module_name, safe=True) + _pypy_builtin = False + try: + found, _ = _getattribute(module, obj.__qualname__) + if getattr(found, '__func__', None) is obj: + _pypy_builtin = True + except AttributeError: + pass + + if _pypy_builtin: + logger.trace(pickler, "F3: %s", obj) + pickler.save_reduce(getattr, (found, '__func__'), obj=obj) + logger.trace(pickler, "# F3") + return + + logger.trace(pickler, "F1: %s", obj) + _recurse = getattr(pickler, '_recurse', None) + _postproc = getattr(pickler, '_postproc', None) + _main_modified = getattr(pickler, '_main_modified', None) + _original_main = getattr(pickler, '_original_main', __builtin__)#'None' + postproc_list = [] + if _recurse: + # recurse to get all globals referred to by obj + from .detect import globalvars + globs_copy = globalvars(obj, recurse=True, builtin=True) + + # Add the name of the module to the globs dictionary to prevent + # the duplication of the dictionary. Pickle the unpopulated + # globals dictionary and set the remaining items after the function + # is created to correctly handle recursion. + globs = {'__name__': obj.__module__} + else: + globs_copy = obj.__globals__ + + # If the globals is the __dict__ from the module being saved as a + # session, substitute it by the dictionary being actually saved. 
+ if _main_modified and globs_copy is _original_main.__dict__: + globs_copy = getattr(pickler, '_main', _original_main).__dict__ + globs = globs_copy + # If the globals is a module __dict__, do not save it in the pickle. + elif globs_copy is not None and obj.__module__ is not None and \ + getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy: + globs = globs_copy + else: + globs = {'__name__': obj.__module__} + + if globs_copy is not None and globs is not globs_copy: + # In the case that the globals are copied, we need to ensure that + # the globals dictionary is updated when all objects in the + # dictionary are already created. + glob_ids = {id(g) for g in globs_copy.values()} + for stack_element in _postproc: + if stack_element in glob_ids: + _postproc[stack_element].append((_setitems, (globs, globs_copy))) + break + else: + postproc_list.append((_setitems, (globs, globs_copy))) + + closure = obj.__closure__ + state_dict = {} + for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'): + fattr = getattr(obj, fattrname, None) + if fattr is not None: + state_dict[fattrname] = fattr + if obj.__qualname__ != obj.__name__: + state_dict['__qualname__'] = obj.__qualname__ + if '__name__' not in globs or obj.__module__ != globs['__name__']: + state_dict['__module__'] = obj.__module__ + + state = obj.__dict__ + if type(state) is not dict: + state_dict['__dict__'] = state + state = None + if state_dict: + state = state, state_dict + + _save_with_postproc(pickler, (_create_function, ( + obj.__code__, globs, obj.__name__, obj.__defaults__, + closure + ), state), obj=obj, postproc_list=postproc_list) + + # Lift closure cell update to earliest function (#458) + if _postproc: + topmost_postproc = next(iter(_postproc.values()), None) + if closure and topmost_postproc: + for cell in closure: + possible_postproc = (setattr, (cell, 'cell_contents', obj)) + try: + topmost_postproc.remove(possible_postproc) + except ValueError: + continue + + # Change the value of the cell + pickler.save_reduce(*possible_postproc) + # pop None created by calling preprocessing step off stack + pickler.write(POP) + + logger.trace(pickler, "# F1") + else: + logger.trace(pickler, "F2: %s", obj) + name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) + StockPickler.save_global(pickler, obj, name=name) + logger.trace(pickler, "# F2") + return + +if HAS_CTYPES and hasattr(ctypes, 'pythonapi'): + _PyCapsule_New = ctypes.pythonapi.PyCapsule_New + _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p) + _PyCapsule_New.restype = ctypes.py_object + _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer + _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p) + _PyCapsule_GetPointer.restype = ctypes.c_void_p + _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor + _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,) + _PyCapsule_GetDestructor.restype = ctypes.c_void_p + _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext + _PyCapsule_GetContext.argtypes = (ctypes.py_object,) + _PyCapsule_GetContext.restype = ctypes.c_void_p + _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName + _PyCapsule_GetName.argtypes = (ctypes.py_object,) + _PyCapsule_GetName.restype = ctypes.c_char_p + _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid + _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p) + _PyCapsule_IsValid.restype = ctypes.c_bool + _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext + 
_PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
+    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
+    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
+    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
+    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
+    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
+    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
+    #from _socket import CAPI as _testcapsule
+    _testcapsule_name = b'dill._dill._testcapsule'
+    _testcapsule = _PyCapsule_New(
+        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
+        ctypes.c_char_p(_testcapsule_name),
+        None
+    )
+    PyCapsuleType = type(_testcapsule)
+    @register(PyCapsuleType)
+    def save_capsule(pickler, obj):
+        logger.trace(pickler, "Cap: %s", obj)
+        name = _PyCapsule_GetName(obj)
+        #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
+        pointer = _PyCapsule_GetPointer(obj, name)
+        context = _PyCapsule_GetContext(obj)
+        destructor = _PyCapsule_GetDestructor(obj)
+        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
+        logger.trace(pickler, "# Cap")
+    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
+    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
+    _incedental_types.add(PyCapsuleType)
+else:
+    _testcapsule = None
+
+
+#############################
+# A quick fix for issue #500
+# This should be removed when a better solution is found.
+
+if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
+    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
+    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
+        logger.trace(pickler, "DcHDF: %s", obj)
+        pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
+        logger.trace(pickler, "# DcHDF")
+
+if hasattr(dataclasses, "MISSING"):
+    @register(type(dataclasses.MISSING))
+    def save_dataclasses_MISSING_TYPE(pickler, obj):
+        logger.trace(pickler, "DcM: %s", obj)
+        pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
+        logger.trace(pickler, "# DcM")
+
+if hasattr(dataclasses, "KW_ONLY"):
+    @register(type(dataclasses.KW_ONLY))
+    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
+        logger.trace(pickler, "DcKWO: %s", obj)
+        pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
+        logger.trace(pickler, "# DcKWO")
+
+if hasattr(dataclasses, "_FIELD_BASE"):
+    @register(dataclasses._FIELD_BASE)
+    def save_dataclasses_FIELD_BASE(pickler, obj):
+        logger.trace(pickler, "DcFB: %s", obj)
+        pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
+        logger.trace(pickler, "# DcFB")
+
+#############################
+
+# quick sanity checking
+def pickles(obj,exact=False,safe=False,**kwds):
+    """
+    Quick check if object pickles with dill.
+
+    If *exact=True* then an equality test is done to check if the reconstructed
+    object matches the original object.
+
+    If *safe=True* then any exception raised in copy will signal that the
+    object is not picklable; otherwise only pickling errors will be trapped.
+
+    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
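+
+    Example (a doctest-style sketch; assumes dill's default settings):
+
+        >>> pickles([1, 2, 3])
+        True
+        >>> pickles(lambda x: x*x)
+        True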
+ """ + if safe: exceptions = (Exception,) # RuntimeError, ValueError + else: + exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError) + try: + pik = copy(obj, **kwds) + #FIXME: should check types match first, then check content if "exact" + try: + #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ + result = bool(pik.all() == obj.all()) + except (AttributeError, TypeError): + warnings.filterwarnings('ignore') #FIXME: be specific + result = pik == obj + if warnings.filters: del warnings.filters[0] + if hasattr(result, 'toarray'): # for unusual types like sparse matrix + result = result.toarray().all() + if result: return True + if not exact: + result = type(pik) == type(obj) + if result: return result + # class instances might have been dumped with byref=False + return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType? + return False + except exceptions: + return False + +def check(obj, *args, **kwds): + """ + Check pickling of an object across another process. + + *python* is the path to the python interpreter (defaults to sys.executable) + + Set *verbose=True* to print the unpickled object in the other process. + + Additional keyword arguments are as :func:`dumps` and :func:`loads`. + """ + # == undocumented == + # python -- the string path or executable name of the selected python + # verbose -- if True, be verbose about printing warning messages + # all other args and kwds are passed to dill.dumps #FIXME: ignore on load + verbose = kwds.pop('verbose', False) + python = kwds.pop('python', None) + if python is None: + import sys + python = sys.executable + # type check + isinstance(python, str) + import subprocess + fail = True + try: + _obj = dumps(obj, *args, **kwds) + fail = False + finally: + if fail and verbose: + print("DUMP FAILED") + #FIXME: fails if python interpreter path contains spaces + # Use the following instead (which also processes the 'ignore' keyword): + # ignore = kwds.pop('ignore', None) + # unpickle = "dill.loads(%s, ignore=%s)"%(repr(_obj), repr(ignore)) + # cmd = [python, "-c", "import dill; print(%s)"%unpickle] + # msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED" + msg = "%s -c import dill; print(dill.loads(%s))" % (python, repr(_obj)) + msg = "SUCCESS" if not subprocess.call(msg.split(None,2)) else "LOAD FAILED" + if verbose: + print(msg) + return + +# use to protect against missing attributes +def is_dill(pickler, child=None): + "check the dill-ness of your pickler" + if child is False or not hasattr(pickler.__class__, 'mro'): + return 'dill' in pickler.__module__ + return Pickler in pickler.__class__.mro() + +def _extend(): + """extend pickle with all of dill's registered types""" + # need to have pickle not choke on _main_module? 
use is_dill(pickler) + for t,func in Pickler.dispatch.items(): + try: + StockPickler.dispatch[t] = func + except Exception: #TypeError, PicklingError, UnpicklingError + logger.trace(pickler, "skip: %s", t) + return + +del diff, _use_diff, use_diff + +# EOF diff --git a/llmeval-env/lib/python3.10/site-packages/dill/_objects.py b/llmeval-env/lib/python3.10/site-packages/dill/_objects.py new file mode 100644 index 0000000000000000000000000000000000000000..b2c02d3bbcf025144c9296c5d3ef30d5318a1870 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/_objects.py @@ -0,0 +1,537 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +all Python Standard Library objects (currently: CH 1-15 @ 2.7) +and some other common objects (i.e. numpy.ndarray) +""" + +__all__ = ['registered','failures','succeeds'] + +# helper imports +import warnings; warnings.filterwarnings("ignore", category=DeprecationWarning) +import sys +import queue as Queue +import dbm as anydbm +from io import BytesIO as StringIO +import re +import array +import collections +import codecs +import struct +import dataclasses +import datetime +import calendar +import weakref +import pprint +import decimal +import numbers +import functools +import itertools +import operator +import tempfile +import shelve +import zlib +import gzip +import zipfile +import tarfile +import csv +import hashlib +import hmac +import os +import logging +import logging.handlers +import optparse +#import __hello__ +import threading +import socket +import contextlib +try: + import bz2 + import sqlite3 + import dbm.ndbm as dbm + HAS_ALL = True +except ImportError: # Ubuntu + HAS_ALL = False +try: + #import curses + #from curses import textpad, panel + HAS_CURSES = True +except ImportError: # Windows + HAS_CURSES = False +try: + import ctypes + HAS_CTYPES = True + # if using `pypy`, pythonapi is not found + IS_PYPY = not hasattr(ctypes, 'pythonapi') +except ImportError: # MacPorts + HAS_CTYPES = False + IS_PYPY = False + +# helper objects +class _class: + def _method(self): + pass +# @classmethod +# def _clsmethod(cls): #XXX: test me +# pass +# @staticmethod +# def _static(self): #XXX: test me +# pass +class _class2: + def __call__(self): + pass +_instance2 = _class2() +class _newclass(object): + def _method(self): + pass +# @classmethod +# def _clsmethod(cls): #XXX: test me +# pass +# @staticmethod +# def _static(self): #XXX: test me +# pass +class _newclass2(object): + __slots__ = ['descriptor'] +def _function(x): yield x +def _function2(): + try: raise + except Exception: + from sys import exc_info + e, er, tb = exc_info() + return er, tb +if HAS_CTYPES: + class _Struct(ctypes.Structure): + pass + _Struct._fields_ = [("_field", ctypes.c_int),("next", ctypes.POINTER(_Struct))] +_filedescrip, _tempfile = tempfile.mkstemp('r') # deleted in cleanup +if sys.hexversion < 0x30d00a1: + _tmpf = tempfile.TemporaryFile('w') # emits OSError 9 in python 3.13 +else: + _tmpf = tempfile.NamedTemporaryFile('w').file # for > python 3.9 + +# objects used by dill for type declaration +registered = d = {} +# objects dill fails to pickle +failures = x = {} +# all other type objects +succeeds = a = {} + +# types module (part of CH 8) +a['BooleanType'] = bool(1) 
+a['BuiltinFunctionType'] = len +a['BuiltinMethodType'] = a['BuiltinFunctionType'] +a['BytesType'] = _bytes = codecs.latin_1_encode('\x00')[0] # bytes(1) +a['ClassType'] = _class +a['ComplexType'] = complex(1) +a['DictType'] = _dict = {} +a['DictionaryType'] = a['DictType'] +a['FloatType'] = float(1) +a['FunctionType'] = _function +a['InstanceType'] = _instance = _class() +a['IntType'] = _int = int(1) +a['ListType'] = _list = [] +a['NoneType'] = None +a['ObjectType'] = object() +a['StringType'] = _str = str(1) +a['TupleType'] = _tuple = () +a['TypeType'] = type +a['LongType'] = _int +a['UnicodeType'] = _str +# built-in constants (CH 4) +a['CopyrightType'] = copyright +# built-in types (CH 5) +a['ClassObjectType'] = _newclass # +a['ClassInstanceType'] = _newclass() # +a['SetType'] = _set = set() +a['FrozenSetType'] = frozenset() +# built-in exceptions (CH 6) +a['ExceptionType'] = _exception = _function2()[0] +# string services (CH 7) +a['SREPatternType'] = _srepattern = re.compile('') +# data types (CH 8) +a['ArrayType'] = array.array("f") +a['DequeType'] = collections.deque([0]) +a['DefaultDictType'] = collections.defaultdict(_function, _dict) +a['TZInfoType'] = datetime.tzinfo() +a['DateTimeType'] = datetime.datetime.today() +a['CalendarType'] = calendar.Calendar() +# numeric and mathematical types (CH 9) +a['DecimalType'] = decimal.Decimal(1) +a['CountType'] = itertools.count(0) +# data compression and archiving (CH 12) +a['TarInfoType'] = tarfile.TarInfo() +# generic operating system services (CH 15) +a['LoggerType'] = _logger = logging.getLogger() +a['FormatterType'] = logging.Formatter() # pickle ok +a['FilterType'] = logging.Filter() # pickle ok +a['LogRecordType'] = logging.makeLogRecord(_dict) # pickle ok +a['OptionParserType'] = _oparser = optparse.OptionParser() # pickle ok +a['OptionGroupType'] = optparse.OptionGroup(_oparser,"foo") # pickle ok +a['OptionType'] = optparse.Option('--foo') # pickle ok +if HAS_CTYPES: + z = x if IS_PYPY else a + z['CCharType'] = _cchar = ctypes.c_char() + z['CWCharType'] = ctypes.c_wchar() # fail == 2.6 + z['CByteType'] = ctypes.c_byte() + z['CUByteType'] = ctypes.c_ubyte() + z['CShortType'] = ctypes.c_short() + z['CUShortType'] = ctypes.c_ushort() + z['CIntType'] = ctypes.c_int() + z['CUIntType'] = ctypes.c_uint() + z['CLongType'] = ctypes.c_long() + z['CULongType'] = ctypes.c_ulong() + z['CLongLongType'] = ctypes.c_longlong() + z['CULongLongType'] = ctypes.c_ulonglong() + z['CFloatType'] = ctypes.c_float() + z['CDoubleType'] = ctypes.c_double() + z['CSizeTType'] = ctypes.c_size_t() + del z + a['CLibraryLoaderType'] = ctypes.cdll + a['StructureType'] = _Struct + # if not IS_PYPY: + # a['BigEndianStructureType'] = ctypes.BigEndianStructure() +#NOTE: also LittleEndianStructureType and UnionType... 
abstract classes +#NOTE: remember for ctypesobj.contents creates a new python object +#NOTE: ctypes.c_int._objects is memberdescriptor for object's __dict__ +#NOTE: base class of all ctypes data types is non-public _CData + +import fractions +import io +from io import StringIO as TextIO +# built-in functions (CH 2) +a['ByteArrayType'] = bytearray([1]) +# numeric and mathematical types (CH 9) +a['FractionType'] = fractions.Fraction() +a['NumberType'] = numbers.Number() +# generic operating system services (CH 15) +a['IOBaseType'] = io.IOBase() +a['RawIOBaseType'] = io.RawIOBase() +a['TextIOBaseType'] = io.TextIOBase() +a['BufferedIOBaseType'] = io.BufferedIOBase() +a['UnicodeIOType'] = TextIO() # the new StringIO +a['LoggerAdapterType'] = logging.LoggerAdapter(_logger,_dict) # pickle ok +if HAS_CTYPES: + z = x if IS_PYPY else a + z['CBoolType'] = ctypes.c_bool(1) + z['CLongDoubleType'] = ctypes.c_longdouble() + del z +import argparse +# data types (CH 8) +a['OrderedDictType'] = collections.OrderedDict(_dict) +a['CounterType'] = collections.Counter(_dict) +if HAS_CTYPES: + z = x if IS_PYPY else a + z['CSSizeTType'] = ctypes.c_ssize_t() + del z +# generic operating system services (CH 15) +a['NullHandlerType'] = logging.NullHandler() # pickle ok # new 2.7 +a['ArgParseFileType'] = argparse.FileType() # pickle ok + +# -- pickle fails on all below here ----------------------------------------- +# types module (part of CH 8) +a['CodeType'] = compile('','','exec') +a['DictProxyType'] = type.__dict__ +a['DictProxyType2'] = _newclass.__dict__ +a['EllipsisType'] = Ellipsis +a['ClosedFileType'] = open(os.devnull, 'wb', buffering=0).close() +a['GetSetDescriptorType'] = array.array.typecode +a['LambdaType'] = _lambda = lambda x: lambda y: x #XXX: works when not imported! +a['MemberDescriptorType'] = _newclass2.descriptor +if not IS_PYPY: + a['MemberDescriptorType2'] = datetime.timedelta.days +a['MethodType'] = _method = _class()._method #XXX: works when not imported! +a['ModuleType'] = datetime +a['NotImplementedType'] = NotImplemented +a['SliceType'] = slice(1) +a['UnboundMethodType'] = _class._method #XXX: works when not imported! 
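+# (the open file handles below live in `d`, the `registered` table declared
+# above, i.e. exemplars of types that dill explicitly handles)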
+d['TextWrapperType'] = open(os.devnull, 'r') # same as mode='w','w+','r+' +d['BufferedRandomType'] = open(os.devnull, 'r+b') # same as mode='w+b' +d['BufferedReaderType'] = open(os.devnull, 'rb') # (default: buffering=-1) +d['BufferedWriterType'] = open(os.devnull, 'wb') +try: # oddities: deprecated + from _pyio import open as _open + d['PyTextWrapperType'] = _open(os.devnull, 'r', buffering=-1) + d['PyBufferedRandomType'] = _open(os.devnull, 'r+b', buffering=-1) + d['PyBufferedReaderType'] = _open(os.devnull, 'rb', buffering=-1) + d['PyBufferedWriterType'] = _open(os.devnull, 'wb', buffering=-1) +except ImportError: + pass +# other (concrete) object types +z = d if sys.hexversion < 0x30800a2 else a +z['CellType'] = (_lambda)(0).__closure__[0] +del z +a['XRangeType'] = _xrange = range(1) +a['MethodDescriptorType'] = type.__dict__['mro'] +a['WrapperDescriptorType'] = type.__repr__ +#a['WrapperDescriptorType2'] = type.__dict__['__module__']#XXX: GetSetDescriptor +a['ClassMethodDescriptorType'] = type.__dict__['__prepare__'] +# built-in functions (CH 2) +_methodwrap = (1).__lt__ +a['MethodWrapperType'] = _methodwrap +a['StaticMethodType'] = staticmethod(_method) +a['ClassMethodType'] = classmethod(_method) +a['PropertyType'] = property() +d['SuperType'] = super(Exception, _exception) +# string services (CH 7) +_in = _bytes +a['InputType'] = _cstrI = StringIO(_in) +a['OutputType'] = _cstrO = StringIO() +# data types (CH 8) +a['WeakKeyDictionaryType'] = weakref.WeakKeyDictionary() +a['WeakValueDictionaryType'] = weakref.WeakValueDictionary() +a['ReferenceType'] = weakref.ref(_instance) +a['DeadReferenceType'] = weakref.ref(_class()) +a['ProxyType'] = weakref.proxy(_instance) +a['DeadProxyType'] = weakref.proxy(_class()) +a['CallableProxyType'] = weakref.proxy(_instance2) +a['DeadCallableProxyType'] = weakref.proxy(_class2()) +a['QueueType'] = Queue.Queue() +# numeric and mathematical types (CH 9) +d['PartialType'] = functools.partial(int,base=2) +a['IzipType'] = zip('0','1') +a['ChainType'] = itertools.chain('0','1') +d['ItemGetterType'] = operator.itemgetter(0) +d['AttrGetterType'] = operator.attrgetter('__repr__') +# file and directory access (CH 10) +_fileW = _cstrO +# data persistence (CH 11) +if HAS_ALL: + x['ConnectionType'] = _conn = sqlite3.connect(':memory:') + x['CursorType'] = _conn.cursor() +a['ShelveType'] = shelve.Shelf({}) +# data compression and archiving (CH 12) +if HAS_ALL: + x['BZ2FileType'] = bz2.BZ2File(os.devnull) + x['BZ2CompressorType'] = bz2.BZ2Compressor() + x['BZ2DecompressorType'] = bz2.BZ2Decompressor() +#x['ZipFileType'] = _zip = zipfile.ZipFile(os.devnull,'w') +#_zip.write(_tempfile,'x') [causes annoying warning/error printed on import] +#a['ZipInfoType'] = _zip.getinfo('x') +a['TarFileType'] = tarfile.open(fileobj=_fileW,mode='w') +# file formats (CH 13) +x['DialectType'] = csv.get_dialect('excel') +if sys.hexversion < 0x30d00a1: + import xdrlib + a['PackerType'] = xdrlib.Packer() +# optional operating system services (CH 16) +a['LockType'] = threading.Lock() +a['RLockType'] = threading.RLock() +# generic operating system services (CH 15) # also closed/open and r/w/etc... +a['NamedLoggerType'] = _logger = logging.getLogger(__name__) +#a['FrozenModuleType'] = __hello__ #FIXME: prints "Hello world..." 
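+# (sockets wrap OS-level descriptors, so the CH 17 exemplars below sit in
+# `x`, the failures table)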
+# interprocess communication (CH 17) +x['SocketType'] = _socket = socket.socket() +x['SocketPairType'] = socket.socketpair()[0] +# python runtime services (CH 27) +a['GeneratorContextManagerType'] = contextlib.contextmanager(max)([1]) + +try: # ipython + __IPYTHON__ is True # is ipython +except NameError: + # built-in constants (CH 4) + a['QuitterType'] = quit + d['ExitType'] = a['QuitterType'] +try: # numpy #FIXME: slow... 0.05 to 0.1 sec to import numpy + from numpy import ufunc as _numpy_ufunc + from numpy import array as _numpy_array + from numpy import int32 as _numpy_int32 + a['NumpyUfuncType'] = _numpy_ufunc + a['NumpyArrayType'] = _numpy_array + a['NumpyInt32Type'] = _numpy_int32 +except ImportError: + pass +# numeric and mathematical types (CH 9) +a['ProductType'] = itertools.product('0','1') +# generic operating system services (CH 15) +a['FileHandlerType'] = logging.FileHandler(os.devnull) +a['RotatingFileHandlerType'] = logging.handlers.RotatingFileHandler(os.devnull) +a['SocketHandlerType'] = logging.handlers.SocketHandler('localhost',514) +a['MemoryHandlerType'] = logging.handlers.MemoryHandler(1) +# data types (CH 8) +a['WeakSetType'] = weakref.WeakSet() # 2.7 +# generic operating system services (CH 15) [errors when dill is imported] +#a['ArgumentParserType'] = _parser = argparse.ArgumentParser('PROG') +#a['NamespaceType'] = _parser.parse_args() # pickle ok +#a['SubParsersActionType'] = _parser.add_subparsers() +#a['MutuallyExclusiveGroupType'] = _parser.add_mutually_exclusive_group() +#a['ArgumentGroupType'] = _parser.add_argument_group() + +# -- dill fails in some versions below here --------------------------------- +# types module (part of CH 8) +d['FileType'] = open(os.devnull, 'rb', buffering=0) # same 'wb','wb+','rb+' +# built-in functions (CH 2) +# Iterators: +a['ListIteratorType'] = iter(_list) # empty vs non-empty +a['SetIteratorType'] = iter(_set) #XXX: empty vs non-empty #FIXME: list_iterator +a['TupleIteratorType']= iter(_tuple) # empty vs non-empty +a['XRangeIteratorType'] = iter(_xrange) # empty vs non-empty +a["BytesIteratorType"] = iter(b'') +a["BytearrayIteratorType"] = iter(bytearray(b'')) +z = x if IS_PYPY else a +z["CallableIteratorType"] = iter(iter, None) +del z +x["MemoryIteratorType"] = iter(memoryview(b'')) +a["ListReverseiteratorType"] = reversed([]) +X = a['OrderedDictType'] +d["OdictKeysType"] = X.keys() +d["OdictValuesType"] = X.values() +d["OdictItemsType"] = X.items() +a["OdictIteratorType"] = iter(X.keys()) #FIXME: list_iterator +del X +#FIXME: list_iterator +a['DictionaryItemIteratorType'] = iter(type.__dict__.items()) +a['DictionaryKeyIteratorType'] = iter(type.__dict__.keys()) +a['DictionaryValueIteratorType'] = iter(type.__dict__.values()) +if sys.hexversion >= 0x30800a0: + a["DictReversekeyiteratorType"] = reversed({}.keys()) + a["DictReversevalueiteratorType"] = reversed({}.values()) + a["DictReverseitemiteratorType"] = reversed({}.items()) + +try: + import symtable + #FIXME: fails to pickle + x["SymtableEntryType"] = symtable.symtable("", "string", "exec")._table +except ImportError: + pass + +if sys.hexversion >= 0x30a00a0 and not IS_PYPY: + x['LineIteratorType'] = compile('3', '', 'eval').co_lines() + +if sys.hexversion >= 0x30b00b0: + from types import GenericAlias + d["GenericAliasIteratorType"] = iter(GenericAlias(list, (int,))) + x['PositionsIteratorType'] = compile('3', '', 'eval').co_positions() + +# data types (CH 8) +a['PrettyPrinterType'] = pprint.PrettyPrinter() +# numeric and mathematical types (CH 9) +a['CycleType'] = 
itertools.cycle('0') +# file and directory access (CH 10) +a['TemporaryFileType'] = _tmpf +# data compression and archiving (CH 12) +x['GzipFileType'] = gzip.GzipFile(fileobj=_fileW) +# generic operating system services (CH 15) +a['StreamHandlerType'] = logging.StreamHandler() +# numeric and mathematical types (CH 9) +a['PermutationsType'] = itertools.permutations('0') +a['CombinationsType'] = itertools.combinations('0',1) +a['RepeatType'] = itertools.repeat(0) +a['CompressType'] = itertools.compress('0',[1]) +#XXX: ...and etc + +# -- dill fails on all below here ------------------------------------------- +# types module (part of CH 8) +x['GeneratorType'] = _generator = _function(1) #XXX: priority +x['FrameType'] = _generator.gi_frame #XXX: inspect.currentframe() +x['TracebackType'] = _function2()[1] #(see: inspect.getouterframes,getframeinfo) +# other (concrete) object types +# (also: Capsule / CObject ?) +# built-in functions (CH 2) +# built-in types (CH 5) +# string services (CH 7) +x['StructType'] = struct.Struct('c') +x['CallableIteratorType'] = _srepattern.finditer('') +x['SREMatchType'] = _srepattern.match('') +x['SREScannerType'] = _srepattern.scanner('') +x['StreamReader'] = codecs.StreamReader(_cstrI) #XXX: ... and etc +# python object persistence (CH 11) +# x['DbShelveType'] = shelve.open('foo','n')#,protocol=2) #XXX: delete foo +if HAS_ALL: + z = a if IS_PYPY else x + z['DbmType'] = dbm.open(_tempfile,'n') + del z +# x['DbCursorType'] = _dbcursor = anydbm.open('foo','n') #XXX: delete foo +# x['DbType'] = _dbcursor.db +# data compression and archiving (CH 12) +x['ZlibCompressType'] = zlib.compressobj() +x['ZlibDecompressType'] = zlib.decompressobj() +# file formats (CH 13) +x['CSVReaderType'] = csv.reader(_cstrI) +x['CSVWriterType'] = csv.writer(_cstrO) +x['CSVDictReaderType'] = csv.DictReader(_cstrI) +x['CSVDictWriterType'] = csv.DictWriter(_cstrO,{}) +# cryptographic services (CH 14) +x['HashType'] = hashlib.md5() +if (sys.hexversion < 0x30800a1): + x['HMACType'] = hmac.new(_in) +else: + x['HMACType'] = hmac.new(_in, digestmod='md5') +# generic operating system services (CH 15) +if HAS_CURSES: pass + #x['CursesWindowType'] = _curwin = curses.initscr() #FIXME: messes up tty + #x['CursesTextPadType'] = textpad.Textbox(_curwin) + #x['CursesPanelType'] = panel.new_panel(_curwin) +if HAS_CTYPES: + x['CCharPType'] = ctypes.c_char_p() + x['CWCharPType'] = ctypes.c_wchar_p() + x['CVoidPType'] = ctypes.c_void_p() + if sys.platform[:3] == 'win': + x['CDLLType'] = _cdll = ctypes.cdll.msvcrt + else: + x['CDLLType'] = _cdll = ctypes.CDLL(None) + if not IS_PYPY: + x['PyDLLType'] = _pydll = ctypes.pythonapi + x['FuncPtrType'] = _cdll._FuncPtr() + x['CCharArrayType'] = ctypes.create_string_buffer(1) + x['CWCharArrayType'] = ctypes.create_unicode_buffer(1) + x['CParamType'] = ctypes.byref(_cchar) + x['LPCCharType'] = ctypes.pointer(_cchar) + x['LPCCharObjType'] = _lpchar = ctypes.POINTER(ctypes.c_char) + x['NullPtrType'] = _lpchar() + x['NullPyObjectType'] = ctypes.py_object() + x['PyObjectType'] = ctypes.py_object(lambda :None) + z = a if IS_PYPY else x + z['FieldType'] = _field = _Struct._field + z['CFUNCTYPEType'] = _cfunc = ctypes.CFUNCTYPE(ctypes.c_char) + if sys.hexversion < 0x30c00b3: + x['CFunctionType'] = _cfunc(str) + del z +# numeric and mathematical types (CH 9) +a['MethodCallerType'] = operator.methodcaller('mro') # 2.6 +# built-in types (CH 5) +x['MemoryType'] = memoryview(_in) # 2.7 +x['MemoryType2'] = memoryview(bytearray(_in)) # 2.7 +d['DictItemsType'] = _dict.items() # 2.7 
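+
+#NOTE: (editor's sketch, illustrative) individual entries can be probed with
+# dill.pickles() to spot-check the section markers, e.g.:
+#   >>> import dill
+#   >>> dill.pickles(d['PartialType'])   # functools.partial(int, base=2)
+#   True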
+d['DictKeysType'] = _dict.keys() # 2.7 +d['DictValuesType'] = _dict.values() # 2.7 +# generic operating system services (CH 15) +a['RawTextHelpFormatterType'] = argparse.RawTextHelpFormatter('PROG') +a['RawDescriptionHelpFormatterType'] = argparse.RawDescriptionHelpFormatter('PROG') +a['ArgDefaultsHelpFormatterType'] = argparse.ArgumentDefaultsHelpFormatter('PROG') +z = a if IS_PYPY else x +z['CmpKeyType'] = _cmpkey = functools.cmp_to_key(_methodwrap) # 2.7, >=3.2 +z['CmpKeyObjType'] = _cmpkey('0') #2.7, >=3.2 +del z +# oddities: removed, etc +x['BufferType'] = x['MemoryType'] + +from dill._dill import _testcapsule +if _testcapsule is not None: + d['PyCapsuleType'] = _testcapsule +del _testcapsule + +if hasattr(dataclasses, '_HAS_DEFAULT_FACTORY'): + a['DataclassesHasDefaultFactoryType'] = dataclasses._HAS_DEFAULT_FACTORY + +if hasattr(dataclasses, 'MISSING'): + a['DataclassesMissingType'] = dataclasses.MISSING + +if hasattr(dataclasses, 'KW_ONLY'): + a['DataclassesKWOnlyType'] = dataclasses.KW_ONLY + +if hasattr(dataclasses, '_FIELD_BASE'): + a['DataclassesFieldBaseType'] = dataclasses._FIELD + +# -- cleanup ---------------------------------------------------------------- +a.update(d) # registered also succeed +if sys.platform[:3] == 'win': + os.close(_filedescrip) # required on win32 +os.remove(_tempfile) + + +# EOF diff --git a/llmeval-env/lib/python3.10/site-packages/dill/_shims.py b/llmeval-env/lib/python3.10/site-packages/dill/_shims.py new file mode 100644 index 0000000000000000000000000000000000000000..da1abbecc834859271d5e096f041fe299e04e072 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/_shims.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Author: Anirudh Vegesana (avegesan@cs.stanford.edu) +# Copyright (c) 2021-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +Provides shims for compatibility between versions of dill and Python. + +Compatibility shims should be provided in this file. Here are two simple example +use cases. + +Deprecation of constructor function: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Assume that we were transitioning _import_module in _dill.py to +the builtin function importlib.import_module when present. + +@move_to(_dill) +def _import_module(import_name): + ... # code already in _dill.py + +_import_module = Getattr(importlib, 'import_module', Getattr(_dill, '_import_module', None)) + +The code will attempt to find import_module in the importlib module. If not +present, it will use the _import_module function in _dill. + +Emulate new Python behavior in older Python versions: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +CellType.cell_contents behaves differently in Python 3.6 and 3.7. It is +read-only in Python 3.6 and writable and deletable in 3.7. + +if _dill.OLD37 and _dill.HAS_CTYPES and ...: + @move_to(_dill) + def _setattr(object, name, value): + if type(object) is _dill.CellType and name == 'cell_contents': + _PyCell_Set.argtypes = (ctypes.py_object, ctypes.py_object) + _PyCell_Set(object, value) + else: + setattr(object, name, value) +... # more cases below + +_setattr = Getattr(_dill, '_setattr', setattr) + +_dill._setattr will be used when present to emulate Python 3.7 functionality in +older versions of Python while defaulting to the standard setattr in 3.7+. 
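+
+(Editor's note, illustrative) The assignment itself is the shim that gets
+pickled by reference:
+
+    _setattr = Getattr(_dill, '_setattr', setattr)
+
+which unpickles as getattr(_dill, '_setattr', setattr): the emulation above is
+found when it was defined (older pythons), and the builtin setattr otherwise.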
+ +See this PR for the discussion that led to this system: +https://github.com/uqfoundation/dill/pull/443 +""" + +import inspect +import sys + +_dill = sys.modules['dill._dill'] + + +class Reduce(object): + """ + Reduce objects are wrappers used for compatibility enforcement during + unpickle-time. They should only be used in calls to pickler.save and + other Reduce objects. They are only evaluated within unpickler.load. + + Pickling a Reduce object makes the two implementations equivalent: + + pickler.save(Reduce(*reduction)) + + pickler.save_reduce(*reduction, obj=reduction) + """ + __slots__ = ['reduction'] + def __new__(cls, *reduction, **kwargs): + """ + Args: + *reduction: a tuple that matches the format given here: + https://docs.python.org/3/library/pickle.html#object.__reduce__ + is_callable: a bool to indicate that the object created by + unpickling `reduction` is callable. If true, the current Reduce + is allowed to be used as the function in further save_reduce calls + or Reduce objects. + """ + is_callable = kwargs.get('is_callable', False) # Pleases Py2. Can be removed later + if is_callable: + self = object.__new__(_CallableReduce) + else: + self = object.__new__(Reduce) + self.reduction = reduction + return self + def __repr__(self): + return 'Reduce%s' % (self.reduction,) + def __copy__(self): + return self # pragma: no cover + def __deepcopy__(self, memo): + return self # pragma: no cover + def __reduce__(self): + return self.reduction + def __reduce_ex__(self, protocol): + return self.__reduce__() + +class _CallableReduce(Reduce): + # A version of Reduce for functions. Used to trick pickler.save_reduce into + # thinking that Reduce objects of functions are themselves meaningful functions. + def __call__(self, *args, **kwargs): + reduction = self.__reduce__() + func = reduction[0] + f_args = reduction[1] + obj = func(*f_args) + return obj(*args, **kwargs) + +__NO_DEFAULT = _dill.Sentinel('Getattr.NO_DEFAULT') + +def Getattr(object, name, default=__NO_DEFAULT): + """ + A Reduce object that represents the getattr operation. When unpickled, the + Getattr will access an attribute 'name' of 'object' and return the value + stored there. If the attribute doesn't exist, the default value will be + returned if present. + + The following statements are equivalent: + + Getattr(collections, 'OrderedDict') + Getattr(collections, 'spam', None) + Getattr(*args) + + Reduce(getattr, (collections, 'OrderedDict')) + Reduce(getattr, (collections, 'spam', None)) + Reduce(getattr, args) + + During unpickling, the first two will result in collections.OrderedDict and + None respectively because the first attribute exists and the second one does + not, forcing it to use the default value given in the third argument. + """ + + if default is Getattr.NO_DEFAULT: + reduction = (getattr, (object, name)) + else: + reduction = (getattr, (object, name, default)) + + return Reduce(*reduction, is_callable=callable(default)) + +Getattr.NO_DEFAULT = __NO_DEFAULT +del __NO_DEFAULT + +def move_to(module, name=None): + def decorator(func): + if name is None: + fname = func.__name__ + else: + fname = name + module.__dict__[fname] = func + func.__module__ = module.__name__ + return func + return decorator + +def register_shim(name, default): + """ + An easier-to-understand and more compact way of "softly" defining a function. + These two pieces of code are equivalent: + + if _dill.OLD3X: + def _create_class(): + ...
+ _create_class = register_shim('_create_class', types.new_class) + + if _dill.OLD3X: + @move_to(_dill) + def _create_class(): + ... + _create_class = Getattr(_dill, '_create_class', types.new_class) + + Intuitively, it creates a function or object in the versions of dill/python + that require special reimplementations, and uses a core library or default + implementation if that function or object does not exist. + """ + func = globals().get(name) + if func is not None: + _dill.__dict__[name] = func + func.__module__ = _dill.__name__ + + if default is Getattr.NO_DEFAULT: + reduction = (getattr, (_dill, name)) + else: + reduction = (getattr, (_dill, name, default)) + + return Reduce(*reduction, is_callable=callable(default)) + +###################### +## Compatibility Shims are defined below +###################### + +_CELL_EMPTY = register_shim('_CELL_EMPTY', None) + +_setattr = register_shim('_setattr', setattr) +_delattr = register_shim('_delattr', delattr) diff --git a/llmeval-env/lib/python3.10/site-packages/dill/detect.py b/llmeval-env/lib/python3.10/site-packages/dill/detect.py new file mode 100644 index 0000000000000000000000000000000000000000..6f76e729d2469fa3f028a189314977c11397edd7 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/detect.py @@ -0,0 +1,284 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +Methods for detecting objects leading to pickling failures. +""" + +import dis +from inspect import ismethod, isfunction, istraceback, isframe, iscode + +from .pointers import parent, reference, at, parents, children +from .logger import trace + +__all__ = ['baditems','badobjects','badtypes','code','errors','freevars', + 'getmodule','globalvars','nestedcode','nestedglobals','outermost', + 'referredglobals','referrednested','trace','varnames'] + +def getmodule(object, _filename=None, force=False): + """get the module of the object""" + from inspect import getmodule as getmod + module = getmod(object, _filename) + if module or not force: return module + import builtins + from .source import getname + name = getname(object, force=True) + return builtins if name in vars(builtins).keys() else None + +def outermost(func): # is analogous to getsource(func,enclosing=True) + """get outermost enclosing object (i.e. the outer function in a closure) + + NOTE: this is the object-equivalent of getsource(func, enclosing=True) + """ + if ismethod(func): + _globals = func.__func__.__globals__ or {} + elif isfunction(func): + _globals = func.__globals__ or {} + else: + return #XXX: or raise? no matches + _globals = _globals.items() + # get the enclosing source + from .source import getsourcelines + try: lines,lnum = getsourcelines(func, enclosing=True) + except Exception: #TypeError, IOError + lines,lnum = [],None + code = ''.join(lines) + # get all possible names,objects that are named in the enclosing source + _locals = ((name,obj) for (name,obj) in _globals if name in code) + # now only save the objects that generate the enclosing block + for name,obj in _locals: #XXX: don't really need 'name' + try: + if getsourcelines(obj) == (lines,lnum): return obj + except Exception: #TypeError, IOError + pass + return #XXX: or raise?
no matches + +def nestedcode(func, recurse=True): #XXX: or return dict of {co_name: co} ? + """get the code objects for any nested functions (e.g. in a closure)""" + func = code(func) + if not iscode(func): return [] #XXX: or raise? no matches + nested = set() + for co in func.co_consts: + if co is None: continue + co = code(co) + if co: + nested.add(co) + if recurse: nested |= set(nestedcode(co, recurse=True)) + return list(nested) + +def code(func): + """get the code object for the given function or method + + NOTE: use dill.source.getsource(CODEOBJ) to get the source code + """ + if ismethod(func): func = func.__func__ + if isfunction(func): func = func.__code__ + if istraceback(func): func = func.tb_frame + if isframe(func): func = func.f_code + if iscode(func): return func + return + +#XXX: ugly: parse dis.dis for name after "<code object" in line and in globals? +def referrednested(func, recurse=True): #XXX: return dict of {__name__: obj} ? + """get functions defined inside of func (e.g. inner functions in a closure) + + NOTE: results may differ if the function has been executed or not. + If len(nestedcode(func)) > len(referrednested(func)), try calling func(). + If possible, python builds code objects, but delays building functions + until func() is called. + """ + import gc + funcs = set() + # get the code objects, and try to track down by reference + for co in nestedcode(func, recurse): + # look for function objects that refer to the code object + for obj in gc.get_referrers(co): + # get methods + _ = getattr(obj, '__func__', None) # ismethod + if getattr(_, '__code__', None) is co: funcs.add(obj) + # get functions + elif getattr(obj, '__code__', None) is co: funcs.add(obj) + # get frame objects + elif getattr(obj, 'f_code', None) is co: funcs.add(obj) + # get code objects + elif hasattr(obj, 'co_code') and obj is co: funcs.add(obj) +# frameobjs => func.__code__.co_varnames not in func.__code__.co_cellvars +# funcobjs => func.__code__.co_cellvars not in func.__code__.co_varnames +# frameobjs are not found, however funcobjs are... +# (see: test_mixins.quad ...
and test_mixins.wtf) +# after execution, code objects get compiled, and then may be found by gc + return list(funcs) + + +def freevars(func): + """get objects defined in enclosing code that are referred to by func + + returns a dict of {name:object}""" + if ismethod(func): func = func.__func__ + if isfunction(func): + closures = func.__closure__ or () + func = func.__code__.co_freevars # get freevars + else: + return {} + + def get_cell_contents(): + for name, c in zip(func, closures): + try: + cell_contents = c.cell_contents + except ValueError: # cell is empty + continue + yield name, c.cell_contents + + return dict(get_cell_contents()) + +# thanks to Davies Liu for recursion of globals +def nestedglobals(func, recurse=True): + """get the names of any globals found within func""" + func = code(func) + if func is None: return list() + import sys + from .temp import capture + CAN_NULL = sys.hexversion >= 0x30b00a7 # NULL may be prepended >= 3.11a7 + names = set() + with capture('stdout') as out: + dis.dis(func) #XXX: dis.dis(None) disassembles last traceback + for line in out.getvalue().splitlines(): + if '_GLOBAL' in line: + name = line.split('(')[-1].split(')')[0] + if CAN_NULL: + names.add(name.replace('NULL + ', '').replace(' + NULL', '')) + else: + names.add(name) + for co in getattr(func, 'co_consts', tuple()): + if co and recurse and iscode(co): + names.update(nestedglobals(co, recurse=True)) + return list(names) + +def referredglobals(func, recurse=True, builtin=False): + """get the names of objects in the global scope referred to by func""" + return globalvars(func, recurse, builtin).keys() + +def globalvars(func, recurse=True, builtin=False): + """get objects defined in global scope that are referred to by func + + return a dict of {name:object}""" + if ismethod(func): func = func.__func__ + if isfunction(func): + globs = vars(getmodule(sum)).copy() if builtin else {} + # get references from within closure + orig_func, func = func, set() + for obj in orig_func.__closure__ or {}: + try: + cell_contents = obj.cell_contents + except ValueError: # cell is empty + pass + else: + _vars = globalvars(cell_contents, recurse, builtin) or {} + func.update(_vars) #XXX: (above) be wary of infinite recursion? + globs.update(_vars) + # get globals + globs.update(orig_func.__globals__ or {}) + # get names of references + if not recurse: + func.update(orig_func.__code__.co_names) + else: + func.update(nestedglobals(orig_func.__code__)) + # find globals for all entries of func + for key in func.copy(): #XXX: unnecessary...? + nested_func = globs.get(key) + if nested_func is orig_func: + #func.remove(key) if key in func else None + continue #XXX: globalvars(func, False)? + func.update(globalvars(nested_func, True, builtin)) + elif iscode(func): + globs = vars(getmodule(sum)).copy() if builtin else {} + #globs.update(globals()) + if not recurse: + func = func.co_names # get names + else: + orig_func = func.co_name # to stop infinite recursion + func = set(nestedglobals(func)) + # find globals for all entries of func + for key in func.copy(): #XXX: unnecessary...? + if key is orig_func: + #func.remove(key) if key in func else None + continue #XXX: globalvars(func, False)? + nested_func = globs.get(key) + func.update(globalvars(nested_func, True, builtin)) + else: + return {} + #NOTE: if name not in __globals__, then we skip it...
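+    # (editor's note) by this point 'func' has been rebound to a set of
+    # candidate global names; only names actually present in 'globs' are kept.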
+ return dict((name,globs[name]) for name in func if name in globs) + + +def varnames(func): + """get names of variables defined by func + + returns a tuple (local vars, local vars referenced by nested functions)""" + func = code(func) + if not iscode(func): + return () #XXX: better ((),())? or None? + return func.co_varnames, func.co_cellvars + + +def baditems(obj, exact=False, safe=False): #XXX: obj=globals() ? + """get items in object that fail to pickle""" + if not hasattr(obj,'__iter__'): # is not iterable + return [j for j in (badobjects(obj,0,exact,safe),) if j is not None] + obj = obj.values() if getattr(obj,'values',None) else obj + _obj = [] # can't use a set, as items may be unhashable + [_obj.append(badobjects(i,0,exact,safe)) for i in obj if i not in _obj] + return [j for j in _obj if j is not None] + + +def badobjects(obj, depth=0, exact=False, safe=False): + """get objects that fail to pickle""" + from dill import pickles + if not depth: + if pickles(obj,exact,safe): return None + return obj + return dict(((attr, badobjects(getattr(obj,attr),depth-1,exact,safe)) \ + for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe))) + +def badtypes(obj, depth=0, exact=False, safe=False): + """get types for objects that fail to pickle""" + from dill import pickles + if not depth: + if pickles(obj,exact,safe): return None + return type(obj) + return dict(((attr, badtypes(getattr(obj,attr),depth-1,exact,safe)) \ + for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe))) + +def errors(obj, depth=0, exact=False, safe=False): + """get errors for objects that fail to pickle""" + from dill import pickles, copy + if not depth: + try: + pik = copy(obj) + if exact: + assert pik == obj, \ + "Unpickling produces %s instead of %s" % (pik,obj) + assert type(pik) == type(obj), \ + "Unpickling produces %s instead of %s" % (type(pik),type(obj)) + return None + except Exception: + import sys + return sys.exc_info()[1] + _dict = {} + for attr in dir(obj): + try: + _attr = getattr(obj,attr) + except Exception: + import sys + _dict[attr] = sys.exc_info()[1] + continue + if not pickles(_attr,exact,safe): + _dict[attr] = errors(_attr,depth-1,exact,safe) + return _dict + + +# EOF diff --git a/llmeval-env/lib/python3.10/site-packages/dill/logger.py b/llmeval-env/lib/python3.10/site-packages/dill/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..d975e8efb53efbea714616cf8ad49c3020c9d9d3 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/logger.py @@ -0,0 +1,285 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Author: Leonardo Gama (@leogama) +# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +Logging utilities for dill. + +The 'logger' object is dill's top-level logger. + +The 'adapter' object wraps the logger and implements a 'trace()' method that +generates a detailed tree-style trace for the pickling call at log level INFO. + +The 'trace()' function sets and resets dill's logger log level, enabling and +disabling the pickling trace. + +The trace shows a tree structure depicting the depth of each object serialized +*with dill save functions*, but not the ones that use save functions from +'pickle._Pickler.dispatch'. If the information is available, it also displays +the size in bytes that the object contributed to the pickle stream (including +its child objects).
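+
+(Editor's note) Sizes are measured from the pickle stream position at the
+moment an object is entered and again when it is finished, so they are only
+reported when the underlying stream (and frame) supports tell(); otherwise the
+bracketed size annotations are simply omitted.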
Sample trace output: + + >>> import dill, dill.tests + >>> dill.detect.trace(True) + >>> dill.dump_session(main=dill.tests) + ┬ M1: <module 'dill.tests' from '.../dill/tests/__init__.py'> + ├┬ F2: <function _import_module at 0x...> + │└ # F2 [32 B] + ├┬ D2: <dict object at 0x...> + │├┬ T4: <class '_frozen_importlib.ModuleSpec'> + ││└ # T4 [35 B] + │├┬ D2: <dict object at 0x...> + ││├┬ T4: <class '_frozen_importlib_external.SourceFileLoader'> + │││└ # T4 [50 B] + ││├┬ D2: <dict object at 0x...> + │││└ # D2 [84 B] + ││└ # D2 [413 B] + │└ # D2 [763 B] + └ # M1 [813 B] +""" + +__all__ = ['adapter', 'logger', 'trace'] + +import codecs +import contextlib +import locale +import logging +import math +import os +from functools import partial +from typing import TextIO, Union + +import dill + +# Tree drawing characters: Unicode to ASCII map. +ASCII_MAP = str.maketrans({"│": "|", "├": "|", "┬": "+", "└": "`"}) + +## Notes about the design choices ## + +# Here is some documentation of the Standard Library's logging internals that +# can't be found completely in the official documentation. dill's logger is +# obtained by calling logging.getLogger('dill') and therefore is an instance of +# logging.getLoggerClass() at call time. As this is controlled by the user, +# in order to add some functionality to it, it's necessary to use a LoggerAdapter +# to wrap it, overriding some of the adapter's methods and creating new ones. +# +# Basic calling sequence +# ====================== +# +# Python's logging functionality can be conceptually divided into five steps: +# 0. Check logging level -> abort if call level is greater than logger level +# 1. Gather information -> construct a LogRecord from passed arguments and context +# 2. Filter (optional) -> discard message if the record matches a filter +# 3. Format -> format message with args, then format output string with message plus record +# 4. Handle -> write the formatted string to output as defined in the handler +# +# dill.logging.logger.log -> # or logger.info, etc. +# Logger.log -> \ +# Logger._log -> }- accept 'extra' parameter for custom record entries +# Logger.makeRecord -> / +# LogRecord.__init__ +# Logger.handle -> +# Logger.callHandlers -> +# Handler.handle -> +# Filterer.filter -> +# Filter.filter +# StreamHandler.emit -> +# Handler.format -> +# Formatter.format -> +# LogRecord.getMessage # does: record.message = msg % args +# Formatter.formatMessage -> +# PercentStyle.format # does: self._fmt % vars(record) +# +# NOTE: All methods from the second line on are from logging.__init__.py + +class TraceAdapter(logging.LoggerAdapter): + """ + Tracks object tree depth and calculates pickled object size. + + A single instance of this wraps the module's logger, as the logging API + doesn't allow setting it directly with a custom Logger subclass. The added + 'trace()' method receives a pickle instance as the first argument and + creates extra values to be added in the LogRecord from it, then calls + 'info()'. + + Usage of logger with 'trace()' method: + + >>> from dill.logger import adapter as logger #NOTE: not dill.logger.logger + >>> ... + >>> def save_atype(pickler, obj): + >>> logger.trace(pickler, "Message with %s and %r etc. placeholders", 'text', obj) + >>> ... + """ + def __init__(self, logger): + self.logger = logger + def addHandler(self, handler): + formatter = TraceFormatter("%(prefix)s%(message)s%(suffix)s", handler=handler) + handler.setFormatter(formatter) + self.logger.addHandler(handler) + def removeHandler(self, handler): + self.logger.removeHandler(handler) + def process(self, msg, kwargs): + # A no-op override, as we don't have self.extra. + return msg, kwargs + def trace_setup(self, pickler): + # Called by Pickler.dump().
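+        # (editor's note) initializes per-pickler trace state: '_trace_depth'
+        # tracks the current depth in the object tree, while '_size_stack'
+        # records stream offsets that trace() later uses to compute the
+        # per-object byte sizes shown in the trace.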
+ if not dill._dill.is_dill(pickler, child=False): + return + if self.isEnabledFor(logging.INFO): + pickler._trace_depth = 1 + pickler._size_stack = [] + else: + pickler._trace_depth = None + def trace(self, pickler, msg, *args, **kwargs): + if not hasattr(pickler, '_trace_depth'): + logger.info(msg, *args, **kwargs) + return + if pickler._trace_depth is None: + return + extra = kwargs.get('extra', {}) + pushed_obj = msg.startswith('#') + size = None + try: + # Streams are not required to be tellable. + size = pickler._file.tell() + frame = pickler.framer.current_frame + try: + size += frame.tell() + except AttributeError: + # PyPy may use a BytesBuilder as frame + size += len(frame) + except (AttributeError, TypeError): + pass + if size is not None: + if not pushed_obj: + pickler._size_stack.append(size) + else: + size -= pickler._size_stack.pop() + extra['size'] = size + if pushed_obj: + pickler._trace_depth -= 1 + extra['depth'] = pickler._trace_depth + kwargs['extra'] = extra + self.info(msg, *args, **kwargs) + if not pushed_obj: + pickler._trace_depth += 1 + +class TraceFormatter(logging.Formatter): + """ + Generates message prefix and suffix from record. + + This Formatter adds prefix and suffix strings to the log message in trace + mode (and also provides empty string defaults for normal logs). + """ + def __init__(self, *args, handler=None, **kwargs): + super().__init__(*args, **kwargs) + try: + encoding = handler.stream.encoding + if encoding is None: + raise AttributeError + except AttributeError: + encoding = locale.getpreferredencoding() + try: + encoding = codecs.lookup(encoding).name + except LookupError: + self.is_utf8 = False + else: + self.is_utf8 = (encoding == codecs.lookup('utf-8').name) + def format(self, record): + fields = {'prefix': "", 'suffix': ""} + if getattr(record, 'depth', 0) > 0: + if record.msg.startswith("#"): + prefix = (record.depth - 1)*"│" + "└" + elif record.depth == 1: + prefix = "┬" + else: + prefix = (record.depth - 2)*"│" + "├┬" + if not self.is_utf8: + prefix = prefix.translate(ASCII_MAP) + "-" + fields['prefix'] = prefix + " " + if hasattr(record, 'size') and record.size is not None and record.size >= 1: + # Show object size in human-readable form. + power = int(math.log(record.size, 2)) // 10 + size = record.size >> power*10 + fields['suffix'] = " [%d %sB]" % (size, "KMGTP"[power-1] + "i" if power else "") # (editor's fix) index power-1 so 1024 B reports as KiB, not MiB + vars(record).update(fields) + return super().format(record) + +logger = logging.getLogger('dill') +logger.propagate = False +adapter = TraceAdapter(logger) +stderr_handler = logging._StderrHandler() +adapter.addHandler(stderr_handler) + +def trace(arg: Union[bool, TextIO, str, os.PathLike] = None, *, mode: str = 'a') -> None: + """print a trace through the stack when pickling; useful for debugging + + With a single boolean argument, enable or disable the tracing. + + Example usage: + + >>> import dill + >>> dill.detect.trace(True) + >>> dill.dump_session() + + Alternatively, ``trace()`` can be used as a context manager. With no + arguments, it just takes care of restoring the tracing state on exit. + Either a file handle, or a file name and (optionally) a file mode may be + specified to redirect the tracing output in the ``with`` block context. A + log function is yielded by the manager so the user can write extra + information to the file.
+ + Example usage: + + >>> from dill import detect + >>> D = {'a': 42, 'b': {'x': None}} + >>> with detect.trace(): + >>> dumps(D) + ┬ D2: <dict object at 0x...> + ├┬ D2: <dict object at 0x...> + │└ # D2 [8 B] + └ # D2 [22 B] + >>> squared = lambda x: x**2 + >>> with detect.trace('output.txt', mode='w') as log: + >>> log("> D = %r", D) + >>> dumps(D) + >>> log("> squared = %r", squared) + >>> dumps(squared) + + Arguments: + arg: a boolean value, or an optional file-like or path-like object for the context manager + mode: mode string for ``open()`` if a file name is passed as the first argument + """ + if not isinstance(arg, bool): + return TraceManager(file=arg, mode=mode) + logger.setLevel(logging.INFO if arg else logging.WARNING) + +class TraceManager(contextlib.AbstractContextManager): + """context manager version of trace(); can redirect the trace to a file""" + def __init__(self, file, mode): + self.file = file + self.mode = mode + self.redirect = file is not None + self.file_is_stream = hasattr(file, 'write') + def __enter__(self): + if self.redirect: + stderr_handler.flush() + if self.file_is_stream: + self.handler = logging.StreamHandler(self.file) + else: + self.handler = logging.FileHandler(self.file, self.mode) + adapter.removeHandler(stderr_handler) + adapter.addHandler(self.handler) + self.old_level = adapter.getEffectiveLevel() + adapter.setLevel(logging.INFO) + return adapter.info + def __exit__(self, *exc_info): + adapter.setLevel(self.old_level) + if self.redirect: + adapter.removeHandler(self.handler) + adapter.addHandler(stderr_handler) + if not self.file_is_stream: + self.handler.close() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/objtypes.py b/llmeval-env/lib/python3.10/site-packages/dill/objtypes.py new file mode 100644 index 0000000000000000000000000000000000000000..526b5835ea213fe08cf81a1d389368bd95a86cc8 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/objtypes.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +all Python Standard Library object types (currently: CH 1-15 @ 2.7) +and some other common object types (e.g. numpy.ndarray) + +to load more objects and types, use dill.load_types() +""" + +# non-local import of dill.objects +from dill import objects +for _type in objects.keys(): + exec("%s = type(objects['%s'])" % (_type,_type)) + +del objects +try: + del _type +except NameError: + pass diff --git a/llmeval-env/lib/python3.10/site-packages/dill/pointers.py b/llmeval-env/lib/python3.10/site-packages/dill/pointers.py new file mode 100644 index 0000000000000000000000000000000000000000..c3b48cae00b87830dc15dcd8fc047ca74aae5b66 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/pointers.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD.
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +__all__ = ['parent', 'reference', 'at', 'parents', 'children'] + +import gc +import sys + +from ._dill import _proxy_helper as reference +from ._dill import _locate_object as at + +def parent(obj, objtype, ignore=()): + """ +>>> listiter = iter([4,5,6,7]) +>>> obj = parent(listiter, list) +>>> obj == [4,5,6,7] # actually 'is', but we don't have a handle to it any longer +True + +NOTE: objtype can be a single type (e.g. int or list) or a tuple of types. + +WARNING: if obj is a sequence (e.g. list), may produce unexpected results. +Parent finds *one* parent (e.g. the last member of the sequence). + """ + depth = 1 #XXX: always looking for the parent (only, right?) + chain = parents(obj, objtype, depth, ignore) + parent = chain.pop() + if parent is obj: + return None + return parent + + +def parents(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ? + """Find the chain of referents for obj. Chain will end with obj. + + objtype: an object type or tuple of types to search for + depth: search depth (e.g. depth=2 is 'grandparents') + ignore: an object or tuple of objects to ignore in the search + """ + edge_func = gc.get_referents # looking for refs, not back_refs + predicate = lambda x: isinstance(x, objtype) # looking for parent type + #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ? + ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore + ignore = (id(obj) for obj in ignore) + chain = find_chain(obj, predicate, edge_func, depth)[::-1] + #XXX: should pop off obj... ? + return chain + + +def children(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ? + """Find the chain of referrers for obj. Chain will start with obj. + + objtype: an object type or tuple of types to search for + depth: search depth (e.g. depth=2 is 'grandchildren') + ignore: an object or tuple of objects to ignore in the search + + NOTE: a common thing to ignore is all globals, 'ignore=(globals(),)' + + NOTE: repeated calls may yield different results, as python stores + the last value in the special variable '_'; thus, it is often good + to execute something to replace '_' (e.g. >>> 1+1). + """ + edge_func = gc.get_referrers # looking for back_refs, not refs + predicate = lambda x: isinstance(x, objtype) # looking for child type + #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ? + ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore + ignore = (id(obj) for obj in ignore) + chain = find_chain(obj, predicate, edge_func, depth, ignore) + #XXX: should pop off obj... ?
+ return chain + + +# more generic helper function (cut-n-paste from objgraph) +# Source at http://mg.pov.lt/objgraph/ +# Copyright (c) 2008-2010 Marius Gedminas +# Copyright (c) 2010 Stefano Rivera +# Released under the MIT licence (see objgraph/objgraph.py) + +def find_chain(obj, predicate, edge_func, max_depth=20, extra_ignore=()): + queue = [obj] + depth = {id(obj): 0} + parent = {id(obj): None} + ignore = set(extra_ignore) + ignore.add(id(extra_ignore)) + ignore.add(id(queue)) + ignore.add(id(depth)) + ignore.add(id(parent)) + ignore.add(id(ignore)) + ignore.add(id(sys._getframe())) # this function + ignore.add(id(sys._getframe(1))) # find_chain/find_backref_chain, likely + gc.collect() + while queue: + target = queue.pop(0) + if predicate(target): + chain = [target] + while parent[id(target)] is not None: + target = parent[id(target)] + chain.append(target) + return chain + tdepth = depth[id(target)] + if tdepth < max_depth: + referrers = edge_func(target) + ignore.add(id(referrers)) + for source in referrers: + if id(source) in ignore: + continue + if id(source) not in depth: + depth[id(source)] = tdepth + 1 + parent[id(source)] = target + queue.append(source) + return [obj] # not found + + +# backward compatibility +refobject = at + + +# EOF diff --git a/llmeval-env/lib/python3.10/site-packages/dill/session.py b/llmeval-env/lib/python3.10/site-packages/dill/session.py new file mode 100644 index 0000000000000000000000000000000000000000..e91068afc5b77dffe9646f0167ab2af5e97523fb --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/session.py @@ -0,0 +1,613 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Author: Leonardo Gama (@leogama) +# Copyright (c) 2008-2015 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +Pickle and restore the interpreter session. +""" + +__all__ = [ + 'dump_module', 'load_module', 'load_module_asdict', + 'dump_session', 'load_session' # backward compatibility +] + +import re +import os +import sys +import warnings + +from dill import _dill, Pickler, Unpickler +from ._dill import ( + BuiltinMethodType, FunctionType, MethodType, ModuleType, TypeType, + _import_module, _is_builtin_module, _is_imported_module, _main_module, + _reverse_typemap, __builtin__, +) + +# Type hints. +from typing import Optional, Union + +import pathlib +import tempfile + +TEMPDIR = pathlib.PurePath(tempfile.gettempdir()) + +def _module_map(): + """get map of imported modules""" + from collections import defaultdict + from types import SimpleNamespace + modmap = SimpleNamespace( + by_name=defaultdict(list), + by_id=defaultdict(list), + top_level={}, + ) + for modname, module in sys.modules.items(): + if modname in ('__main__', '__mp_main__') or not isinstance(module, ModuleType): + continue + if '.'
not in modname: + modmap.top_level[id(module)] = modname + for objname, modobj in module.__dict__.items(): + modmap.by_name[objname].append((modobj, modname)) + modmap.by_id[id(modobj)].append((modobj, objname, modname)) + return modmap + +IMPORTED_AS_TYPES = (ModuleType, TypeType, FunctionType, MethodType, BuiltinMethodType) +if 'PyCapsuleType' in _reverse_typemap: + IMPORTED_AS_TYPES += (_reverse_typemap['PyCapsuleType'],) +IMPORTED_AS_MODULES = ('ctypes', 'typing', 'subprocess', 'threading', + r'concurrent\.futures(\.\w+)?', r'multiprocessing(\.\w+)?') +IMPORTED_AS_MODULES = tuple(re.compile(x) for x in IMPORTED_AS_MODULES) + +def _lookup_module(modmap, name, obj, main_module): + """lookup name or id of obj if module is imported""" + for modobj, modname in modmap.by_name[name]: + if modobj is obj and sys.modules[modname] is not main_module: + return modname, name + __module__ = getattr(obj, '__module__', None) + if isinstance(obj, IMPORTED_AS_TYPES) or (__module__ is not None + and any(regex.fullmatch(__module__) for regex in IMPORTED_AS_MODULES)): + for modobj, objname, modname in modmap.by_id[id(obj)]: + if sys.modules[modname] is not main_module: + return modname, objname + return None, None + +def _stash_modules(main_module): + modmap = _module_map() + newmod = ModuleType(main_module.__name__) + + imported = [] + imported_as = [] + imported_top_level = [] # keep separated for backward compatibility + original = {} + for name, obj in main_module.__dict__.items(): + if obj is main_module: + original[name] = newmod # self-reference + elif obj is main_module.__dict__: + original[name] = newmod.__dict__ + # Avoid incorrectly matching a singleton value in another package (ex.: __doc__). + elif any(obj is singleton for singleton in (None, False, True)) \ + or isinstance(obj, ModuleType) and _is_builtin_module(obj): # always saved by ref + original[name] = obj + else: + source_module, objname = _lookup_module(modmap, name, obj, main_module) + if source_module is not None: + if objname == name: + imported.append((source_module, name)) + else: + imported_as.append((source_module, objname, name)) + else: + try: + imported_top_level.append((modmap.top_level[id(obj)], name)) + except KeyError: + original[name] = obj + + if len(original) < len(main_module.__dict__): + newmod.__dict__.update(original) + newmod.__dill_imported = imported + newmod.__dill_imported_as = imported_as + newmod.__dill_imported_top_level = imported_top_level + if getattr(newmod, '__loader__', None) is None and _is_imported_module(main_module): + # Trick _is_imported_module() to force saving as an imported module. + newmod.__loader__ = True # will be discarded by save_module() + return newmod + else: + return main_module + +def _restore_modules(unpickler, main_module): + try: + for modname, name in main_module.__dict__.pop('__dill_imported'): + main_module.__dict__[name] = unpickler.find_class(modname, name) + for modname, objname, name in main_module.__dict__.pop('__dill_imported_as'): + main_module.__dict__[name] = unpickler.find_class(modname, objname) + for modname, name in main_module.__dict__.pop('__dill_imported_top_level'): + main_module.__dict__[name] = __import__(modname) + except KeyError: + pass + +#NOTE: 06/03/15 renamed main_module to main +def dump_module( + filename: Union[str, os.PathLike] = None, + module: Optional[Union[ModuleType, str]] = None, + refimported: bool = False, + **kwds +) -> None: + """Pickle the current state of :py:mod:`__main__` or another module to a file. 
+ + Save the contents of :py:mod:`__main__` (e.g. from an interactive + interpreter session), an imported module, or a module-type object (e.g. + built with :py:class:`~types.ModuleType`), to a file. The pickled + module can then be restored with the function :py:func:`load_module`. + + Args: + filename: a path-like object or a writable stream. If `None` + (the default), write to a named file in a temporary directory. + module: a module object or the name of an importable module. If `None` + (the default), :py:mod:`__main__` is saved. + refimported: if `True`, all objects identified as having been imported + into the module's namespace are saved by reference. *Note:* this is + similar but independent from ``dill.settings[`byref`]``, as + ``refimported`` refers to virtually all imported objects, while + ``byref`` only affects select objects. + **kwds: extra keyword arguments passed to :py:class:`Pickler()`. + + Raises: + :py:exc:`PicklingError`: if pickling fails. + + Examples: + + - Save current interpreter session state: + + >>> import dill + >>> squared = lambda x: x*x + >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl + + - Save the state of an imported/importable module: + + >>> import dill + >>> import pox + >>> pox.plus_one = lambda x: x+1 + >>> dill.dump_module('pox_session.pkl', module=pox) + + - Save the state of a non-importable, module-type object: + + >>> import dill + >>> from types import ModuleType + >>> foo = ModuleType('foo') + >>> foo.values = [1,2,3] + >>> import math + >>> foo.sin = math.sin + >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True) + + - Restore the state of the saved modules: + + >>> import dill + >>> dill.load_module() + >>> squared(2) + 4 + >>> pox = dill.load_module('pox_session.pkl') + >>> pox.plus_one(1) + 2 + >>> foo = dill.load_module('foo_session.pkl') + >>> [foo.sin(x) for x in foo.values] + [0.8414709848078965, 0.9092974268256817, 0.1411200080598672] + + - Use `refimported` to save imported objects by reference: + + >>> import dill + >>> from html.entities import html5 + >>> type(html5), len(html5) + (dict, 2231) + >>> import io + >>> buf = io.BytesIO() + >>> dill.dump_module(buf) # saves __main__, with html5 saved by value + >>> len(buf.getvalue()) # pickle size in bytes + 71665 + >>> buf = io.BytesIO() + >>> dill.dump_module(buf, refimported=True) # html5 saved by reference + >>> len(buf.getvalue()) + 438 + + *Changed in version 0.3.6:* Function ``dump_session()`` was renamed to + ``dump_module()``. Parameters ``main`` and ``byref`` were renamed to + ``module`` and ``refimported``, respectively. + + Note: + Currently, ``dill.settings['byref']`` and ``dill.settings['recurse']`` + don't apply to this function. 
+ """ + for old_par, par in [('main', 'module'), ('byref', 'refimported')]: + if old_par in kwds: + message = "The argument %r has been renamed %r" % (old_par, par) + if old_par == 'byref': + message += " to distinguish it from dill.settings['byref']" + warnings.warn(message + ".", PendingDeprecationWarning) + if locals()[par]: # the defaults are None and False + raise TypeError("both %r and %r arguments were used" % (par, old_par)) + refimported = kwds.pop('byref', refimported) + module = kwds.pop('main', module) + + from .settings import settings + protocol = settings['protocol'] + main = module + if main is None: + main = _main_module + elif isinstance(main, str): + main = _import_module(main) + if not isinstance(main, ModuleType): + raise TypeError("%r is not a module" % main) + if hasattr(filename, 'write'): + file = filename + else: + if filename is None: + filename = str(TEMPDIR/'session.pkl') + file = open(filename, 'wb') + try: + pickler = Pickler(file, protocol, **kwds) + pickler._original_main = main + if refimported: + main = _stash_modules(main) + pickler._main = main #FIXME: dill.settings are disabled + pickler._byref = False # disable pickling by name reference + pickler._recurse = False # disable pickling recursion for globals + pickler._session = True # is best indicator of when pickling a session + pickler._first_pass = True + pickler._main_modified = main is not pickler._original_main + pickler.dump(main) + finally: + if file is not filename: # if newly opened file + file.close() + return + +# Backward compatibility. +def dump_session(filename=None, main=None, byref=False, **kwds): + warnings.warn("dump_session() has been renamed dump_module()", PendingDeprecationWarning) + dump_module(filename, module=main, refimported=byref, **kwds) +dump_session.__doc__ = dump_module.__doc__ + +class _PeekableReader: + """lightweight stream wrapper that implements peek()""" + def __init__(self, stream): + self.stream = stream + def read(self, n): + return self.stream.read(n) + def readline(self): + return self.stream.readline() + def tell(self): + return self.stream.tell() + def close(self): + return self.stream.close() + def peek(self, n): + stream = self.stream + try: + if hasattr(stream, 'flush'): stream.flush() + position = stream.tell() + stream.seek(position) # assert seek() works before reading + chunk = stream.read(n) + stream.seek(position) + return chunk + except (AttributeError, OSError): + raise NotImplementedError("stream is not peekable: %r", stream) from None + +def _make_peekable(stream): + """return stream as an object with a peek() method""" + import io + if hasattr(stream, 'peek'): + return stream + if not (hasattr(stream, 'tell') and hasattr(stream, 'seek')): + try: + return io.BufferedReader(stream) + except Exception: + pass + return _PeekableReader(stream) + +def _identify_module(file, main=None): + """identify the name of the module stored in the given file-type object""" + from pickletools import genops + UNICODE = {'UNICODE', 'BINUNICODE', 'SHORT_BINUNICODE'} + found_import = False + try: + for opcode, arg, pos in genops(file.peek(256)): + if not found_import: + if opcode.name in ('GLOBAL', 'SHORT_BINUNICODE') and \ + arg.endswith('_import_module'): + found_import = True + else: + if opcode.name in UNICODE: + return arg + else: + raise UnpicklingError("reached STOP without finding main module") + except (NotImplementedError, ValueError) as error: + # ValueError occours when the end of the chunk is reached (without a STOP). 
+ if isinstance(error, NotImplementedError) and main is not None: + # file is not peekable, but we have main. + return None + raise UnpicklingError("unable to identify main module") from error + +def load_module( + filename: Union[str, os.PathLike] = None, + module: Optional[Union[ModuleType, str]] = None, + **kwds +) -> Optional[ModuleType]: + """Update the selected module (default is :py:mod:`__main__`) with + the state saved at ``filename``. + + Restore a module to the state saved with :py:func:`dump_module`. The + saved module can be :py:mod:`__main__` (e.g. an interpreter session), + an imported module, or a module-type object (e.g. created with + :py:class:`~types.ModuleType`). + + When restoring the state of a non-importable module-type object, the + current instance of this module may be passed as the argument ``main``. + Otherwise, a new instance is created with :py:class:`~types.ModuleType` + and returned. + + Args: + filename: a path-like object or a readable stream. If `None` + (the default), read from a named file in a temporary directory. + module: a module object or the name of an importable module; + the module name and kind (i.e. imported or non-imported) must + match the name and kind of the module stored at ``filename``. + **kwds: extra keyword arguments passed to :py:class:`Unpickler()`. + + Raises: + :py:exc:`UnpicklingError`: if unpickling fails. + :py:exc:`ValueError`: if the argument ``main`` and module saved + at ``filename`` are incompatible. + + Returns: + A module object, if the saved module is not :py:mod:`__main__` or + a module instance wasn't provided with the argument ``main``. + + Examples: + + - Save the state of some modules: + + >>> import dill + >>> squared = lambda x: x*x + >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl + >>> + >>> import pox # an imported module + >>> pox.plus_one = lambda x: x+1 + >>> dill.dump_module('pox_session.pkl', module=pox) + >>> + >>> from types import ModuleType + >>> foo = ModuleType('foo') # a module-type object + >>> foo.values = [1,2,3] + >>> import math + >>> foo.sin = math.sin + >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True) + + - Restore the state of the interpreter: + + >>> import dill + >>> dill.load_module() # updates __main__ from /tmp/session.pkl + >>> squared(2) + 4 + + - Load the saved state of an importable module: + + >>> import dill + >>> pox = dill.load_module('pox_session.pkl') + >>> pox.plus_one(1) + 2 + >>> import sys + >>> pox in sys.modules.values() + True + + - Load the saved state of a non-importable module-type object: + + >>> import dill + >>> foo = dill.load_module('foo_session.pkl') + >>> [foo.sin(x) for x in foo.values] + [0.8414709848078965, 0.9092974268256817, 0.1411200080598672] + >>> import math + >>> foo.sin is math.sin # foo.sin was saved by reference + True + >>> import sys + >>> foo in sys.modules.values() + False + + - Update the state of a non-importable module-type object: + + >>> import dill + >>> from types import ModuleType + >>> foo = ModuleType('foo') + >>> foo.values = ['a','b'] + >>> foo.sin = lambda x: x*x + >>> dill.load_module('foo_session.pkl', module=foo) + >>> [foo.sin(x) for x in foo.values] + [0.8414709848078965, 0.9092974268256817, 0.1411200080598672] + + *Changed in version 0.3.6:* Function ``load_session()`` was renamed to + ``load_module()``. Parameter ``main`` was renamed to ``module``. + + See also: + :py:func:`load_module_asdict` to load the contents of module saved + with :py:func:`dump_module` into a dictionary. 
+ """ + if 'main' in kwds: + warnings.warn( + "The argument 'main' has been renamed 'module'.", + PendingDeprecationWarning + ) + if module is not None: + raise TypeError("both 'module' and 'main' arguments were used") + module = kwds.pop('main') + main = module + if hasattr(filename, 'read'): + file = filename + else: + if filename is None: + filename = str(TEMPDIR/'session.pkl') + file = open(filename, 'rb') + try: + file = _make_peekable(file) + #FIXME: dill.settings are disabled + unpickler = Unpickler(file, **kwds) + unpickler._session = True + + # Resolve unpickler._main + pickle_main = _identify_module(file, main) + if main is None and pickle_main is not None: + main = pickle_main + if isinstance(main, str): + if main.startswith('__runtime__.'): + # Create runtime module to load the session into. + main = ModuleType(main.partition('.')[-1]) + else: + main = _import_module(main) + if main is not None: + if not isinstance(main, ModuleType): + raise TypeError("%r is not a module" % main) + unpickler._main = main + else: + main = unpickler._main + + # Check against the pickle's main. + is_main_imported = _is_imported_module(main) + if pickle_main is not None: + is_runtime_mod = pickle_main.startswith('__runtime__.') + if is_runtime_mod: + pickle_main = pickle_main.partition('.')[-1] + error_msg = "can't update{} module{} %r with the saved state of{} module{} %r" + if is_runtime_mod and is_main_imported: + raise ValueError( + error_msg.format(" imported", "", "", "-type object") + % (main.__name__, pickle_main) + ) + if not is_runtime_mod and not is_main_imported: + raise ValueError( + error_msg.format("", "-type object", " imported", "") + % (pickle_main, main.__name__) + ) + if main.__name__ != pickle_main: + raise ValueError(error_msg.format("", "", "", "") % (main.__name__, pickle_main)) + + # This is for find_class() to be able to locate it. + if not is_main_imported: + runtime_main = '__runtime__.%s' % main.__name__ + sys.modules[runtime_main] = main + + loaded = unpickler.load() + finally: + if not hasattr(filename, 'read'): # if newly opened file + file.close() + try: + del sys.modules[runtime_main] + except (KeyError, NameError): + pass + assert loaded is main + _restore_modules(unpickler, main) + if main is _main_module or main is module: + return None + else: + return main + +# Backward compatibility. +def load_session(filename=None, main=None, **kwds): + warnings.warn("load_session() has been renamed load_module().", PendingDeprecationWarning) + load_module(filename, module=main, **kwds) +load_session.__doc__ = load_module.__doc__ + +def load_module_asdict( + filename: Union[str, os.PathLike] = None, + update: bool = False, + **kwds +) -> dict: + """ + Load the contents of a saved module into a dictionary. + + ``load_module_asdict()`` is the near-equivalent of:: + + lambda filename: vars(dill.load_module(filename)).copy() + + however, does not alter the original module. Also, the path of + the loaded module is stored in the ``__session__`` attribute. + + Args: + filename: a path-like object or a readable stream. If `None` + (the default), read from a named file in a temporary directory. + update: if `True`, initialize the dictionary with the current state + of the module prior to loading the state stored at filename. + **kwds: extra keyword arguments passed to :py:class:`Unpickler()` + + Raises: + :py:exc:`UnpicklingError`: if unpickling fails + + Returns: + A copy of the restored module's dictionary. 
+ + Note: + If ``update`` is True, the corresponding module may first be imported + into the current namespace before the saved state is loaded from + ``filename`` into the dictionary. Any module imported this way as a + side effect of ``update`` is not itself modified by loading the saved + module into the dictionary. + + Example: + >>> import dill + >>> alist = [1, 2, 3] + >>> anum = 42 + >>> dill.dump_module() + >>> anum = 0 + >>> new_var = 'spam' + >>> main = dill.load_module_asdict() + >>> main['__name__'], main['__session__'] + ('__main__', '/tmp/session.pkl') + >>> main is globals() # loaded objects don't reference globals + False + >>> main['alist'] == alist + True + >>> main['alist'] is alist # was saved by value + False + >>> main['anum'] == anum # changed after the session was saved + False + >>> new_var in main # would be True if the option 'update' was set + False + """ + if 'module' in kwds: + raise TypeError("'module' is an invalid keyword argument for load_module_asdict()") + if hasattr(filename, 'read'): + file = filename + else: + if filename is None: + filename = str(TEMPDIR/'session.pkl') + file = open(filename, 'rb') + try: + file = _make_peekable(file) + main_name = _identify_module(file) + old_main = sys.modules.get(main_name) + main = ModuleType(main_name) + if update: + if old_main is None: + old_main = _import_module(main_name) + main.__dict__.update(old_main.__dict__) + else: + main.__builtins__ = __builtin__ + sys.modules[main_name] = main + load_module(file, **kwds) + finally: + if not hasattr(filename, 'read'): # if newly opened file + file.close() + try: + if old_main is None: + del sys.modules[main_name] + else: + sys.modules[main_name] = old_main + except NameError: # failed before setting old_main + pass + main.__session__ = str(filename) + return main.__dict__ + + +# Internal exports for backward compatibility with dill v0.3.5.1 +# Can't be placed in dill._dill because of circular import problems. +for name in ( + '_lookup_module', '_module_map', '_restore_modules', '_stash_modules', + 'dump_session', 'load_session' # backward compatibility functions +): + setattr(_dill, name, globals()[name]) +del name diff --git a/llmeval-env/lib/python3.10/site-packages/dill/settings.py b/llmeval-env/lib/python3.10/site-packages/dill/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..19c18fc5f5159c99ef80759bde406bf15dfecc63 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/settings.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD.
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +global settings for Pickler +""" + +from pickle import DEFAULT_PROTOCOL + +settings = { + #'main' : None, + 'protocol' : DEFAULT_PROTOCOL, + 'byref' : False, + #'strictio' : False, + 'fmode' : 0, #HANDLE_FMODE + 'recurse' : False, + 'ignore' : False, +} + +del DEFAULT_PROTOCOL + diff --git a/llmeval-env/lib/python3.10/site-packages/dill/source.py b/llmeval-env/lib/python3.10/site-packages/dill/source.py new file mode 100644 index 0000000000000000000000000000000000000000..1d7bd0d3733c5a76847c4066fe56713145c8a438 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/source.py @@ -0,0 +1,1017 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +# +# inspired by inspect.py from Python-2.7.6 +# inspect.py author: 'Ka-Ping Yee <ping@lfw.org>' +# inspect.py merged into original dill.source by Mike McKerns 4/13/14 +""" +Extensions to python's 'inspect' module, which can be used +to retrieve information from live python objects. The methods +defined in this module are augmented to facilitate access to +source code of interactively defined functions and classes, +as well as provide access to source code for objects defined +in a file. +""" + +__all__ = ['findsource', 'getsourcelines', 'getsource', 'indent', 'outdent', \ + '_wrap', 'dumpsource', 'getname', '_namespace', 'getimport', \ + '_importable', 'importable','isdynamic', 'isfrommain'] + +import linecache +import re +from inspect import (getblock, getfile, getmodule, getsourcefile, indentsize, + isbuiltin, isclass, iscode, isframe, isfunction, ismethod, + ismodule, istraceback) +from tokenize import TokenError + +from ._dill import IS_IPYTHON + + +def isfrommain(obj): + "check if object was built in __main__" + module = getmodule(obj) + if module and module.__name__ == '__main__': + return True + return False + + +def isdynamic(obj): + "check if object was built in the interpreter" + try: file = getfile(obj) + except TypeError: file = None + if file == '<stdin>' and isfrommain(obj): + return True + return False + + +def _matchlambda(func, line): + """check if lambda object 'func' matches raw line of code 'line'""" + from .detect import code as getcode + from .detect import freevars, globalvars, varnames + dummy = lambda : '__this_is_a_big_dummy_function__' + # process the line (removing leading whitespace, etc) + lhs,rhs = line.split('lambda ',1)[-1].split(":", 1) #FIXME: if !1 inputs + try: #FIXME: unsafe + _ = eval("lambda %s : %s" % (lhs,rhs), globals(),locals()) + except Exception: _ = dummy + # get code objects, for comparison + _, code = getcode(_).co_code, getcode(func).co_code + # check if func is in closure + _f = [line.count(i) for i in freevars(func).keys()] + if not _f: # not in closure + # check if code matches + if _ == code: return True + return False + # weak check on freevars + if not all(_f): return False #XXX: VERY WEAK + # weak check on varnames and globalvars + _f = varnames(func) + _f = [line.count(i) for i in _f[0]+_f[1]] + if _f and not all(_f): return False #XXX: VERY WEAK + _f = [line.count(i) for i in globalvars(func).keys()] + if _f and not all(_f): return False #XXX: VERY WEAK + # check if func is a double lambda + if
(line.count('lambda ') > 1) and (lhs in freevars(func).keys()): + _lhs,_rhs = rhs.split('lambda ',1)[-1].split(":",1) #FIXME: if !1 inputs + try: #FIXME: unsafe + _f = eval("lambda %s : %s" % (_lhs,_rhs), globals(),locals()) + except Exception: _f = dummy + # get code objects, for comparison + _, code = getcode(_f).co_code, getcode(func).co_code + if len(_) != len(code): return False + #NOTE: should be same code same order, but except for 't' and '\x88' + _ = set((i,j) for (i,j) in zip(_,code) if i != j) + if len(_) != 1: return False #('t','\x88') + return True + # check indentsize + if not indentsize(line): return False #FIXME: is this a good check??? + # check if code 'pattern' matches + #XXX: or pattern match against dis.dis(code)? (or use uncompyle2?) + _ = _.split(_[0]) # 't' #XXX: remove matching values if starts the same? + _f = code.split(code[0]) # '\x88' + #NOTE: should be same code different order, with different first element + _ = dict(re.match(r'([\W\D\S])(.*)', _[i]).groups() for i in range(1,len(_))) + _f = dict(re.match(r'([\W\D\S])(.*)', _f[i]).groups() for i in range(1,len(_f))) + if (_.keys() == _f.keys()) and (sorted(_.values()) == sorted(_f.values())): + return True + return False + + +def findsource(object): + """Return the entire source file and starting line number for an object. + For interactively-defined objects, the 'file' is the interpreter's history. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of all the lines + in the file and the line number indexes a line in that list. An IOError + is raised if the source code cannot be retrieved, while a TypeError is + raised for objects where the source code is unavailable (e.g. builtins).""" + + module = getmodule(object) + try: file = getfile(module) + except TypeError: file = None + is_module_main = (module and module.__name__ == '__main__' and not file) + if IS_IPYTHON and is_module_main: + #FIXME: quick fix for functions and classes in IPython interpreter + try: + file = getfile(object) + sourcefile = getsourcefile(object) + except TypeError: + if isclass(object): + for object_method in filter(isfunction, object.__dict__.values()): + # look for a method of the class + file_candidate = getfile(object_method) + if not file_candidate.startswith('<ipython-input-'): + continue + file = file_candidate + sourcefile = getsourcefile(object_method) + if file: + lines = linecache.getlines(file) + else: + # fallback to use history + history = '\n'.join(get_ipython().history_manager.input_hist_parsed) + lines = [line + '\n' for line in history.splitlines()] + # use readline when working in interpreter (i.e. __main__ and not file) + elif is_module_main: + try: + import readline + err = '' + except ImportError: + import sys + err = sys.exc_info()[1].args[0] + if sys.platform[:3] == 'win': + err += ", please install 'pyreadline'" + if err: + raise IOError(err) + lbuf = readline.get_current_history_length() + lines = [readline.get_history_item(i)+'\n' for i in range(1,lbuf)] + else: + try: # special handling for class instances + if not isclass(object) and isclass(type(object)): # __class__ + file = getfile(object.__class__) + sourcefile = getsourcefile(object.__class__) + else: # builtins fail with a TypeError + file = getfile(object) + sourcefile = getsourcefile(object) + except (TypeError, AttributeError): # fail with better error + file = getfile(object) + sourcefile = getsourcefile(object) + if not sourcefile and file[:1] + file[-1:] != '<>': + raise IOError('source code not available') + file = sourcefile if sourcefile else file + module = getmodule(object, file) + if module: + lines = linecache.getlines(file, module.__dict__) + else: + lines = linecache.getlines(file) + if not lines: + raise IOError('could not extract source code') + #FIXME: all below may fail if exec used (i.e. exec('f = lambda x:x') ) + if ismodule(object): + return lines, 0 + #NOTE: beneficial if search goes from end to start of buffer history + name = pat1 = obj = '' + pat2 = r'^(\s*@)' + if ismethod(object): + name = object.__name__ + if name == '<lambda>': pat1 = r'(.*(?<!\w)lambda(:|\s))' + else: pat1 = r'^(\s*def\s)' + object = object.__func__ + if isfunction(object): + name = object.__name__ + if name == '<lambda>': + pat1 = r'(.*(?<!\w)lambda(:|\s))' + obj = object #XXX: better a copy? + else: pat1 = r'^(\s*def\s)' + object = object.__code__ + if istraceback(object): + object = object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + if not hasattr(object, 'co_firstlineno'): + raise IOError('could not find function definition') + stdin = object.co_filename == '<stdin>' + if stdin: + lnum = len(lines) - 1 # can't get lnum easily, so leverage pat + if not pat1: pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)' + else: + lnum = object.co_firstlineno - 1 + pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)' + pat1 = re.compile(pat1); pat2 = re.compile(pat2) + while lnum > 0: #XXX: won't find decorators in <stdin> ? + line = lines[lnum] + if pat1.match(line): + if not stdin: break # co_firstlineno does the job + if name == '<lambda>': # hackery needed to confirm a match + if _matchlambda(obj, line): break + else: # not a lambda, just look for the name + if name in line: # need to check for decorator... + hats = 0 + for _lnum in range(lnum-1,-1,-1): + if pat2.match(lines[_lnum]): hats += 1 + else: break + lnum = lnum - hats + break + lnum = lnum - 1 + return lines, lnum + + try: # turn instances into classes + if not isclass(object) and isclass(type(object)): # __class__ + object = object.__class__ #XXX: sometimes type(class) is better? + #XXX: we don't find how the instance was built + except AttributeError: pass + if isclass(object): + name = object.__name__ + pat = re.compile(r'^(\s*)class\s*' + name + r'\b') + # make some effort to find the best matching class definition: + # use the one with the least indentation, which is the one + # that's most probably not inside a function definition.
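+ # (less leading whitespace means less nesting, so a toplevel match wins outright)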
+ candidates = [] + for i in range(len(lines)-1,-1,-1): + match = pat.match(lines[i]) + if match: + # if it's at toplevel, it's already the best one + if lines[i][0] == 'c': + return lines, i + # else add whitespace to candidate list + candidates.append((match.group(1), i)) + if candidates: + # this will sort by whitespace, and by line number, + # less whitespace first #XXX: should sort high lnum before low + candidates.sort() + return lines, candidates[0][1] + else: + raise IOError('could not find class definition') + raise IOError('could not find code object') + + +def getblocks(object, lstrip=False, enclosing=False, locate=False): + """Return a list of source lines and starting line number for an object. + Interactively-defined objects refer to lines in the interpreter's history. + + If enclosing=True, then also return any enclosing code. + If lstrip=True, ensure there is no indentation in the first line of code. + If locate=True, then also return the line number for the block of code. + + DEPRECATED: use 'getsourcelines' instead + """ + lines, lnum = findsource(object) + + if ismodule(object): + if lstrip: lines = _outdent(lines) + return ([lines], [0]) if locate is True else [lines] + + #XXX: 'enclosing' means: closures only? or classes and files? + indent = indentsize(lines[lnum]) + block = getblock(lines[lnum:]) #XXX: catch any TokenError here? + + if not enclosing or not indent: + if lstrip: block = _outdent(block) + return ([block], [lnum]) if locate is True else [block] + + pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))' + pat2 = r'^(\s*@)' + pat1 = re.compile(pat1); pat2 = re.compile(pat2) + skip = 0 # number of consecutive decorator lines + line = 0 # current line number in the source listing + blocks = []; _lnum = [] + target = ''.join(block) # the target block of code + while line <= lnum: + # check if line starts a candidate function definition or lambda + if pat1.match(lines[line]): + try: code = getblock(lines[line:]) + except TokenError: code = [lines[line]] + if indentsize(lines[line]) > indent: #XXX: should be >= ? + line += len(code) - skip + elif target in ''.join(code): + blocks.append(code) # save code block as the potential winner + _lnum.append(line - skip) # save the line number for the match + line += len(code) - skip + else: + line += 1 + skip = 0 + # find skip: the number of consecutive decorators + elif pat2.match(lines[line]): + try: code = getblock(lines[line:]) + except TokenError: code = [lines[line]] + skip = 1 + for _line in code[1:]: # skip lines that are decorators + if not pat2.match(_line): break + skip += 1 + line += skip + # no match: reset skip and go to the next line + else: + line += 1 + skip = 0 + + if not blocks: + blocks = [block] + _lnum = [lnum] + if lstrip: blocks = [_outdent(block) for block in blocks] + # return last match + return (blocks, _lnum) if locate is True else blocks + + +def getsourcelines(object, lstrip=False, enclosing=False): + """Return a list of source lines and starting line number for an object. + Interactively-defined objects refer to lines in the interpreter's history. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of the lines + corresponding to the object and the line number indicates where in the + original source file the first line of code was found. An IOError is + raised if the source code cannot be retrieved, while a TypeError is + raised for objects where the source code is unavailable (e.g. builtins). + + If lstrip=True, ensure there is no indentation in the first line of code. + If enclosing=True, then also return any enclosing code.""" + code, n = getblocks(object, lstrip=lstrip, enclosing=enclosing, locate=True) + return code[-1], n[-1] + + +#NOTE: broke backward compatibility 4/16/14 (was lstrip=True, force=True) +def getsource(object, alias='', lstrip=False, enclosing=False, \ + force=False, builtin=False): + """Return the text of the source code for an object.
The source code for + interactively-defined objects is extracted from the interpreter's history. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a single string. An + IOError is raised if the source code cannot be retrieved, while a + TypeError is raised for objects where the source code is unavailable + (e.g. builtins). + + If alias is provided, then add a line of code that renames the object. + If lstrip=True, ensure there is no indentation in the first line of code. + If enclosing=True, then also return any enclosing code. + If force=True, catch (TypeError,IOError) and try to use import hooks. + If builtin=True, force an import for any builtins. + """ + # hascode denotes a callable + hascode = _hascode(object) + # is a class instance type (and not in builtins) + instance = _isinstance(object) + + # get source lines; if fail, try to 'force' an import + try: # fails for builtins, and other assorted object types + lines, lnum = getsourcelines(object, enclosing=enclosing) + except (TypeError, IOError): # failed to get source, resort to import hooks + if not force: # don't try to get types that findsource can't get + raise + if not getmodule(object): # get things like 'None' and '1' + if not instance: return getimport(object, alias, builtin=builtin) + # special handling (numpy arrays, ...) + _import = getimport(object, builtin=builtin) + name = getname(object, force=True) + _alias = "%s = " % alias if alias else "" + if alias == name: _alias = "" + return _import+_alias+"%s\n" % name + else: #FIXME: could use a good bit of cleanup, since using getimport... + if not instance: return getimport(object, alias, builtin=builtin) + # now we are dealing with an instance... + name = object.__class__.__name__ + module = object.__module__ + if module in ['builtins','__builtin__']: + return getimport(object, alias, builtin=builtin) + else: #FIXME: leverage getimport? use 'from module import name'? + lines, lnum = ["%s = __import__('%s', fromlist=['%s']).%s\n" % (name,module,name,name)], 0 + obj = eval(lines[0].split(' = ', 1)[-1]) # evaluate the import expression + lines, lnum = getsourcelines(obj, enclosing=enclosing) + + # strip leading indent (helps ensure can be imported) + if lstrip or alias: + lines = _outdent(lines) + + # instantiate, if there's a nice repr #XXX: BAD IDEA??? + if instance: #and force: #XXX: move into findsource or getsourcelines ? + if '(' in repr(object): lines.append('%r\n' % object) + #else: #XXX: better to somehow leverage __reduce__ ? + # reconstructor,args = object.__reduce__() + # _ = reconstructor(*args) + else: # fall back to serialization #XXX: bad idea? + #XXX: better not duplicate work? #XXX: better new/enclose=True? + lines = dumpsource(object, alias='', new=force, enclose=False) + lines, lnum = [line+'\n' for line in lines.split('\n')][:-1], 0 + #else: object.__code__ # raise AttributeError + + # add an alias to the source code + if alias: + if hascode: + skip = 0 + for line in lines: # skip lines that are decorators + if not line.startswith('@'): break + skip += 1 + #XXX: use regex from findsource / getsourcelines ?
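+ #NOTE: lines[skip] is now the first line of the block that is not a decorator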
+ if lines[skip].lstrip().startswith('def '): # we have a function + if alias != object.__name__: + lines.append('\n%s = %s\n' % (alias, object.__name__)) + elif 'lambda ' in lines[skip]: # we have a lambda + if alias != lines[skip].split('=')[0].strip(): + lines[skip] = '%s = %s' % (alias, lines[skip]) + else: # ...try to use the object's name + if alias != object.__name__: + lines.append('\n%s = %s\n' % (alias, object.__name__)) + else: # class or class instance + if instance: + if alias != lines[-1].split('=')[0].strip(): + lines[-1] = ('%s = ' % alias) + lines[-1] + else: + name = getname(object, force=True) or object.__name__ + if alias != name: + lines.append('\n%s = %s\n' % (alias, name)) + return ''.join(lines) + + +def _hascode(object): + '''True if object has an attribute that stores its __code__''' + return getattr(object,'__code__',None) or getattr(object,'func_code',None) + +def _isinstance(object): + '''True if object is a class instance type (and is not a builtin)''' + if _hascode(object) or isclass(object) or ismodule(object): + return False + if istraceback(object) or isframe(object) or iscode(object): + return False + # special handling (numpy arrays, ...) + if not getmodule(object) and getmodule(type(object)).__name__ in ['numpy']: + return True +# # check if is instance of a builtin +# if not getmodule(object) and getmodule(type(object)).__name__ in ['__builtin__','builtins']: +# return False + _types = ('<class ',"<type 'instance'>") + if not repr(type(object)).startswith(_types): #FIXME: weak hack + return False + if not getmodule(object) or object.__module__ in ['builtins','__builtin__'] or getname(object, force=True) in ['array']: + return False + return True # by process of elimination... it's what we want + + +def _intypes(object): + '''check if object is in the 'types' module''' + import types + # allow user to pass in object or object.__name__ + if type(object) is not type(''): + object = getname(object, force=True) + if object == 'ellipsis': object = 'EllipsisType' + return True if hasattr(types, object) else False + + +def _isstring(object): #XXX: isstringlike better? + '''check if object is a string-like type''' + return isinstance(object, (str, bytes)) + + +def indent(code, spaces=4): + '''indent a block of code with whitespace (default is 4 spaces)''' + indent = indentsize(code) + if type(spaces) is int: spaces = ' '*spaces + # if '\t' is provided, will indent with a tab + nspaces = indentsize(spaces) + # blank lines (etc) need to be ignored + lines = code.split('\n') +## stq = "'''"; dtq = '"""' +## in_stq = in_dtq = False + for i in range(len(lines)): + #FIXME: works...
but shouldn't indent 2nd+ lines of multiline doc + _indent = indentsize(lines[i]) + if indent > _indent: continue + lines[i] = spaces+lines[i] +## #FIXME: may fail when stq and dtq in same line (depends on ordering) +## nstq, ndtq = lines[i].count(stq), lines[i].count(dtq) +## if not in_dtq and not in_stq: +## lines[i] = spaces+lines[i] # we indent +## # entering a comment block +## if nstq%2: in_stq = not in_stq +## if ndtq%2: in_dtq = not in_dtq +## # leaving a comment block +## elif in_dtq and ndtq%2: in_dtq = not in_dtq +## elif in_stq and nstq%2: in_stq = not in_stq +## else: pass + if lines[-1].strip() == '': lines[-1] = '' + return '\n'.join(lines) + + +def _outdent(lines, spaces=None, all=True): + '''outdent lines of code, accounting for docs and line continuations''' + indent = indentsize(lines[0]) + if spaces is None or spaces > indent or spaces < 0: spaces = indent + for i in range(len(lines) if all else 1): + #FIXME: works... but shouldn't outdent 2nd+ lines of multiline doc + _indent = indentsize(lines[i]) + if spaces > _indent: _spaces = _indent + else: _spaces = spaces + lines[i] = lines[i][_spaces:] + return lines + +def outdent(code, spaces=None, all=True): + '''outdent a block of code (default is to strip all leading whitespace)''' + indent = indentsize(code) + if spaces is None or spaces > indent or spaces < 0: spaces = indent + #XXX: will this delete '\n' in some cases? + if not all: return code[spaces:] + return '\n'.join(_outdent(code.split('\n'), spaces=spaces, all=all)) + + +#XXX: not sure what the point of _wrap is... +__globals__ = globals() +__locals__ = locals() +def _wrap(f): + """ encapsulate a function and its __import__ """ + def func(*args, **kwds): + try: + # _ = eval(getsource(f, force=True)) #XXX: safer but less robust + exec(getimportable(f, alias='_'), __globals__, __locals__) + except Exception: + raise ImportError('cannot import name ' + f.__name__) + return _(*args, **kwds) + func.__name__ = f.__name__ + func.__doc__ = f.__doc__ + return func + + +def _enclose(object, alias=''): #FIXME: needs alias to hold returned object + """create a function enclosure around the source of some object""" + #XXX: dummy and stub should append a random string + dummy = '__this_is_a_big_dummy_enclosing_function__' + stub = '__this_is_a_stub_variable__' + code = 'def %s():\n' % dummy + code += indent(getsource(object, alias=stub, lstrip=True, force=True)) + code += indent('return %s\n' % stub) + if alias: code += '%s = ' % alias + code += '%s(); del %s\n' % (dummy, dummy) + #code += "globals().pop('%s',lambda :None)()\n" % dummy + return code + + +def dumpsource(object, alias='', new=False, enclose=True): + """'dump to source', where the code includes a pickled object. + + If new=True and object is a class instance, then create a new + instance using the unpacked class source code. If enclose, then + create the object inside a function enclosure (thus minimizing + any global namespace pollution). + """ + from dill import dumps + pik = repr(dumps(object)) + code = 'import dill\n' + if enclose: + stub = '__this_is_a_stub_variable__' #XXX: *must* be same _enclose.stub + pre = '%s = ' % stub + new = False #FIXME: new=True doesn't work with enclose=True + else: + stub = alias + pre = '%s = ' % stub if alias else alias + + # if a 'new' instance is not needed, then just dump and load + if not new or not _isinstance(object): + code += pre + 'dill.loads(%s)\n' % pik + else: #XXX: other cases where source code is needed???
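+ # sketch of the 'new' path: emit the class source, then rewrite the pickled module reference to __name__ so loading re-creates the instance from that source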
+ code += getsource(object.__class__, alias='', lstrip=True, force=True) + mod = repr(object.__module__) # should have a module (no builtins here) + code += pre + 'dill.loads(%s.replace(b%s,bytes(__name__,"UTF-8")))\n' % (pik,mod) + #code += 'del %s' % object.__class__.__name__ #NOTE: kills any existing! + + if enclose: + # generation of the 'enclosure' + dummy = '__this_is_a_big_dummy_object__' + dummy = _enclose(dummy, alias=alias) + # hack to replace the 'dummy' with the 'real' code + dummy = dummy.split('\n') + code = dummy[0]+'\n' + indent(code) + '\n'.join(dummy[-3:]) + + return code #XXX: better 'dumpsourcelines', returning list of lines? + + +def getname(obj, force=False, fqn=False): #XXX: throw(?) to raise error on fail? + """get the name of the object; for lambdas, get the name of the pointer""" + if fqn: return '.'.join(_namespace(obj)) + module = getmodule(obj) + if not module: # things like "None" and "1" + if not force: return None + return repr(obj) + try: + #XXX: 'wrong' for decorators and curried functions ? + # if obj.func_closure: ...use logic from getimportable, etc ? + name = obj.__name__ + if name == '<lambda>': + return getsource(obj).split('=',1)[0].strip() + # handle some special cases + if module.__name__ in ['builtins','__builtin__']: + if name == 'ellipsis': name = 'EllipsisType' + return name + except AttributeError: #XXX: better to just throw AttributeError ? + if not force: return None + name = repr(obj) + if name.startswith('<'): # or name.split('('): + return None + return name + + +def _namespace(obj): + """_namespace(obj); return namespace hierarchy (as a list of names) + for the given object. For an instance, find the class hierarchy. + + For example: + + >>> from functools import partial + >>> p = partial(int, base=2) + >>> _namespace(p) + [\'functools\', \'partial\'] + """ + # mostly for functions and modules and such + #FIXME: 'wrong' for decorators and curried functions + try: #XXX: needs some work and testing on different types + module = qual = str(getmodule(obj)).split()[1].strip('>').strip('"').strip("'") + qual = qual.split('.') + if ismodule(obj): + return qual + # get name of a lambda, function, etc + name = getname(obj) or obj.__name__ # failing, raise AttributeError + # check special cases (NoneType, ...) + if module in ['builtins','__builtin__']: # BuiltinFunctionType + if _intypes(name): return ['types'] + [name] + return qual + [name] #XXX: can be wrong for some aliased objects + except Exception: pass + # special case: numpy.inf and numpy.nan (we don't want them as floats) + if str(obj) in ['inf','nan','Inf','NaN']: # is more, but are they needed? + return ['numpy'] + [str(obj)] + # mostly for classes and class instances and such + module = getattr(obj.__class__, '__module__', None) + qual = str(obj.__class__) + try: qual = qual[qual.index("'")+1:-2] + except ValueError: pass # str(obj.__class__) made the 'try' unnecessary + qual = qual.split(".") + if module in ['builtins','__builtin__']: + # check special cases (NoneType, Ellipsis, ...) + if qual[-1] == 'ellipsis': qual[-1] = 'EllipsisType' + if _intypes(qual[-1]): module = 'types' #XXX: BuiltinFunctionType + qual = [module] + qual + return qual + + +#NOTE: 05/25/14 broke backward compatibility: added 'alias' as 3rd argument +def _getimport(head, tail, alias='', verify=True, builtin=False): + """helper to build a likely import string from head and tail of namespace.
+ ('head','tail') are used in the following context: "from head import tail" + + If verify=True, then test the import string before returning it. + If builtin=True, then force an import for builtins where possible. + If alias is provided, then rename the object on import. + """ + # special handling for a few common types + if tail in ['Ellipsis', 'NotImplemented'] and head in ['types']: + head = len.__module__ + elif tail in ['None'] and head in ['types']: + _alias = '%s = ' % alias if alias else '' + if alias == tail: _alias = '' + return _alias+'%s\n' % tail + # we don't need to import from builtins, so return '' +# elif tail in ['NoneType','int','float','long','complex']: return '' #XXX: ? + if head in ['builtins','__builtin__']: + # special cases (NoneType, Ellipsis, ...) #XXX: BuiltinFunctionType + if tail == 'ellipsis': tail = 'EllipsisType' + if _intypes(tail): head = 'types' + elif not builtin: + _alias = '%s = ' % alias if alias else '' + if alias == tail: _alias = '' + return _alias+'%s\n' % tail + else: pass # handle builtins below + # get likely import string + if not head: _str = "import %s" % tail + else: _str = "from %s import %s" % (head, tail) + _alias = " as %s\n" % alias if alias else "\n" + if alias == tail: _alias = "\n" + _str += _alias + # FIXME: fails on most decorators, currying, and such... + # (could look for magic __wrapped__ or __func__ attr) + # (could fix in 'namespace' to check obj for closure) + if verify and not head.startswith('dill.'):# weird behavior for dill + #print(_str) + try: exec(_str) #XXX: check if == obj? (name collision) + except ImportError: #XXX: better top-down or bottom-up recursion? + _head = head.rsplit(".",1)[0] #(or get all, then compare == obj?) + if not _head: raise + if _head != head: + _str = _getimport(_head, tail, alias, verify) + return _str + + +#XXX: rename builtin to force? vice versa? verify to force? (as in getsource) +#NOTE: 05/25/14 broke backward compatibility: added 'alias' as 2nd argument +def getimport(obj, alias='', verify=True, builtin=False, enclosing=False): + """get the likely import string for the given object + + obj is the object to inspect + If verify=True, then test the import string before returning it. + If builtin=True, then force an import for builtins where possible. + If enclosing=True, get the import for the outermost enclosing callable. + If alias is provided, then rename the object on import. + """ + if enclosing: + from .detect import outermost + _obj = outermost(obj) + obj = _obj if _obj else obj + # get the namespace + qual = _namespace(obj) + head = '.'.join(qual[:-1]) + tail = qual[-1] + # for named things... with a nice repr #XXX: move into _namespace? + try: # look for '<...>' and be mindful it might be in lists, dicts, etc... + name = repr(obj).split('<',1)[1].split('>',1)[1] + name = None # we have a 'object'-style repr + except Exception: # it's probably something 'importable' + if head in ['builtins','__builtin__']: + name = repr(obj) #XXX: catch [1,2], (1,2), set([1,2])... others? 
+ else: + name = repr(obj).split('(')[0] + #if not repr(obj).startswith('<'): name = repr(obj).split('(')[0] + #else: name = None + if name: # try using name instead of tail + try: return _getimport(head, name, alias, verify, builtin) + except ImportError: pass + except SyntaxError: + if head in ['builtins','__builtin__']: + _alias = '%s = ' % alias if alias else '' + if alias == name: _alias = '' + return _alias+'%s\n' % name + else: pass + try: + #if type(obj) is type(abs): _builtin = builtin # BuiltinFunctionType + #else: _builtin = False + return _getimport(head, tail, alias, verify, builtin) + except ImportError: + raise # could do some checking against obj + except SyntaxError: + if head in ['builtins','__builtin__']: + _alias = '%s = ' % alias if alias else '' + if alias == tail: _alias = '' + return _alias+'%s\n' % tail + raise # could do some checking against obj + + +def _importable(obj, alias='', source=None, enclosing=False, force=True, \ + builtin=True, lstrip=True): + """get an import string (or the source code) for the given object + + This function will attempt to discover the name of the object, or the repr + of the object, or the source code for the object. To attempt to force + discovery of the source code, use source=True, to attempt to force the + use of an import, use source=False; otherwise an import will be sought + for objects not defined in __main__. The intent is to build a string + that can be imported from a python file. obj is the object to inspect. + If alias is provided, then rename the object with the given alias. + + If source=True, use these options: + If enclosing=True, then also return any enclosing code. + If force=True, catch (TypeError,IOError) and try to use import hooks. + If lstrip=True, ensure there is no indentation in the first line of code. + + If source=False, use these options: + If enclosing=True, get the import for the outermost enclosing callable. + If force=True, then don't test the import string before returning it. + If builtin=True, then force an import for builtins where possible. + """ + if source is None: + source = True if isfrommain(obj) else False + if source: # first try to get the source + try: + return getsource(obj, alias, enclosing=enclosing, \ + force=force, lstrip=lstrip, builtin=builtin) + except Exception: pass + try: + if not _isinstance(obj): + return getimport(obj, alias, enclosing=enclosing, \ + verify=(not force), builtin=builtin) + # first 'get the import', then 'get the instance' + _import = getimport(obj, enclosing=enclosing, \ + verify=(not force), builtin=builtin) + name = getname(obj, force=True) + if not name: + raise AttributeError("object has no attribute '__name__'") + _alias = "%s = " % alias if alias else "" + if alias == name: _alias = "" + return _import+_alias+"%s\n" % name + + except Exception: pass + if not source: # try getsource, only if it hasn't been tried yet + try: + return getsource(obj, alias, enclosing=enclosing, \ + force=force, lstrip=lstrip, builtin=builtin) + except Exception: pass + # get the name (of functions, lambdas, and classes) + # or hope that obj can be built from the __repr__ + #XXX: what to do about class instances and such? + obj = getname(obj, force=force) + # we either have __repr__ or __name__ (or None) + if not obj or obj.startswith('<'): + raise AttributeError("object has no attribute '__name__'") + _alias = '%s = ' % alias if alias else '' + if alias == obj: _alias = '' + return _alias+'%s\n' % obj + #XXX: possible failsafe...
(for example, for instances when source=False) + # "import dill; result = dill.loads(<pickled object>); # repr(<object>)" + +def _closuredimport(func, alias='', builtin=False): + """get import for closured objects; return a dict of 'name' and 'import'""" + import re + from .detect import freevars, outermost + free_vars = freevars(func) + func_vars = {} + # split into 'funcs' and 'non-funcs' + for name,obj in list(free_vars.items()): + if not isfunction(obj): continue + # get import for 'funcs' + fobj = free_vars.pop(name) + src = getsource(fobj) + if src.lstrip().startswith('@'): # we have a decorator + src = getimport(fobj, alias=alias, builtin=builtin) + else: # we have to "hack" a bit... and maybe be lucky + encl = outermost(func) + # pattern: 'func = enclosing(fobj' + pat = r'.*[\w\s]=\s*'+getname(encl)+r'\('+getname(fobj) + mod = getname(getmodule(encl)) + #HACK: get file containing 'outer' function; is func there? + lines,_ = findsource(encl) + candidate = [line for line in lines if getname(encl) in line and \ + re.match(pat, line)] + if not candidate: + mod = getname(getmodule(fobj)) + #HACK: get file containing 'inner' function; is func there? + lines,_ = findsource(fobj) + candidate = [line for line in lines \ + if getname(fobj) in line and re.match(pat, line)] + if not len(candidate): raise TypeError('import could not be found') + candidate = candidate[-1] + name = candidate.split('=',1)[0].split()[-1].strip() + src = _getimport(mod, name, alias=alias, builtin=builtin) + func_vars[name] = src + if not func_vars: + name = outermost(func) + mod = getname(getmodule(name)) + if not mod or name is func: # then it can be handled by getimport + name = getname(func, force=True) #XXX: better key? + src = getimport(func, alias=alias, builtin=builtin) + else: + lines,_ = findsource(name) + # pattern: 'func = enclosing(' + candidate = [line for line in lines if getname(name) in line and \ + re.match(r'.*[\w\s]=\s*'+getname(name)+r'\(', line)] + if not len(candidate): raise TypeError('import could not be found') + candidate = candidate[-1] + name = candidate.split('=',1)[0].split()[-1].strip() + src = _getimport(mod, name, alias=alias, builtin=builtin) + func_vars[name] = src + return func_vars + +#XXX: should be able to use __qualname__ +def _closuredsource(func, alias=''): + """get source code for closured objects; return a dict of 'name' + and 'code blocks'""" + #FIXME: this entire function is a messy messy HACK + # - pollutes global namespace + # - fails if names of freevars are reused + # - can unnecessarily duplicate function code + from .detect import freevars + free_vars = freevars(func) + func_vars = {} + # split into 'funcs' and 'non-funcs' + for name,obj in list(free_vars.items()): + if not isfunction(obj): + # get source for 'non-funcs' + free_vars[name] = getsource(obj, force=True, alias=name) + continue + # get source for 'funcs' + fobj = free_vars.pop(name) + src = getsource(fobj, alias) # DO NOT include dependencies + # if source doesn't start with '@', use name as the alias + if not src.lstrip().startswith('@'): #FIXME: 'enclose' in dummy; + src = importable(fobj,alias=name)# wrong ref 'name' + org = getsource(func, alias, enclosing=False, lstrip=True) + src = (src, org) # undecorated first, then target + else: #NOTE: reproduces the code!
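+ # decorated case: pair the target's enclosing source with the decorator's full importable source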
+ org = getsource(func, enclosing=True, lstrip=False) + src = importable(fobj, alias, source=True) # include dependencies + src = (org, src) # target first, then decorated + func_vars[name] = src + src = ''.join(free_vars.values()) + if not func_vars: #FIXME: 'enclose' in dummy; wrong ref 'name' + org = getsource(func, alias, force=True, enclosing=False, lstrip=True) + src = (src, org) # variables first, then target + else: + src = (src, None) # just variables (better '' instead of None?) + func_vars[None] = src + # FIXME: remove duplicates (however, order is important...) + return func_vars + +def importable(obj, alias='', source=None, builtin=True): + """get an importable string (i.e. source code or the import string) + for the given object, including any required objects from the enclosing + and global scope + + This function will attempt to discover the name of the object, or the repr + of the object, or the source code for the object. To attempt to force + discovery of the source code, use source=True, to attempt to force the + use of an import, use source=False; otherwise an import will be sought + for objects not defined in __main__. The intent is to build a string + that can be imported from a python file. + + obj is the object to inspect. If alias is provided, then rename the + object with the given alias. If builtin=True, then force an import for + builtins where possible. + """ + #NOTE: we always 'force', and 'lstrip' as necessary + #NOTE: for 'enclosing', use importable(outermost(obj)) + if source is None: + source = True if isfrommain(obj) else False + elif builtin and isbuiltin(obj): + source = False + tried_source = tried_import = False + while True: + if not source: # we want an import + try: + if _isinstance(obj): # for instances, punt to _importable + return _importable(obj, alias, source=False, builtin=builtin) + src = _closuredimport(obj, alias=alias, builtin=builtin) + if len(src) == 0: + raise NotImplementedError('not implemented') + if len(src) > 1: + raise NotImplementedError('not implemented') + return list(src.values())[0] + except Exception: + if tried_source: raise + tried_import = True + # we want the source + try: + src = _closuredsource(obj, alias=alias) + if len(src) == 0: + raise NotImplementedError('not implemented') + # groan... an inline code stitcher + def _code_stitcher(block): + "stitch together the strings in tuple 'block'" + if block[0] and block[-1]: block = '\n'.join(block) + elif block[0]: block = block[0] + elif block[-1]: block = block[-1] + else: block = '' + return block + # get free_vars first + _src = _code_stitcher(src.pop(None)) + _src = [_src] if _src else [] + # get func_vars + for xxx in src.values(): + xxx = _code_stitcher(xxx) + if xxx: _src.append(xxx) + # make a single source string + if not len(_src): + src = '' + elif len(_src) == 1: + src = _src[0] + else: + src = '\n'.join(_src) + # get source code of objects referred to by obj in global scope + from .detect import globalvars + obj = globalvars(obj) #XXX: don't worry about alias? recurse? etc? 
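+ # then emit source for every non-builtin global that the object references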
+ obj = list(getsource(_obj,name,force=True) for (name,_obj) in obj.items() if not isbuiltin(_obj)) + obj = '\n'.join(obj) if obj else '' + # combine all referred-to source (global then enclosing) + if not obj: return src + if not src: return obj + return obj + src + except Exception: + if tried_import: raise + tried_source = True + source = not source + # should never get here + return + + +# backward compatibility +def getimportable(obj, alias='', byname=True, explicit=False): + return importable(obj,alias,source=(not byname),builtin=explicit) + #return outdent(_importable(obj,alias,source=(not byname),builtin=explicit)) +def likely_import(obj, passive=False, explicit=False): + return getimport(obj, verify=(not passive), builtin=explicit) +def _likely_import(first, last, passive=False, explicit=True): + return _getimport(first, last, verify=(not passive), builtin=explicit) +_get_name = getname +getblocks_from_history = getblocks + + + +# EOF diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__init__.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3fbec382c8b8ad0967d0056ae85ed281bb6a4541 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/__init__.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2018-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +to run this test suite, first build and install `dill`. + + $ python -m pip install ../.. + + +then run the tests with: + + $ python -m dill.tests + + +or, if `nose` is installed: + + $ nosetests + +""" diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__main__.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..b84d24cb8d27ae0c7e34b7ff626e868a6817e782 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/__main__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2018-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import glob +import os +import sys +import subprocess as sp +python = sys.executable +try: + import pox + python = pox.which_python(version=True) or python +except ImportError: + pass +shell = sys.platform[:3] == 'win' + +suite = os.path.dirname(__file__) or os.path.curdir +tests = glob.glob(suite + os.path.sep + 'test_*.py') + + +if __name__ == '__main__': + + failed = 0 + for test in tests: + p = sp.Popen([python, test], shell=shell).wait() + if p: + print('F', end='', flush=True) + failed = 1 + else: + print('.', end='', flush=True) + print('') + exit(failed) diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a07754a11391d3e463258ae5158df9dab2e6f50 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_fglobals.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_fglobals.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a6e7141fdf4f04f722e603ba14ff2be4e682e01f Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_fglobals.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..00fa531db14e3a2d338fa47105a026fb3fe6ff7f Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_moduledict.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_moduledict.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..02ab0ccdddd66888785337a3ef26bea78a97444b Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_moduledict.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_session.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_session.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed60c9c80100dc9e9a79ef57c80e686cdcdf41c2 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_session.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_source.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_source.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..340f8934d0efd761bb7c3958ae590ef624486998 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_source.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_abc.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_abc.py new file mode 100644 index 
0000000000000000000000000000000000000000..b16d30920a079894d26d72b3418c118377d11d73 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_abc.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2023-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +test dill's ability to pickle abstract base class objects +""" +import dill +import abc +from abc import ABC +import warnings + +from types import FunctionType + +dill.settings['recurse'] = True + +class OneTwoThree(ABC): + @abc.abstractmethod + def foo(self): + """A method""" + pass + + @property + @abc.abstractmethod + def bar(self): + """Property getter""" + pass + + @bar.setter + @abc.abstractmethod + def bar(self, value): + """Property setter""" + pass + + @classmethod + @abc.abstractmethod + def cfoo(cls): + """Class method""" + pass + + @staticmethod + @abc.abstractmethod + def sfoo(): + """Static method""" + pass + +class EasyAsAbc(OneTwoThree): + def __init__(self): + self._bar = None + + def foo(self): + return "Instance Method FOO" + + @property + def bar(self): + return self._bar + + @bar.setter + def bar(self, value): + self._bar = value + + @classmethod + def cfoo(cls): + return "Class Method CFOO" + + @staticmethod + def sfoo(): + return "Static Method SFOO" + +def test_abc_non_local(): + assert dill.copy(OneTwoThree) is not OneTwoThree + assert dill.copy(EasyAsAbc) is not EasyAsAbc + + with warnings.catch_warnings(): + warnings.simplefilter("ignore", dill.PicklingWarning) + assert dill.copy(OneTwoThree, byref=True) is OneTwoThree + assert dill.copy(EasyAsAbc, byref=True) is EasyAsAbc + + instance = EasyAsAbc() + # Set a property that StockPickle can't preserve + instance.bar = lambda x: x**2 + depickled = dill.copy(instance) + assert type(depickled) is type(instance) #NOTE: issue #612, test_abc_local + #NOTE: dill.copy of local (or non-local) classes should (not) be the same? + assert type(depickled.bar) is FunctionType + assert depickled.bar(3) == 9 + assert depickled.sfoo() == "Static Method SFOO" + assert depickled.cfoo() == "Class Method CFOO" + assert depickled.foo() == "Instance Method FOO" + +def test_abc_local(): + """ + Test using locally scoped ABC class + """ + class LocalABC(ABC): + @abc.abstractmethod + def foo(self): + pass + + def baz(self): + return repr(self) + + labc = dill.copy(LocalABC) + assert labc is not LocalABC + assert type(labc) is type(LocalABC) + #NOTE: dill.copy of local (or non-local) classes should (not) be the same? + # <class 'abc.ABCMeta'> + # <class 'dill.tests.test_abc.test_abc_local.<locals>.LocalABC'> + + class Real(labc): + def foo(self): + return "True!" + + def baz(self): + return "My " + super(Real, self).baz() + + real = Real() + assert real.foo() == "True!"
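+ # the inherited helper should also work before pickling (illustrative sanity check) + assert real.baz() == "My " + repr(real)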
+ + try: + labc() + except TypeError as e: + # Expected error + pass + else: + print('Failed to raise type error') + assert False + + labc2, pik = dill.copy((labc, Real())) + assert 'Real' == type(pik).__name__ + assert '.Real' in type(pik).__qualname__ + assert type(pik) is not Real + assert labc2 is not LocalABC + assert labc2 is not labc + assert isinstance(pik, labc2) + assert not isinstance(pik, labc) + assert not isinstance(pik, LocalABC) + assert pik.baz() == "My " + repr(pik) + +def test_meta_local_no_cache(): + """ + Test calling metaclass and cache registration + """ + LocalMetaABC = abc.ABCMeta('LocalMetaABC', (), {}) + + class ClassyClass: + pass + + class KlassyClass: + pass + + LocalMetaABC.register(ClassyClass) + + assert not issubclass(KlassyClass, LocalMetaABC) + assert issubclass(ClassyClass, LocalMetaABC) + + res = dill.dumps((LocalMetaABC, ClassyClass, KlassyClass)) + + lmabc, cc, kc = dill.loads(res) + assert type(lmabc) == type(LocalMetaABC) + assert not issubclass(kc, lmabc) + assert issubclass(cc, lmabc) + +if __name__ == '__main__': + test_abc_non_local() + test_abc_local() + test_meta_local_no_cache() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_dataclasses.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_dataclasses.py new file mode 100644 index 0000000000000000000000000000000000000000..10dc51c50a129b7a28b4959d4a01c68e2c76a346 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_dataclasses.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Author: Anirudh Vegesana (avegesan@cs.stanford.edu) +# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +test pickling a dataclass +""" + +import dill +import dataclasses + +def test_dataclasses(): + # Issue #500 + @dataclasses.dataclass + class A: + x: int + y: str + + @dataclasses.dataclass + class B: + a: A + + a = A(1, "test") + before = B(a) + save = dill.dumps(before) + after = dill.loads(save) + assert before != after # classes don't match + assert before == B(A(**dataclasses.asdict(after.a))) + assert dataclasses.asdict(before) == dataclasses.asdict(after) + +if __name__ == '__main__': + test_dataclasses() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_dictviews.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_dictviews.py new file mode 100644 index 0000000000000000000000000000000000000000..4e94ce32c5e50442ff991dd85e061d24f7e50223 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_dictviews.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Author: Anirudh Vegesana (avegesan@cs.stanford.edu) +# Copyright (c) 2021-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import dill +from dill._dill import OLD310, MAPPING_PROXY_TRICK, DictProxyType + +def test_dictproxy(): + assert dill.copy(DictProxyType({'a': 2})) + +def test_dictviews(): + x = {'a': 1} + assert dill.copy(x.keys()) + assert dill.copy(x.values()) + assert dill.copy(x.items()) + +def test_dictproxy_trick(): + if not OLD310 and MAPPING_PROXY_TRICK: + x = {'a': 1} + all_views = (x.values(), x.items(), x.keys(), x) + separate_views = dill.copy(all_views) + new_x = separate_views[-1] + new_x['b'] = 2 + new_x['c'] = 1 + assert len(new_x) == 3 and len(x) == 1 + assert len(separate_views[0]) == 3 and len(all_views[0]) == 1 + assert len(separate_views[1]) == 3 and len(all_views[1]) == 1 + assert len(separate_views[2]) == 3 and len(all_views[2]) == 1 + assert dict(all_views[1]) == x + assert dict(separate_views[1]) == new_x + +if __name__ == '__main__': + test_dictproxy() + test_dictviews() + test_dictproxy_trick() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_diff.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_diff.py new file mode 100644 index 0000000000000000000000000000000000000000..a175305344aeaf8a61c52ab2d16e1084671a0887 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_diff.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +from dill import __diff as diff + +import sys +IS_PYPY = not hasattr(sys, 'getrefcount') + +class A: + pass + +def test_diff(): + a = A() + b = A() + c = A() + a.a = b + b.a = c + diff.memorise(a) + assert not diff.has_changed(a) + c.a = 1 + assert diff.has_changed(a) + diff.memorise(c, force=True) + assert not diff.has_changed(a) + c.a = 2 + assert diff.has_changed(a) + changed = diff.whats_changed(a) + assert list(changed[0].keys()) == ["a"] + assert not changed[1] + + a2 = [] + b2 = [a2] + c2 = [b2] + diff.memorise(c2) + assert not diff.has_changed(c2) + a2.append(1) + assert diff.has_changed(c2) + changed = diff.whats_changed(c2) + assert changed[0] == {} + assert changed[1] + + a3 = {} + b3 = {1: a3} + c3 = {1: b3} + diff.memorise(c3) + assert not diff.has_changed(c3) + a3[1] = 1 + assert diff.has_changed(c3) + changed = diff.whats_changed(c3) + assert changed[0] == {} + assert changed[1] + + if not IS_PYPY: + import abc + # make sure the "_abc_invalidation_counter" doesn't make the test fail + diff.memorise(abc.ABCMeta, force=True) + assert not diff.has_changed(abc) + abc.ABCMeta.zzz = 1 + assert diff.has_changed(abc) + changed = diff.whats_changed(abc) + assert list(changed[0].keys()) == ["ABCMeta"] + assert not changed[1] + + ''' + import Queue + diff.memorise(Queue, force=True) + assert not diff.has_changed(Queue) + Queue.Queue.zzz = 1 + assert diff.has_changed(Queue) + changed = diff.whats_changed(Queue) + assert list(changed[0].keys()) == ["Queue"] + assert not changed[1] + + import math + diff.memorise(math, force=True) + assert not diff.has_changed(math) + math.zzz = 1 + assert diff.has_changed(math) + changed = diff.whats_changed(math) + assert list(changed[0].keys()) == ["zzz"] + assert not changed[1] + ''' + + a = A() + b = A() + c = A() + a.a = b + b.a = c + diff.memorise(a)
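+ # repeat the instance scenario above, this time mutating by attribute deletion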
+ assert not diff.has_changed(a) + c.a = 1 + assert diff.has_changed(a) + diff.memorise(c, force=True) + assert not diff.has_changed(a) + del c.a + assert diff.has_changed(a) + changed = diff.whats_changed(a) + assert list(changed[0].keys()) == ["a"] + assert not changed[1] + + +if __name__ == '__main__': + test_diff() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_extendpickle.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_extendpickle.py new file mode 100644 index 0000000000000000000000000000000000000000..3b274d4bf2714c25ecddbbe13904b72759211d66 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_extendpickle.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import dill as pickle +from io import BytesIO as StringIO + + +def my_fn(x): + return x * 17 + + +def test_extend(): + obj = lambda : my_fn(34) + assert obj() == 578 + + obj_io = StringIO() + pickler = pickle.Pickler(obj_io) + pickler.dump(obj) + + obj_str = obj_io.getvalue() + + obj2_io = StringIO(obj_str) + unpickler = pickle.Unpickler(obj2_io) + obj2 = unpickler.load() + + assert obj2() == 578 + + +def test_isdill(): + obj_io = StringIO() + pickler = pickle.Pickler(obj_io) + assert pickle._dill.is_dill(pickler) is True + + pickler = pickle._dill.StockPickler(obj_io) + assert pickle._dill.is_dill(pickler) is False + + try: + import multiprocess as mp + pickler = mp.reduction.ForkingPickler(obj_io) + assert pickle._dill.is_dill(pickler, child=True) is True + assert pickle._dill.is_dill(pickler, child=False) is False + except Exception: + pass + + +if __name__ == '__main__': + test_extend() + test_isdill() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_functions.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_functions.py new file mode 100644 index 0000000000000000000000000000000000000000..305acc6571e969ca755f79220a3d2fc2a68cdef4 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_functions.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2019-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import functools +import dill +import sys +dill.settings['recurse'] = True + + +def function_a(a): + return a + + +def function_b(b, b1): + return b + b1 + + +def function_c(c, c1=1): + return c + c1 + + +def function_d(d, d1, d2=1): + """doc string""" + return d + d1 + d2 + +function_d.__module__ = 'a module' + + +exec(''' +def function_e(e, *e1, e2=1, e3=2): + return e + sum(e1) + e2 + e3''') + +globalvar = 0 + +@functools.lru_cache(None) +def function_with_cache(x): + global globalvar + globalvar += x + return globalvar + + +def function_with_unassigned_variable(): + if False: + value = None + return (lambda: value) + + +def test_issue_510(): + # A very bizarre use of functions and methods that pickle doesn't get + # correctly for odd reasons.
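+ # f2.__get__(self) binds the plain function to the instance, so each Foo carries a bound method as an instance attribute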
+ class Foo: + def __init__(self): + def f2(self): + return self + self.f2 = f2.__get__(self) + + import dill, pickletools + f = Foo() + f1 = dill.copy(f) + assert f1.f2() is f1 + + +def test_functions(): + dumped_func_a = dill.dumps(function_a) + assert dill.loads(dumped_func_a)(0) == 0 + + dumped_func_b = dill.dumps(function_b) + assert dill.loads(dumped_func_b)(1,2) == 3 + + dumped_func_c = dill.dumps(function_c) + assert dill.loads(dumped_func_c)(1) == 2 + assert dill.loads(dumped_func_c)(1, 2) == 3 + + dumped_func_d = dill.dumps(function_d) + assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__ + assert dill.loads(dumped_func_d).__module__ == function_d.__module__ + assert dill.loads(dumped_func_d)(1, 2) == 4 + assert dill.loads(dumped_func_d)(1, 2, 3) == 6 + assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6 + + function_with_cache(1) + globalvar = 0 + dumped_func_cache = dill.dumps(function_with_cache) + assert function_with_cache(2) == 3 + assert function_with_cache(1) == 1 + assert function_with_cache(3) == 6 + assert function_with_cache(2) == 3 + + empty_cell = function_with_unassigned_variable() + cell_copy = dill.loads(dill.dumps(empty_cell)) + assert 'empty' in str(cell_copy.__closure__[0]) + try: + cell_copy() + except Exception: + # this is good + pass + else: + raise AssertionError('cell_copy() did not read an empty cell') + + exec(''' +dumped_func_e = dill.dumps(function_e) +assert dill.loads(dumped_func_e)(1, 2) == 6 +assert dill.loads(dumped_func_e)(1, 2, 3) == 9 +assert dill.loads(dumped_func_e)(1, 2, e2=3) == 8 +assert dill.loads(dumped_func_e)(1, 2, e2=3, e3=4) == 10 +assert dill.loads(dumped_func_e)(1, 2, 3, e2=4) == 12 +assert dill.loads(dumped_func_e)(1, 2, 3, e2=4, e3=5) == 15''') + +def test_code_object(): + import warnings + from dill._dill import ALL_CODE_PARAMS, CODE_PARAMS, CODE_VERSION, _create_code + code = function_c.__code__ + warnings.filterwarnings('ignore', category=DeprecationWarning) # issue 597 + LNOTAB = getattr(code, 'co_lnotab', b'') + if warnings.filters: del warnings.filters[0] + fields = {f: getattr(code, 'co_'+f) for f in CODE_PARAMS} + fields.setdefault('posonlyargcount', 0) # python >= 3.8 + fields.setdefault('lnotab', LNOTAB) # python <= 3.9 + fields.setdefault('linetable', b'') # python >= 3.10 + fields.setdefault('qualname', fields['name']) # python >= 3.11 + fields.setdefault('exceptiontable', b'') # python >= 3.11 + fields.setdefault('endlinetable', None) # python == 3.11a + fields.setdefault('columntable', None) # python == 3.11a + + for version, _, params in ALL_CODE_PARAMS: + args = tuple(fields[p] for p in params.split()) + try: + _create_code(*args) + if version >= (3,10): + _create_code(fields['lnotab'], *args) + except Exception as error: + raise Exception("failed to construct code object with format version {}".format(version)) from error + +if __name__ == '__main__': + test_functions() + test_issue_510() + test_code_object() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_functors.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_functors.py new file mode 100644 index 0000000000000000000000000000000000000000..1008be691537ca8e1089031fd5bbe924506f4279 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_functors.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. 
+# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import functools +import dill +dill.settings['recurse'] = True + + +def f(a, b, c): # without keywords + pass + + +def g(a, b, c=2): # with keywords + pass + + +def h(a=1, b=2, c=3): # without args + pass + + +def test_functools(): + fp = functools.partial(f, 1, 2) + gp = functools.partial(g, 1, c=2) + hp = functools.partial(h, 1, c=2) + bp = functools.partial(int, base=2) + + assert dill.pickles(fp, safe=True) + assert dill.pickles(gp, safe=True) + assert dill.pickles(hp, safe=True) + assert dill.pickles(bp, safe=True) + + +if __name__ == '__main__': + test_functools() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_logger.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_logger.py new file mode 100644 index 0000000000000000000000000000000000000000..b878a1054b36bfc4e39e3a575ef407c094ebd3a8 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_logger.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +# Author: Leonardo Gama (@leogama) +# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import logging +import re +import tempfile + +import dill +from dill import detect +from dill.logger import stderr_handler, adapter as logger + +try: + from StringIO import StringIO +except ImportError: + from io import StringIO + +test_obj = {'a': (1, 2), 'b': object(), 'f': lambda x: x**2, 'big': list(range(10))} + +def test_logging(should_trace): + buffer = StringIO() + handler = logging.StreamHandler(buffer) + logger.addHandler(handler) + try: + dill.dumps(test_obj) + if should_trace: + regex = re.compile(r'(\S*┬ \w.*[^)]' # begin pickling object + r'|│*└ # \w.* \[\d+ (\wi)?B])' # object written (with size) + ) + for line in buffer.getvalue().splitlines(): + assert regex.fullmatch(line) + return buffer.getvalue() + else: + assert buffer.getvalue() == "" + finally: + logger.removeHandler(handler) + buffer.close() + +def test_trace_to_file(stream_trace): + file = tempfile.NamedTemporaryFile(mode='r') + with detect.trace(file.name, mode='w'): + dill.dumps(test_obj) + file_trace = file.read() + file.close() + # Apparently, objects can change location in memory... + reghex = re.compile(r'0x[0-9A-Za-z]+') + file_trace, stream_trace = reghex.sub('0x', file_trace), reghex.sub('0x', stream_trace) + # PyPy prints dictionary contents with repr(dict)... 
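+ # The repr tail after "dict.__repr__ of" is collapsed to a fixed "{}>" + # placeholder in both traces, so the comparison below checks only the + # structure of the trace, not interpreter-specific object reprs.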
+ regdict = re.compile(r'(dict\.__repr__ of ).*') + file_trace, stream_trace = regdict.sub(r'\1{}>', file_trace), regdict.sub(r'\1{}>', stream_trace) + assert file_trace == stream_trace + +if __name__ == '__main__': + logger.removeHandler(stderr_handler) + test_logging(should_trace=False) + detect.trace(True) + test_logging(should_trace=True) + detect.trace(False) + test_logging(should_trace=False) + + loglevel = logging.ERROR + logger.setLevel(loglevel) + with detect.trace(): + stream_trace = test_logging(should_trace=True) + test_logging(should_trace=False) + assert logger.getEffectiveLevel() == loglevel + test_trace_to_file(stream_trace) diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_mixins.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_mixins.py new file mode 100644 index 0000000000000000000000000000000000000000..6d67dadedf4cb84773cddb9dbdb3ab919ff8dbaf --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_mixins.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import dill +dill.settings['recurse'] = True + + +def wtf(x,y,z): + def zzz(): + return x + def yyy(): + return y + def xxx(): + return z + return zzz,yyy + + +def quad(a=1, b=1, c=0): + inverted = [False] + def invert(): + inverted[0] = not inverted[0] + def dec(f): + def func(*args, **kwds): + x = f(*args, **kwds) + if inverted[0]: x = -x + return a*x**2 + b*x + c + func.__wrapped__ = f + func.invert = invert + func.inverted = inverted + return func + return dec + + +@quad(a=0,b=2) +def double_add(*args): + return sum(args) + + +fx = sum([1,2,3]) + + +### to make it interesting... +def quad_factory(a=1,b=1,c=0): + def dec(f): + def func(*args,**kwds): + fx = f(*args,**kwds) + return a*fx**2 + b*fx + c + return func + return dec + + +@quad_factory(a=0,b=4,c=0) +def quadish(x): + return x+1 + + +quadratic = quad_factory() + + +def doubler(f): + def inner(*args, **kwds): + fx = f(*args, **kwds) + return 2*fx + return inner + + +@doubler +def quadruple(x): + return 2*x + + +def test_mixins(): + # test mixins + assert double_add(1,2,3) == 2*fx + double_add.invert() + assert double_add(1,2,3) == -2*fx + + _d = dill.copy(double_add) + assert _d(1,2,3) == -2*fx + #_d.invert() #FIXME: fails seemingly randomly + #assert _d(1,2,3) == 2*fx + + assert _d.__wrapped__(1,2,3) == fx + + # XXX: issue or feature? 
in python3.4, inverted is linked through copy + if not double_add.inverted[0]: + double_add.invert() + + # test some stuff from source and pointers + ds = dill.source + dd = dill.detect + assert ds.getsource(dd.freevars(quadish)['f']) == '@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n' + assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n return 2*x\n' + assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__ + assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__ + assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__ + assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__ + assert ds.importable(quadruple, source=True) == 'def doubler(f):\n def inner(*args, **kwds):\n fx = f(*args, **kwds)\n return 2*fx\n return inner\n\n@doubler\ndef quadruple(x):\n return 2*x\n' + #***** #FIXME: this needs work + result = ds.importable(quadish, source=True) + a,b,c,_,result = result.split('\n',4) + assert result == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n\n@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n' + assert set([a,b,c]) == set(['a = 0', 'c = 0', 'b = 4']) + result = ds.importable(quadratic, source=True) + a,b,c,result = result.split('\n',3) + assert result == '\ndef dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n' + assert set([a,b,c]) == set(['a = 1', 'c = 0', 'b = 1']) + result = ds.importable(double_add, source=True) + a,b,c,d,_,result = result.split('\n',5) + assert result == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n\n@quad(a=0,b=2)\ndef double_add(*args):\n return sum(args)\n' + assert set([a,b,c,d]) == set(['a = 0', 'c = 0', 'b = 2', 'inverted = [True]']) + #***** + + +if __name__ == '__main__': + test_mixins() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_module.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_module.py new file mode 100644 index 0000000000000000000000000000000000000000..b696d728eb83cb2f146100b3bff1eb33f88440a7 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_module.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import sys +import dill +import test_mixins as module +from importlib import reload +dill.settings['recurse'] = True + +cached = (module.__cached__ if hasattr(module, "__cached__") + else module.__file__.split(".", 1)[0] + ".pyc") + +module.a = 1234 + +pik_mod = dill.dumps(module) + +module.a = 0 + +# remove module +del sys.modules[module.__name__] +del module + +module = dill.loads(pik_mod) +def test_attributes(): + #assert hasattr(module, "a") and module.a == 1234 #FIXME: -m dill.tests + assert module.double_add(1, 2, 3) == 2 * module.fx + +# Restart, and test use_diff + +reload(module) + +try: + dill.use_diff() + + module.a = 1234 + + pik_mod = dill.dumps(module) + + module.a = 0 + + # remove module + del sys.modules[module.__name__] + del module + + module = dill.loads(pik_mod) + def test_diff_attributes(): + assert hasattr(module, "a") and module.a == 1234 + assert module.double_add(1, 2, 3) == 2 * module.fx + +except AttributeError: + def test_diff_attributes(): + pass + +# clean up +import os +if os.path.exists(cached): + os.remove(cached) +pycache = os.path.join(os.path.dirname(module.__file__), "__pycache__") +if os.path.exists(pycache) and not os.listdir(pycache): + os.removedirs(pycache) + + +# test when module is None +import math + +def get_lambda(str, **kwarg): + return eval(str, kwarg, None) + +obj = get_lambda('lambda x: math.exp(x)', math=math) + +def test_module_is_none(): + assert obj.__module__ is None + assert dill.copy(obj)(3) == obj(3) + + +if __name__ == '__main__': + test_attributes() + test_diff_attributes() + test_module_is_none() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_moduledict.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_moduledict.py new file mode 100644 index 0000000000000000000000000000000000000000..5e6e87b365604ba4286488b7e799c1c9a88bcc14 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_moduledict.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import dill +dill.settings['recurse'] = True + +def f(func): + def w(*args): + return f(*args) + return w + +@f +def f2(): pass + +# check both when run as __main__ and on import +def test_decorated(): + assert dill.pickles(f2) + + +import doctest +import logging +logging.basicConfig(level=logging.DEBUG) + +class SomeUnreferencedUnpicklableClass(object): + def __reduce__(self): + raise Exception + +unpicklable = SomeUnreferencedUnpicklableClass() + +# This works fine outside of Doctest: +def test_normal(): + serialized = dill.dumps(lambda x: x) + +# should not try to pickle unpicklable object in __globals__ +def tests(): + """ + >>> serialized = dill.dumps(lambda x: x) + """ + return + +#print("\n\nRunning Doctest:") +def test_doctest(): + doctest.testmod() + + +if __name__ == '__main__': + test_decorated() + test_normal() + test_doctest() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_nested.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_nested.py new file mode 100644 index 0000000000000000000000000000000000000000..9109f55c86b0539632a7bde27e8172c8ba714acd --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_nested.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +test dill's ability to handle nested functions +""" + +import os +import math + +import dill as pickle +pickle.settings['recurse'] = True + + +# the nested function: pickle should fail here, but dill is ok. +def adder(augend): + zero = [0] + + def inner(addend): + return addend + augend + zero[0] + return inner + + +# rewrite the nested function using a class: standard pickle should work here. +class cadder(object): + def __init__(self, augend): + self.augend = augend + self.zero = [0] + + def __call__(self, addend): + return addend + self.augend + self.zero[0] + + +# rewrite again, as a plain class (old-style under Python 2) +class c2adder: + def __init__(self, augend): + self.augend = augend + self.zero = [0] + + def __call__(self, addend): + return addend + self.augend + self.zero[0] + + +# some basic class stuff +class basic(object): + pass + + +class basic2: + pass + + +x = 5 +y = 1 + + +def test_basic(): + a = [0, 1, 2] + pa = pickle.dumps(a) + pmath = pickle.dumps(math) #XXX: FAILS in pickle + pmap = pickle.dumps(map) + # ... 
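+ # note: unlike the stdlib pickler, dill can serialize module objects such + # as math and builtins such as map, so the round-tripped copies below are + # expected to behave exactly like the originals.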
+ la = pickle.loads(pa) + lmath = pickle.loads(pmath) + lmap = pickle.loads(pmap) + assert list(map(math.sin, a)) == list(lmap(lmath.sin, la)) + + +def test_basic_class(): + pbasic2 = pickle.dumps(basic2) + _pbasic2 = pickle.loads(pbasic2)() + pbasic = pickle.dumps(basic) + _pbasic = pickle.loads(pbasic)() + + +def test_c2adder(): + pc2adder = pickle.dumps(c2adder) + pc2add5 = pickle.loads(pc2adder)(x) + assert pc2add5(y) == x+y + + +def test_pickled_cadder(): + pcadder = pickle.dumps(cadder) + pcadd5 = pickle.loads(pcadder)(x) + assert pcadd5(y) == x+y + + +def test_raw_adder_and_inner(): + add5 = adder(x) + assert add5(y) == x+y + + +def test_pickled_adder(): + padder = pickle.dumps(adder) + padd5 = pickle.loads(padder)(x) + assert padd5(y) == x+y + + +def test_pickled_inner(): + add5 = adder(x) + pinner = pickle.dumps(add5) #XXX: FAILS in pickle + p5add = pickle.loads(pinner) + assert p5add(y) == x+y + + +def test_moduledict_where_not_main(): + try: + from . import test_moduledict + except ImportError: + import test_moduledict + name = 'test_moduledict.py' + if os.path.exists(name) and os.path.exists(name+'c'): + os.remove(name+'c') + + if os.path.exists(name) and hasattr(test_moduledict, "__cached__") \ + and os.path.exists(test_moduledict.__cached__): + os.remove(getattr(test_moduledict, "__cached__")) + + if os.path.exists("__pycache__") and not os.listdir("__pycache__"): + os.removedirs("__pycache__") + + +if __name__ == '__main__': + test_basic() + test_basic_class() + test_c2adder() + test_pickled_cadder() + test_raw_adder_and_inner() + test_pickled_adder() + test_pickled_inner() + test_moduledict_where_not_main() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_objects.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_objects.py new file mode 100644 index 0000000000000000000000000000000000000000..7db288de0fc313d52f73f70e99a636242f45cc57 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_objects.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +demonstrate dill's ability to pickle different python types +test pickling of all Python Standard Library objects (currently: CH 1-14 @ 2.7) +""" + +import dill as pickle +pickle.settings['recurse'] = True +#pickle.detect.trace(True) +#import pickle + +# get all objects for testing +from dill import load_types, objects, extend +load_types(pickleable=True,unpickleable=False) + +# uncomment the next two lines to test cloudpickle +#extend(False) +#import cloudpickle as pickle + +# helper objects +class _class: + def _method(self): + pass + +# objects that *fail* if imported +special = {} +special['LambdaType'] = _lambda = lambda x: lambda y: x +special['MethodType'] = _method = _class()._method +special['UnboundMethodType'] = _class._method +objects.update(special) + +def pickles(name, exact=False, verbose=True): + """quick check if object pickles with dill""" + obj = objects[name] + try: + pik = pickle.loads(pickle.dumps(obj)) + if exact: + try: + assert pik == obj + except AssertionError: + assert type(obj) == type(pik) + if verbose: print ("weak: %s %s" % (name, type(obj))) + else: + assert type(obj) == type(pik) + except Exception: + if verbose: print ("fails: %s %s" % (name, type(obj))) + + +def test_objects(verbose=True): + for member in objects.keys(): + #pickles(member, exact=True, verbose=verbose) + pickles(member, exact=False, verbose=verbose) + +if __name__ == '__main__': + import warnings + warnings.simplefilter('ignore') + test_objects(verbose=False) diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_properties.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_properties.py new file mode 100644 index 0000000000000000000000000000000000000000..df3f5b58fac06d7dad962916eccd96f18d835190 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_properties.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import sys + +import dill +dill.settings['recurse'] = True + + +class Foo(object): + def __init__(self): + self._data = 1 + + def _get_data(self): + return self._data + + def _set_data(self, x): + self._data = x + + data = property(_get_data, _set_data) + + +def test_data_not_none(): + FooS = dill.copy(Foo) + assert FooS.data.fget is not None + assert FooS.data.fset is not None + assert FooS.data.fdel is None + + +def test_data_unchanged(): + FooS = dill.copy(Foo) + try: + res = FooS().data + except Exception: + e = sys.exc_info()[1] + raise AssertionError(str(e)) + else: + assert res == 1 + + +def test_data_changed(): + FooS = dill.copy(Foo) + try: + f = FooS() + f.data = 1024 + res = f.data + except Exception: + e = sys.exc_info()[1] + raise AssertionError(str(e)) + else: + assert res == 1024 + + +if __name__ == '__main__': + test_data_not_none() + test_data_unchanged() + test_data_changed() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_registered.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_registered.py new file mode 100644 index 0000000000000000000000000000000000000000..393e2f3ea68627f79e91710934351a76f281ff5b --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_registered.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +test pickling registered objects +""" + +import dill +from dill._objects import failures, registered, succeeds +import warnings +warnings.filterwarnings('ignore') + +def check(d, ok=True): + res = [] + for k,v in d.items(): + try: + z = dill.copy(v) + if ok: res.append(k) + except: + if not ok: res.append(k) + return res + +fails = check(failures) +try: + assert not bool(fails) +except AssertionError as e: + print("FAILS: %s" % fails) + raise e from None + +register = check(registered, ok=False) +try: + assert not bool(register) +except AssertionError as e: + print("REGISTER: %s" % register) + raise e from None + +success = check(succeeds, ok=False) +try: + assert not bool(success) +except AssertionError as e: + print("SUCCESS: %s" % success) + raise e from None + +import builtins +import types +q = dill._dill._reverse_typemap +p = {k:v for k,v in q.items() if k not in vars(builtins) and k not in vars(types)} + +diff = set(p.keys()).difference(registered.keys()) +try: + assert not bool(diff) +except AssertionError as e: + print("DIFF: %s" % diff) + raise e from None + +miss = set(registered.keys()).difference(p.keys()) +try: + assert not bool(miss) +except AssertionError as e: + print("MISS: %s" % miss) + raise e from None diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_restricted.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_restricted.py new file mode 100644 index 0000000000000000000000000000000000000000..cdb773e36df6304e44c5a883f9f4e5beaee0d674 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_restricted.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# +# Author: Kirill Makhonin (@kirillmakhonin) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import dill + +class RestrictedType: + def __bool__(*args, **kwargs): + raise Exception('Restricted function') + + __eq__ = __lt__ = __le__ = __ne__ = __gt__ = __ge__ = __hash__ = __bool__ + +glob_obj = RestrictedType() + +def restricted_func(): + a = glob_obj + +def test_function_with_restricted_object(): + deserialized = dill.loads(dill.dumps(restricted_func, recurse=True)) + + +if __name__ == '__main__': + test_function_with_restricted_object() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_selected.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_selected.py new file mode 100644 index 0000000000000000000000000000000000000000..d8db5c04578c98531237188bb4b425e6e0e4d92e --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_selected.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE +""" +testing some selected object types +""" + +import dill +dill.settings['recurse'] = True + +verbose = False + +def test_dict_contents(): + c = type.__dict__ + for i,j in c.items(): + #try: + ok = dill.pickles(j) + #except Exception: + # print ("FAIL: %s with %s" % (i, dill.detect.errors(j))) + if verbose: print ("%s: %s, %s" % (ok, type(j), j)) + assert ok + if verbose: print ("") + +def _g(x): yield x; + +def _f(): + try: raise + except Exception: + from sys import exc_info + e, er, tb = exc_info() + return er, tb + +class _d(object): + def _method(self): + pass + +from dill import objects +from dill import load_types +load_types(pickleable=True,unpickleable=False) +_newclass = objects['ClassObjectType'] +# some clean-up #FIXME: should happen internal to dill +objects['TemporaryFileType'].close() +objects['TextWrapperType'].close() +objects['BufferedRandomType'].close() +objects['BufferedReaderType'].close() +objects['BufferedWriterType'].close() +objects['FileType'].close() +del objects + +# getset_descriptor for new-style classes (fails on '_method', if not __main__) +def test_class_descriptors(): + d = _d.__dict__ + for i in d.values(): + ok = dill.pickles(i) + if verbose: print ("%s: %s, %s" % (ok, type(i), i)) + assert ok + if verbose: print ("") + od = _newclass.__dict__ + for i in od.values(): + ok = dill.pickles(i) + if verbose: print ("%s: %s, %s" % (ok, type(i), i)) + assert ok + if verbose: print ("") + +# (__main__) class instance for new-style classes +def test_class(): + o = _d() + oo = _newclass() + ok = dill.pickles(o) + if verbose: print ("%s: %s, %s" % (ok, type(o), o)) + assert ok + ok = dill.pickles(oo) + if verbose: print ("%s: %s, %s" % (ok, type(oo), oo)) + assert ok + if verbose: print ("") + +# frames, generators, and tracebacks (all depend on frame) +def test_frame_related(): + g = _g(1) + f = g.gi_frame + e,t = _f() + _is = lambda ok: ok + ok = dill.pickles(f) + if verbose: print ("%s: %s, %s" % (ok, type(f), f)) + assert not ok + ok = dill.pickles(g) + if verbose: print ("%s: %s, %s" % (ok, type(g), g)) + assert _is(not ok) #XXX: dill fails + ok = dill.pickles(t) + if verbose: print ("%s: %s, %s" % (ok, type(t), t)) + assert not ok #XXX: dill fails + ok = dill.pickles(e) + if verbose: print ("%s: %s, %s" % 
(ok, type(e), e)) + assert ok + if verbose: print ("") + +def test_typing(): + import typing + x = typing.Any + assert x == dill.copy(x) + x = typing.Dict[int, str] + assert x == dill.copy(x) + x = typing.List[int] + assert x == dill.copy(x) + x = typing.Tuple[int, str] + assert x == dill.copy(x) + x = typing.Tuple[int] + assert x == dill.copy(x) + x = typing.Tuple[()] + assert x == dill.copy(x) + x = typing.Tuple[()].copy_with(()) + assert x == dill.copy(x) + return + + +if __name__ == '__main__': + test_frame_related() + test_dict_contents() + test_class() + test_class_descriptors() + test_typing() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_session.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_session.py new file mode 100644 index 0000000000000000000000000000000000000000..200642def22c82d15ccc5fc737f7106b93740084 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_session.py @@ -0,0 +1,280 @@ +#!/usr/bin/env python + +# Author: Leonardo Gama (@leogama) +# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +import atexit +import os +import sys +import __main__ +from contextlib import suppress +from io import BytesIO + +import dill + +session_file = os.path.join(os.path.dirname(__file__), 'session-refimported-%s.pkl') + +################### +# Child process # +################### + +def _error_line(error, obj, refimported): + import traceback + line = traceback.format_exc().splitlines()[-2].replace('[obj]', '['+repr(obj)+']') + return "while testing (with refimported=%s): %s" % (refimported, line.lstrip()) + +if __name__ == '__main__' and len(sys.argv) >= 3 and sys.argv[1] == '--child': + # Test session loading in a fresh interpreter session. + refimported = (sys.argv[2] == 'True') + dill.load_module(session_file % refimported, module='__main__') + + def test_modules(refimported): + # FIXME: In this test setting with CPython 3.7, 'calendar' is not included + # in sys.modules, independent of the value of refimported. Tried to + # run garbage collection just before loading the session with no luck. It + # fails even when preceded by 'import calendar'. Needed to run + # these kinds of tests in a subprocess. Failing test sample: + # assert globals()['day_name'] is sys.modules['calendar'].__dict__['day_name'] + try: + for obj in ('json', 'url', 'local_mod', 'sax', 'dom'): + assert globals()[obj].__name__ in sys.modules + assert 'calendar' in sys.modules and 'cmath' in sys.modules + import calendar, cmath + + for obj in ('Calendar', 'isleap'): + assert globals()[obj] is sys.modules['calendar'].__dict__[obj] + assert __main__.day_name.__module__ == 'calendar' + if refimported: + assert __main__.day_name is calendar.day_name + + assert __main__.complex_log is cmath.log + + except AssertionError as error: + error.args = (_error_line(error, obj, refimported),) + raise + + test_modules(refimported) + sys.exit() + +#################### + # Parent process # +#################### + +# Create various kinds of objects to test different internal logic. + +## Modules. +import json # top-level module +import urllib as url # top-level module under alias +from xml import sax # submodule +import xml.dom.minidom as dom # submodule under alias +import test_dictviews as local_mod # non-builtin top-level module + +## Imported objects. 
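+# These names are bound in __main__ but originate in other modules; with +# refimported=True, dump_module() can save such objects by reference to +# their home module rather than by value, which the child process checks +# with identity assertions like "__main__.day_name is calendar.day_name".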
+from calendar import Calendar, isleap, day_name # class, function, other object +from cmath import log as complex_log # imported with alias + +## Local objects. +x = 17 +empty = None +names = ['Alice', 'Bob', 'Carol'] +def squared(x): return x**2 +cubed = lambda x: x**3 +class Person: + def __init__(self, name, age): + self.name = name + self.age = age +person = Person(names[0], x) +class CalendarSubclass(Calendar): + def weekdays(self): + return [day_name[i] for i in self.iterweekdays()] +cal = CalendarSubclass() +selfref = __main__ + +# Setup global namespace for session saving tests. +class TestNamespace: + test_globals = globals().copy() + def __init__(self, **extra): + self.extra = extra + def __enter__(self): + self.backup = globals().copy() + globals().clear() + globals().update(self.test_globals) + globals().update(self.extra) + return self + def __exit__(self, *exc_info): + globals().clear() + globals().update(self.backup) + +def _clean_up_cache(module): + cached = module.__file__.split('.', 1)[0] + '.pyc' + cached = module.__cached__ if hasattr(module, '__cached__') else cached + pycache = os.path.join(os.path.dirname(module.__file__), '__pycache__') + for remove, file in [(os.remove, cached), (os.removedirs, pycache)]: + with suppress(OSError): + remove(file) + +atexit.register(_clean_up_cache, local_mod) + +def _test_objects(main, globals_copy, refimported): + try: + main_dict = __main__.__dict__ + global Person, person, Calendar, CalendarSubclass, cal, selfref + + for obj in ('json', 'url', 'local_mod', 'sax', 'dom'): + assert globals()[obj].__name__ == globals_copy[obj].__name__ + + for obj in ('x', 'empty', 'names'): + assert main_dict[obj] == globals_copy[obj] + + for obj in ['squared', 'cubed']: + assert main_dict[obj].__globals__ is main_dict + assert main_dict[obj](3) == globals_copy[obj](3) + + assert Person.__module__ == __main__.__name__ + assert isinstance(person, Person) + assert person.age == globals_copy['person'].age + + assert issubclass(CalendarSubclass, Calendar) + assert isinstance(cal, CalendarSubclass) + assert cal.weekdays() == globals_copy['cal'].weekdays() + + assert selfref is __main__ + + except AssertionError as error: + error.args = (_error_line(error, obj, refimported),) + raise + +def test_session_main(refimported): + """test dump/load_module() for __main__, both in this process and in a subprocess""" + extra_objects = {} + if refimported: + # Test unpickleable imported object in main. + from sys import flags + extra_objects['flags'] = flags + + with TestNamespace(**extra_objects) as ns: + try: + # Test session loading in a new session. + dill.dump_module(session_file % refimported, refimported=refimported) + from dill.tests.__main__ import python, shell, sp + error = sp.call([python, __file__, '--child', str(refimported)], shell=shell) + if error: sys.exit(error) + finally: + with suppress(OSError): + os.remove(session_file % refimported) + + # Test session loading in the same session. 
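+ # The session is round-tripped through an in-memory BytesIO buffer rather + # than a file; _test_objects (stashed in ns.backup) then compares the + # reloaded globals against the pre-dump copies.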
+ session_buffer = BytesIO() + dill.dump_module(session_buffer, refimported=refimported) + session_buffer.seek(0) + dill.load_module(session_buffer, module='__main__') + ns.backup['_test_objects'](__main__, ns.backup, refimported) + +def test_session_other(): + """test dump/load_module() for a module other than __main__""" + import test_classdef as module + atexit.register(_clean_up_cache, module) + module.selfref = module + dict_objects = [obj for obj in module.__dict__.keys() if not obj.startswith('__')] + + session_buffer = BytesIO() + dill.dump_module(session_buffer, module) + + for obj in dict_objects: + del module.__dict__[obj] + + session_buffer.seek(0) + dill.load_module(session_buffer, module) + + assert all(obj in module.__dict__ for obj in dict_objects) + assert module.selfref is module + +def test_runtime_module(): + from types import ModuleType + modname = '__runtime__' + runtime = ModuleType(modname) + runtime.x = 42 + + mod = dill.session._stash_modules(runtime) + if mod is not runtime: + print("There are objects to save by reference that shouldn't be:", + mod.__dill_imported, mod.__dill_imported_as, mod.__dill_imported_top_level, + file=sys.stderr) + + # This is also for code coverage: it tests the use case of dump_module(refimported=True) + # without imported objects in the namespace. It's a contrived example because + # even dill can't be in it. This should work after fixing #462. + session_buffer = BytesIO() + dill.dump_module(session_buffer, module=runtime, refimported=True) + session_dump = session_buffer.getvalue() + + # Pass a new runtime-created module with the same name. + runtime = ModuleType(modname) # empty + return_val = dill.load_module(BytesIO(session_dump), module=runtime) + assert return_val is None + assert runtime.__name__ == modname + assert runtime.x == 42 + assert runtime not in sys.modules.values() + + # Pass nothing as main. load_module() must create it. 
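+ # With no target module passed in, load_module() is expected to recreate + # the module from the dump's metadata and return it, rather than populate + # an existing module and return None as in the branch above.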
+ session_buffer.seek(0) + runtime = dill.load_module(BytesIO(session_dump)) + assert runtime.__name__ == modname + assert runtime.x == 42 + assert runtime not in sys.modules.values() + +def test_refimported_imported_as(): + import collections + import concurrent.futures + import types + import typing + mod = sys.modules['__test__'] = types.ModuleType('__test__') + dill.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + mod.Dict = collections.UserDict # select by type + mod.AsyncCM = typing.AsyncContextManager # select by __module__ + mod.thread_exec = dill.executor # select by __module__ with regex + + session_buffer = BytesIO() + dill.dump_module(session_buffer, mod, refimported=True) + session_buffer.seek(0) + mod = dill.load(session_buffer) + del sys.modules['__test__'] + + assert set(mod.__dill_imported_as) == { + ('collections', 'UserDict', 'Dict'), + ('typing', 'AsyncContextManager', 'AsyncCM'), + ('dill', 'executor', 'thread_exec'), + } + +def test_load_module_asdict(): + with TestNamespace(): + session_buffer = BytesIO() + dill.dump_module(session_buffer) + + global empty, names, x, y + x = y = 0 # change x and create y + del empty + globals_state = globals().copy() + + session_buffer.seek(0) + main_vars = dill.load_module_asdict(session_buffer) + + assert main_vars is not globals() + assert globals() == globals_state + + assert main_vars['__name__'] == '__main__' + assert main_vars['names'] == names + assert main_vars['names'] is not names + assert main_vars['x'] != x + assert 'y' not in main_vars + assert 'empty' in main_vars + +if __name__ == '__main__': + test_session_main(refimported=False) + test_session_main(refimported=True) + test_session_other() + test_runtime_module() + test_refimported_imported_as() + test_load_module_asdict() diff --git a/llmeval-env/lib/python3.10/site-packages/dill/tests/test_source.py b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_source.py new file mode 100644 index 0000000000000000000000000000000000000000..8771a542a27a15140139b3de6645d9bdd0615eed --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/dill/tests/test_source.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +# +# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) +# Copyright (c) 2008-2016 California Institute of Technology. +# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation. +# License: 3-clause BSD. 
The full license text is available at: +# - https://github.com/uqfoundation/dill/blob/master/LICENSE + +from dill.source import getsource, getname, _wrap, likely_import +from dill.source import getimportable +from dill._dill import IS_PYPY + +import sys +PY310b = 0x30a00b1 + +f = lambda x: x**2 +def g(x): return f(x) - x + +def h(x): + def g(x): return x + return g(x) - x + +class Foo(object): + def bar(self, x): + return x*x+x +_foo = Foo() + +def add(x,y): + return x+y + +# yes, same as 'f', but things are tricky when it comes to pointers +squared = lambda x:x**2 + +class Bar: + pass +_bar = Bar() + + # inspect.getsourcelines # dill.source.getblocks +def test_getsource(): + assert getsource(f) == 'f = lambda x: x**2\n' + assert getsource(g) == 'def g(x): return f(x) - x\n' + assert getsource(h) == 'def h(x):\n def g(x): return x\n return g(x) - x\n' + assert getname(f) == 'f' + assert getname(g) == 'g' + assert getname(h) == 'h' + assert _wrap(f)(4) == 16 + assert _wrap(g)(4) == 12 + assert _wrap(h)(4) == 0 + + assert getname(Foo) == 'Foo' + assert getname(Bar) == 'Bar' + assert getsource(Bar) == 'class Bar:\n pass\n' + assert getsource(Foo) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n' + #XXX: add getsource for _foo, _bar + +# test itself +def test_itself(): + assert likely_import(likely_import)=='from dill.source import likely_import\n' + +# builtin functions and objects +def test_builtin(): + assert likely_import(pow) == 'pow\n' + assert likely_import(100) == '100\n' + assert likely_import(True) == 'True\n' + assert likely_import(pow, explicit=True) == 'from builtins import pow\n' + assert likely_import(100, explicit=True) == '100\n' + assert likely_import(True, explicit=True) == 'True\n' + # this is kinda BS... you can't import a None + assert likely_import(None) == 'None\n' + assert likely_import(None, explicit=True) == 'None\n' + + +# other imported functions +def test_imported(): + from math import sin + assert likely_import(sin) == 'from math import sin\n' + +# interactively defined functions +def test_dynamic(): + assert likely_import(add) == 'from %s import add\n' % __name__ + # interactive lambdas + assert likely_import(squared) == 'from %s import squared\n' % __name__ + +# classes and class instances +def test_classes(): + from io import BytesIO as StringIO + y = "from _io import BytesIO\n" + x = y if (IS_PYPY or sys.hexversion >= PY310b) else "from io import BytesIO\n" + s = StringIO() + + assert likely_import(StringIO) == x + assert likely_import(s) == y + # interactively defined classes and class instances + assert likely_import(Foo) == 'from %s import Foo\n' % __name__ + assert likely_import(_foo) == 'from %s import Foo\n' % __name__ + + +# test getimportable +def test_importable(): + assert getimportable(add) == 'from %s import add\n' % __name__ + assert getimportable(squared) == 'from %s import squared\n' % __name__ + assert getimportable(Foo) == 'from %s import Foo\n' % __name__ + assert getimportable(Foo.bar) == 'from %s import bar\n' % __name__ + assert getimportable(_foo.bar) == 'from %s import bar\n' % __name__ + assert getimportable(None) == 'None\n' + assert getimportable(100) == '100\n' + + assert getimportable(add, byname=False) == 'def add(x,y):\n return x+y\n' + assert getimportable(squared, byname=False) == 'squared = lambda x:x**2\n' + assert getimportable(None, byname=False) == 'None\n' + assert getimportable(Bar, byname=False) == 'class Bar:\n pass\n' + assert getimportable(Foo, byname=False) == 'class Foo(object):\n def bar(self, x):\n 
return x*x+x\n' + assert getimportable(Foo.bar, byname=False) == 'def bar(self, x):\n return x*x+x\n' + assert getimportable(Foo.bar, byname=True) == 'from %s import bar\n' % __name__ + assert getimportable(Foo.bar, alias='memo', byname=True) == 'from %s import bar as memo\n' % __name__ + assert getimportable(Foo, alias='memo', byname=True) == 'from %s import Foo as memo\n' % __name__ + assert getimportable(squared, alias='memo', byname=True) == 'from %s import squared as memo\n' % __name__ + assert getimportable(squared, alias='memo', byname=False) == 'memo = squared = lambda x:x**2\n' + assert getimportable(add, alias='memo', byname=False) == 'def add(x,y):\n return x+y\n\nmemo = add\n' + assert getimportable(None, alias='memo', byname=False) == 'memo = None\n' + assert getimportable(100, alias='memo', byname=False) == 'memo = 100\n' + assert getimportable(add, explicit=True) == 'from %s import add\n' % __name__ + assert getimportable(squared, explicit=True) == 'from %s import squared\n' % __name__ + assert getimportable(Foo, explicit=True) == 'from %s import Foo\n' % __name__ + assert getimportable(Foo.bar, explicit=True) == 'from %s import bar\n' % __name__ + assert getimportable(_foo.bar, explicit=True) == 'from %s import bar\n' % __name__ + assert getimportable(None, explicit=True) == 'None\n' + assert getimportable(100, explicit=True) == '100\n' + + +def test_numpy(): + try: + from numpy import array + x = array([1,2,3]) + assert getimportable(x) == 'from numpy import array\narray([1, 2, 3])\n' + assert getimportable(array) == 'from %s import array\n' % array.__module__ + assert getimportable(x, byname=False) == 'from numpy import array\narray([1, 2, 3])\n' + assert getimportable(array, byname=False) == 'from %s import array\n' % array.__module__ + except ImportError: pass + +#NOTE: if before likely_import(pow), will cause pow to throw AssertionError +def test_foo(): + assert getimportable(_foo, byname=False).startswith("import dill\nclass Foo(object):\n def bar(self, x):\n return x*x+x\ndill.loads(") + +if __name__ == '__main__': + test_getsource() + test_itself() + test_builtin() + test_imported() + test_dynamic() + test_classes() + test_importable() + test_numpy() + test_foo() diff --git a/llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/WHEEL b/llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..09a4f39676d0990506e77c210d61756d8852a8bd --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/WHEEL @@ -0,0 +1,8 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_5_x86_64 +Tag: cp310-cp310-manylinux1_x86_64 +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/top_level.txt b/llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..52f13fc459edf8f3def6f792c432f0b64f313176 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/top_level.txt @@ -0,0 +1 @@ +frozenlist diff --git a/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/__init__.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..379e78fb72c738e7fecbf921254df400b9db9417 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_fork_bomb.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_fork_bomb.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b3405200e73877bc04d472d08685564802112e9 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_fork_bomb.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_preload.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_preload.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9272aeea7fad0fe69775bd7f7d420032fe61b951 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_preload.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/test_multiprocessing_fork.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/test_multiprocessing_fork.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f98d134756f077341894e05f9a2871e7cd1fd1a3 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/test_multiprocessing_fork.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/test_multiprocessing_forkserver.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/test_multiprocessing_forkserver.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6df4327516e3af814f1a5d347863cba8ff7c418e Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/test_multiprocessing_forkserver.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/License.txt b/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/License.txt new file mode 100644 index 0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/License.txt @@ -0,0 +1,1568 @@ +End User License Agreement +-------------------------- + + +Preface +------- + +The Software License Agreement in Chapter 1 and the Supplement +in Chapter 2 contain license terms and conditions that govern +the use of NVIDIA software. By accepting this agreement, you +agree to comply with all the terms and conditions applicable +to the product(s) included herein. + + +NVIDIA Driver + + +Description + +This package contains the operating system driver and +fundamental system software components for NVIDIA GPUs. + + +NVIDIA CUDA Toolkit + + +Description + +The NVIDIA CUDA Toolkit provides command-line and graphical +tools for building, debugging and optimizing the performance +of applications accelerated by NVIDIA GPUs, runtime and math +libraries, and documentation including programming guides, +user manuals, and API references. 
+ + +Default Install Location of CUDA Toolkit + +Windows platform: + +%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.# + +Linux platform: + +/usr/local/cuda-#.# + +Mac platform: + +/Developer/NVIDIA/CUDA-#.# + + +NVIDIA CUDA Samples + + +Description + +This package includes over 100+ CUDA examples that demonstrate +various CUDA programming principles, and efficient CUDA +implementation of algorithms in specific application domains. + + +Default Install Location of CUDA Samples + +Windows platform: + +%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.# + +Linux platform: + +/usr/local/cuda-#.#/samples + +and + +$HOME/NVIDIA_CUDA-#.#_Samples + +Mac platform: + +/Developer/NVIDIA/CUDA-#.#/samples + + +NVIDIA Nsight Visual Studio Edition (Windows only) + + +Description + +NVIDIA Nsight Development Platform, Visual Studio Edition is a +development environment integrated into Microsoft Visual +Studio that provides tools for debugging, profiling, analyzing +and optimizing your GPU computing and graphics applications. + + +Default Install Location of Nsight Visual Studio Edition + +Windows platform: + +%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.# + + +1. License Agreement for NVIDIA Software Development Kits +--------------------------------------------------------- + + +Release Date: July 26, 2018 +--------------------------- + + +Important NoticeRead before downloading, installing, +copying or using the licensed software: +------------------------------------------------------- + +This license agreement, including exhibits attached +("Agreement”) is a legal agreement between you and NVIDIA +Corporation ("NVIDIA") and governs your use of a NVIDIA +software development kit (“SDK”). + +Each SDK has its own set of software and materials, but here +is a description of the types of items that may be included in +a SDK: source code, header files, APIs, data sets and assets +(examples include images, textures, models, scenes, videos, +native API input/output files), binary software, sample code, +libraries, utility programs, programming code and +documentation. + +This Agreement can be accepted only by an adult of legal age +of majority in the country in which the SDK is used. + +If you are entering into this Agreement on behalf of a company +or other legal entity, you represent that you have the legal +authority to bind the entity to this Agreement, in which case +“you” will mean the entity you represent. + +If you don’t have the required age or authority to accept +this Agreement, or if you don’t accept all the terms and +conditions of this Agreement, do not download, install or use +the SDK. + +You agree to use the SDK only for purposes that are permitted +by (a) this Agreement, and (b) any applicable law, regulation +or generally accepted practices or guidelines in the relevant +jurisdictions. + + +1.1. License + + +1.1.1. License Grant + +Subject to the terms of this Agreement, NVIDIA hereby grants +you a non-exclusive, non-transferable license, without the +right to sublicense (except as expressly provided in this +Agreement) to: + + 1. Install and use the SDK, + + 2. Modify and create derivative works of sample source code + delivered in the SDK, and + + 3. Distribute those portions of the SDK that are identified + in this Agreement as distributable, as incorporated in + object code format into a software application that meets + the distribution requirements indicated in this Agreement. + + +1.1.2. 
Distribution Requirements + +These are the distribution requirements for you to exercise +the distribution grant: + + 1. Your application must have material additional + functionality, beyond the included portions of the SDK. + + 2. The distributable portions of the SDK shall only be + accessed by your application. + + 3. The following notice shall be included in modifications + and derivative works of sample source code distributed: + “This software contains source code provided by NVIDIA + Corporation.” + + 4. Unless a developer tool is identified in this Agreement + as distributable, it is delivered for your internal use + only. + + 5. The terms under which you distribute your application + must be consistent with the terms of this Agreement, + including (without limitation) terms relating to the + license grant and license restrictions and protection of + NVIDIA’s intellectual property rights. Additionally, you + agree that you will protect the privacy, security and + legal rights of your application users. + + 6. You agree to notify NVIDIA in writing of any known or + suspected distribution or use of the SDK not in compliance + with the requirements of this Agreement, and to enforce + the terms of your agreements with respect to distributed + SDK. + + +1.1.3. Authorized Users + +You may allow employees and contractors of your entity or of +your subsidiary(ies) to access and use the SDK from your +secure network to perform work on your behalf. + +If you are an academic institution you may allow users +enrolled or employed by the academic institution to access and +use the SDK from your secure network. + +You are responsible for the compliance with the terms of this +Agreement by your authorized users. If you become aware that +your authorized users didn’t follow the terms of this +Agreement, you agree to take reasonable steps to resolve the +non-compliance and prevent new occurrences. + + +1.1.4. Pre-Release SDK + +The SDK versions identified as alpha, beta, preview or +otherwise as pre-release, may not be fully functional, may +contain errors or design flaws, and may have reduced or +different security, privacy, accessibility, availability, and +reliability standards relative to commercial versions of +NVIDIA software and materials. Use of a pre-release SDK may +result in unexpected results, loss of data, project delays or +other unpredictable damage or loss. + +You may use a pre-release SDK at your own risk, understanding +that pre-release SDKs are not intended for use in production +or business-critical systems. + +NVIDIA may choose not to make available a commercial version +of any pre-release SDK. NVIDIA may also choose to abandon +development and terminate the availability of a pre-release +SDK at any time without liability. + + +1.1.5. Updates + +NVIDIA may, at its option, make available patches, workarounds +or other updates to this SDK. Unless the updates are provided +with their separate governing terms, they are deemed part of +the SDK licensed to you as provided in this Agreement. You +agree that the form and content of the SDK that NVIDIA +provides may change without prior notice to you. While NVIDIA +generally maintains compatibility between versions, NVIDIA may +in some cases make changes that introduce incompatibilities in +future versions of the SDK. + + +1.1.6. 
Third Party Licenses + +The SDK may come bundled with, or otherwise include or be +distributed with, third party software licensed by a NVIDIA +supplier and/or open source software provided under an open +source license. Use of third party software is subject to the +third-party license terms, or in the absence of third party +terms, the terms of this Agreement. Copyright to third party +software is held by the copyright holders indicated in the +third-party software or license. + + +1.1.7. Reservation of Rights + +NVIDIA reserves all rights, title, and interest in and to the +SDK, not expressly granted to you under this Agreement. + + +1.2. Limitations + +The following license limitations apply to your use of the +SDK: + + 1. You may not reverse engineer, decompile or disassemble, + or remove copyright or other proprietary notices from any + portion of the SDK or copies of the SDK. + + 2. Except as expressly provided in this Agreement, you may + not copy, sell, rent, sublicense, transfer, distribute, + modify, or create derivative works of any portion of the + SDK. For clarity, you may not distribute or sublicense the + SDK as a stand-alone product. + + 3. Unless you have an agreement with NVIDIA for this + purpose, you may not indicate that an application created + with the SDK is sponsored or endorsed by NVIDIA. + + 4. You may not bypass, disable, or circumvent any + encryption, security, digital rights management or + authentication mechanism in the SDK. + + 5. You may not use the SDK in any manner that would cause it + to become subject to an open source software license. As + examples, licenses that require as a condition of use, + modification, and/or distribution that the SDK be: + + a. Disclosed or distributed in source code form; + + b. Licensed for the purpose of making derivative works; + or + + c. Redistributable at no charge. + + 6. Unless you have an agreement with NVIDIA for this + purpose, you may not use the SDK with any system or + application where the use or failure of the system or + application can reasonably be expected to threaten or + result in personal injury, death, or catastrophic loss. + Examples include use in avionics, navigation, military, + medical, life support or other life critical applications. + NVIDIA does not design, test or manufacture the SDK for + these critical uses and NVIDIA shall not be liable to you + or any third party, in whole or in part, for any claims or + damages arising from such uses. + + 7. You agree to defend, indemnify and hold harmless NVIDIA + and its affiliates, and their respective employees, + contractors, agents, officers and directors, from and + against any and all claims, damages, obligations, losses, + liabilities, costs or debt, fines, restitutions and + expenses (including but not limited to attorney’s fees + and costs incident to establishing the right of + indemnification) arising out of or related to your use of + the SDK outside of the scope of this Agreement, or not in + compliance with its terms. + + +1.3. Ownership + + 1. NVIDIA or its licensors hold all rights, title and + interest in and to the SDK and its modifications and + derivative works, including their respective intellectual + property rights, subject to your rights described in this + section. This SDK may include software and materials from + NVIDIA’s licensors, and these licensors are intended + third party beneficiaries that may enforce this Agreement + with respect to their intellectual property rights. + + 2. 
You hold all rights, title and interest in and to your
+     applications and your derivative works of the sample
+     source code delivered in the SDK, including their
+     respective intellectual property rights, subject to
+     NVIDIA’s rights described in this section.
+
+  3. You may, but don’t have to, provide to NVIDIA
+     suggestions, feature requests or other feedback regarding
+     the SDK, including possible enhancements or modifications
+     to the SDK. For any feedback that you voluntarily provide,
+     you hereby grant NVIDIA and its affiliates a perpetual,
+     non-exclusive, worldwide, irrevocable license to use,
+     reproduce, modify, license, sublicense (through multiple
+     tiers of sublicensees), and distribute (through multiple
+     tiers of distributors) it without the payment of any
+     royalties or fees to you. NVIDIA will use feedback at its
+     discretion. NVIDIA is constantly looking for ways to
+     improve its products, so you may send feedback to NVIDIA
+     through the developer portal at https://developer.nvidia.com.
+
+
+1.4. No Warranties
+
+THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
+FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
+ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
+OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
+BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
+ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
+WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
+DEALING OR COURSE OF TRADE.
+
+
+1.5. Limitation of Liability
+
+TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
+AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
+PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
+OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
+PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
+WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
+WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
+OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
+PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
+LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES’
+TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
+AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
+NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
+LIMIT.
+
+These exclusions and limitations of liability shall apply
+regardless of whether NVIDIA or its affiliates have been
+advised of the possibility of such damages, and regardless of
+whether a remedy fails its essential purpose. These exclusions
+and limitations of liability form an essential basis of the
+bargain between the parties, and, absent any of these
+exclusions or limitations of liability, the provisions of this
+Agreement, including, without limitation, the economic terms,
+would be substantially different.
+
+
+1.6. Termination
+
+  1. This Agreement will continue to apply until terminated by
+     either you or NVIDIA as described below.
+
+  2. If you want to terminate this Agreement, you may do so by
+     ceasing your use of the SDK.
+
+  3. NVIDIA may, at any time, terminate this Agreement if:
+
+     a. (i) you fail to comply with any term of this
+        Agreement and the non-compliance is not fixed within
+        thirty (30) days following notice from NVIDIA (or
+        immediately if you violate NVIDIA’s intellectual
+        property rights);
+
+     b. (ii) you commence or participate in any legal
+        proceeding against NVIDIA with respect to the SDK; or
+
+     c. 
(iii) NVIDIA decides to no longer provide the SDK in
+        a country or, in NVIDIA’s sole discretion, the
+        continued use of it is no longer commercially viable.
+
+  4. Upon any termination of this Agreement, you agree to
+     promptly discontinue use of the SDK and destroy all copies
+     in your possession or control. Your prior distributions in
+     accordance with this Agreement are not affected by the
+     termination of this Agreement. Upon written request, you
+     will certify in writing that you have complied with your
+     commitments under this section. Upon any termination of
+     this Agreement, all provisions survive except for the
+     license grant provisions.
+
+
+1.7. General
+
+If you wish to assign this Agreement or your rights and
+obligations, including by merger, consolidation, dissolution
+or operation of law, contact NVIDIA to ask for permission. Any
+attempted assignment not approved by NVIDIA in writing shall
+be void and of no effect. NVIDIA may assign, delegate or
+transfer this Agreement and its rights and obligations, and,
+if the assignee is a non-affiliate, you will be notified.
+
+You agree to cooperate with NVIDIA and provide reasonably
+requested information to verify your compliance with this
+Agreement.
+
+This Agreement will be governed in all respects by the laws of
+the United States and of the State of Delaware as those laws
+are applied to contracts entered into and performed entirely
+within Delaware by Delaware residents, without regard to
+conflicts of laws principles. The United Nations Convention on
+Contracts for the International Sale of Goods is specifically
+disclaimed. You agree to all terms of this Agreement in the
+English language.
+
+The state or federal courts located in Santa Clara County,
+California shall have exclusive jurisdiction over any dispute
+or claim arising out of this Agreement. Notwithstanding this,
+you agree that NVIDIA shall still be allowed to apply for
+injunctive remedies or an equivalent type of urgent legal
+relief in any jurisdiction.
+
+If any court of competent jurisdiction determines that any
+provision of this Agreement is illegal, invalid or
+unenforceable, such provision will be construed as limited to
+the extent necessary to be consistent with and fully
+enforceable under the law, and the remaining provisions will
+remain in full force and effect. Unless otherwise specified,
+remedies are cumulative.
+
+Each party acknowledges and agrees that the other is an
+independent contractor in the performance of this Agreement.
+
+The SDK has been developed entirely at private expense and
+constitutes “commercial items” consisting of “commercial
+computer software” and “commercial computer software
+documentation” provided with RESTRICTED RIGHTS. Use,
+duplication or disclosure by the U.S. Government or a U.S.
+Government subcontractor is subject to the restrictions in
+this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
+in subparagraphs (c)(1) and (2) of the Commercial Computer
+Software - Restricted Rights clause at FAR 52.227-19, as
+applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
+Expressway, Santa Clara, CA 95051.
+
+The SDK is subject to United States export laws and
+regulations. You agree that you will not ship, transfer or
+export the SDK into any country, or use the SDK in any manner,
+prohibited by the United States Bureau of Industry and
+Security or economic sanctions regulations administered by the
+U.S. 
Department of Treasury’s Office of Foreign Assets +Control (OFAC), or any applicable export laws, restrictions or +regulations. These laws include restrictions on destinations, +end users and end use. By accepting this Agreement, you +confirm that you are not a resident or citizen of any country +currently embargoed by the U.S. and that you are not otherwise +prohibited from receiving the SDK. + +Any notice delivered by NVIDIA to you under this Agreement +will be delivered via mail, email or fax. You agree that any +notices that NVIDIA sends you electronically will satisfy any +legal communication requirements. Please direct your legal +notices or other correspondence to NVIDIA Corporation, 2788 +San Tomas Expressway, Santa Clara, California 95051, United +States of America, Attention: Legal Department. + +This Agreement and any exhibits incorporated into this +Agreement constitute the entire agreement of the parties with +respect to the subject matter of this Agreement and supersede +all prior negotiations or documentation exchanged between the +parties relating to this SDK license. Any additional and/or +conflicting terms on documents issued by you are null, void, +and invalid. Any amendment or waiver under this Agreement +shall be in writing and signed by representatives of both +parties. + + +2. CUDA Toolkit Supplement to Software License Agreement for +NVIDIA Software Development Kits +------------------------------------------------------------ + + +Release date: August 16, 2018 +----------------------------- + +The terms in this supplement govern your use of the NVIDIA +CUDA Toolkit SDK under the terms of your license agreement +(“Agreement”) as modified by this supplement. Capitalized +terms used but not defined below have the meaning assigned to +them in the Agreement. + +This supplement is an exhibit to the Agreement and is +incorporated as an integral part of the Agreement. In the +event of conflict between the terms in this supplement and the +terms in the Agreement, the terms in this supplement govern. + + +2.1. License Scope + +The SDK is licensed for you to develop applications only for +use in systems with NVIDIA GPUs. + + +2.2. Distribution + +The portions of the SDK that are distributable under the +Agreement are listed in Attachment A. + + +2.3. Operating Systems + +Those portions of the SDK designed exclusively for use on the +Linux or FreeBSD operating systems, or other operating systems +derived from the source code to these operating systems, may +be copied and redistributed for use in accordance with this +Agreement, provided that the object code files are not +modified in any way (except for unzipping of compressed +files). + + +2.4. Audio and Video Encoders and Decoders + +You acknowledge and agree that it is your sole responsibility +to obtain any additional third-party licenses required to +make, have made, use, have used, sell, import, and offer for +sale your products or services that include or incorporate any +third-party software and content relating to audio and/or +video encoders and decoders from, including but not limited +to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., +MPEG-LA, and Coding Technologies. NVIDIA does not grant to you +under this Agreement any necessary patent or other rights with +respect to any audio and/or video encoders and decoders. + + +2.5. 
Licensing
+
+If the distribution terms in this Agreement are not suitable
+for your organization, or for any questions regarding this
+Agreement, please contact NVIDIA at
+nvidia-compute-license-questions@nvidia.com.
+
+
+2.6. Attachment A
+
+The following portions of the SDK are distributable under the
+Agreement, listed per component and operating system:
+
+Component: CUDA Runtime
+  Windows:  cudart.dll, cudart_static.lib, cudadevrt.lib
+  Mac OSX:  libcudart.dylib, libcudart_static.a, libcudadevrt.a
+  Linux:    libcudart.so, libcudart_static.a, libcudadevrt.a
+  Android:  libcudart.so, libcudart_static.a, libcudadevrt.a
+
+Component: CUDA FFT Library
+  Windows:  cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
+  Mac OSX:  libcufft.dylib, libcufft_static.a, libcufftw.dylib,
+            libcufftw_static.a
+  Linux:    libcufft.so, libcufft_static.a, libcufftw.so,
+            libcufftw_static.a
+  Android:  libcufft.so, libcufft_static.a, libcufftw.so,
+            libcufftw_static.a
+
+Component: CUDA BLAS Library
+  Windows:  cublas.dll, cublasLt.dll
+  Mac OSX:  libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
+            libcublasLt_static.a
+  Linux:    libcublas.so, libcublasLt.so, libcublas_static.a,
+            libcublasLt_static.a
+  Android:  libcublas.so, libcublasLt.so, libcublas_static.a,
+            libcublasLt_static.a
+
+Component: NVIDIA "Drop-in" BLAS Library
+  Windows:  nvblas.dll
+  Mac OSX:  libnvblas.dylib
+  Linux:    libnvblas.so
+
+Component: CUDA Sparse Matrix Library
+  Windows:  cusparse.dll, cusparse.lib
+  Mac OSX:  libcusparse.dylib, libcusparse_static.a
+  Linux:    libcusparse.so, libcusparse_static.a
+  Android:  libcusparse.so, libcusparse_static.a
+
+Component: CUDA Linear Solver Library
+  Windows:  cusolver.dll, cusolver.lib
+  Mac OSX:  libcusolver.dylib, libcusolver_static.a
+  Linux:    libcusolver.so, libcusolver_static.a
+  Android:  libcusolver.so, libcusolver_static.a
+
+Component: CUDA Random Number Generation Library
+  Windows:  curand.dll, curand.lib
+  Mac OSX:  libcurand.dylib, libcurand_static.a
+  Linux:    libcurand.so, libcurand_static.a
+  Android:  libcurand.so, libcurand_static.a
+
+Component: CUDA Accelerated Graph Library
+
+Component: NVIDIA Performance Primitives Library
+  Windows:  nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
+            nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
+            nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
+            nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
+            nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
+  Mac OSX:  libnppc.dylib, libnppc_static.a, libnppial.dylib,
+            libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
+            libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
+            libnppidei_static.a, libnppif.dylib, libnppif_static.a,
+            libnppig.dylib, libnppig_static.a, libnppim.dylib,
+            libnppim_static.a, libnppist.dylib, libnppist_static.a,
+            libnppisu.dylib, libnppisu_static.a, libnppitc.dylib,
+            libnppitc_static.a, libnpps.dylib, libnpps_static.a
+  Linux:    libnppc.so, libnppc_static.a, libnppial.so,
+            libnppial_static.a, libnppicc.so, libnppicc_static.a,
+            libnppicom.so, libnppicom_static.a, libnppidei.so,
+            libnppidei_static.a, libnppif.so, libnppif_static.a,
+            libnppig.so, libnppig_static.a, libnppim.so,
+            libnppim_static.a, libnppist.so, libnppist_static.a,
+            libnppisu.so, libnppisu_static.a, libnppitc.so,
+            libnppitc_static.a, libnpps.so, libnpps_static.a
+  Android:  libnppc.so, libnppc_static.a, libnppial.so,
+            libnppial_static.a, libnppicc.so, libnppicc_static.a,
+            libnppicom.so, libnppicom_static.a, libnppidei.so,
+            libnppidei_static.a, libnppif.so, libnppif_static.a,
+            libnppig.so, libnppig_static.a, libnppim.so,
+            libnppim_static.a, libnppist.so, libnppist_static.a,
+            libnppisu.so, libnppisu_static.a, libnppitc.so,
+            libnppitc_static.a, libnpps.so, libnpps_static.a
+
+Component: NVIDIA JPEG Library
+  Linux:    libnvjpeg.so, libnvjpeg_static.a
+
+Component: Internal common library required for statically linking
+           to cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
+  Mac OSX:  libculibos.a
+  Linux:    libculibos.a
+
+Component: NVIDIA Runtime Compilation Library and Header
+  All:      nvrtc.h
+  Windows:  nvrtc.dll, nvrtc-builtins.dll
+  Mac OSX:  libnvrtc.dylib, libnvrtc-builtins.dylib
+  Linux:    libnvrtc.so, libnvrtc-builtins.so
+
+Component: NVIDIA Optimizing Compiler Library
+  Windows:  nvvm.dll
+  Mac OSX:  libnvvm.dylib
+  Linux:    libnvvm.so
+
+Component: NVIDIA Common Device Math Functions Library
+  Windows:  libdevice.10.bc
+  Mac OSX:  libdevice.10.bc
+  Linux:    libdevice.10.bc
+
+Component: CUDA Occupancy Calculation Header Library
+  All:      cuda_occupancy.h
+
+Component: CUDA Half Precision Headers
+  All:      cuda_fp16.h, cuda_fp16.hpp
+
+Component: CUDA Profiling Tools Interface (CUPTI) Library
+  Windows:  cupti.dll
+  Mac OSX:  libcupti.dylib
+  Linux:    libcupti.so
+
+Component: NVIDIA Tools Extension Library
+  Windows:  nvToolsExt.dll, nvToolsExt.lib
+  Mac OSX:  libnvToolsExt.dylib
+  Linux:    libnvToolsExt.so
+
+Component: NVIDIA CUDA Driver Libraries
+  Linux:    libcuda.so, libnvidia-fatbinaryloader.so,
+            libnvidia-ptxjitcompiler.so
+
+The NVIDIA CUDA Driver Libraries are only distributable in
+applications that meet these criteria:
+
+  1. The application was developed starting from an NVIDIA CUDA
+     container obtained from Docker Hub or the NVIDIA GPU
+     Cloud, and
+
+  2. The resulting application is packaged as a Docker
+     container and distributed to users on Docker Hub or the
+     NVIDIA GPU Cloud only.
+
+
+2.7. Attachment B
+
+
+Additional Licensing Obligations
+
+The following third party components included in the SOFTWARE
+are licensed to Licensee pursuant to the following terms and
+conditions:
+
+  1. Licensee's use of the GDB third party component is
+     subject to the terms and conditions of GNU GPL v3:
+
+      This product includes copyrighted third-party software licensed
+      under the terms of the GNU General Public License v3 ("GPL v3").
+      All third-party software packages are copyright by their respective
+      authors. GPL v3 terms and conditions are hereby incorporated into
+      the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
+
+      Consistent with these licensing requirements, the software
+      listed below is provided under the terms of the specified
+      open source software licenses. To obtain source code for
+      software provided under licenses that require
+      redistribution of source code, including the GNU General
+      Public License (GPL) and GNU Lesser General Public License
+      (LGPL), contact oss-requests@nvidia.com. This offer is
+      valid for a period of three (3) years from the date of the
+      distribution of this product by NVIDIA CORPORATION.
+
+      Component     License
+      CUDA-GDB      GPL v3
+
+  2. Licensee represents and warrants that any and all third
+     party licensing and/or royalty payment obligations in
+     connection with Licensee's use of the H.264 video codecs
+     are solely the responsibility of Licensee.
+
+  3. Licensee's use of the Thrust library is subject to the
+     terms and conditions of the Apache License Version 2.0.
+     All third-party software packages are copyright by their
+     respective authors. 
Apache License Version 2.0 terms and + conditions are hereby incorporated into the Agreement by + this reference. + http://www.apache.org/licenses/LICENSE-2.0.html + + In addition, Licensee acknowledges the following notice: + Thrust includes source code from the Boost Iterator, + Tuple, System, and Random Number libraries. + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 4. Licensee's use of the LLVM third party component is + subject to the following terms and conditions: + + ====================================================== + LLVM Release License + ====================================================== + University of Illinois/NCSA + Open Source License + + Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. + All rights reserved. + + Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal with the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at Urbana- + Champaign, nor the names of its contributors may be used to endorse or + promote products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL
+      THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+      OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+      ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+      DEALINGS WITH THE SOFTWARE.
+
+  5. Licensee's use of the PCRE third party component (e.g.,
+     in nvprof) is subject to the following terms and
+     conditions:
+
+      ------------
+      PCRE LICENCE
+      ------------
+      PCRE is a library of functions to support regular expressions whose syntax
+      and semantics are as close as possible to those of the Perl 5 language.
+      Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
+      specified below. The documentation for PCRE, supplied in the "doc"
+      directory, is distributed under the same terms as the software itself. The
+      basic library functions are written in C and are freestanding. Also
+      included in the distribution is a set of C++ wrapper functions, and a
+      just-in-time compiler that can be used to optimize pattern matching. These
+      are both optional features that can be omitted when the library is built.
+
+      THE BASIC LIBRARY FUNCTIONS
+      ---------------------------
+      Written by:       Philip Hazel
+      Email local part: ph10
+      Email domain:     cam.ac.uk
+      University of Cambridge Computing Service,
+      Cambridge, England.
+      Copyright (c) 1997-2012 University of Cambridge
+      All rights reserved.
+
+      PCRE JUST-IN-TIME COMPILATION SUPPORT
+      -------------------------------------
+      Written by:       Zoltan Herczeg
+      Email local part: hzmester
+      Email domain:     freemail.hu
+      Copyright(c) 2010-2012 Zoltan Herczeg
+      All rights reserved.
+
+      STACK-LESS JUST-IN-TIME COMPILER
+      --------------------------------
+      Written by:       Zoltan Herczeg
+      Email local part: hzmester
+      Email domain:     freemail.hu
+      Copyright(c) 2009-2012 Zoltan Herczeg
+      All rights reserved.
+
+      THE C++ WRAPPER FUNCTIONS
+      -------------------------
+      Contributed by:   Google Inc.
+      Copyright (c) 2007-2012, Google Inc.
+      All rights reserved.
+
+      THE "BSD" LICENCE
+      -----------------
+      Redistribution and use in source and binary forms, with or without
+      modification, are permitted provided that the following conditions are met:
+
+      * Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+
+      * Redistributions in binary form must reproduce the above copyright
+        notice, this list of conditions and the following disclaimer in the
+        documentation and/or other materials provided with the distribution.
+
+      * Neither the name of the University of Cambridge nor the name of Google
+        Inc. nor the names of their contributors may be used to endorse or
+        promote products derived from this software without specific prior
+        written permission.
+
+      THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+      AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+      IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+      ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+      LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+      CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+      SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+      INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+      CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+      ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+      POSSIBILITY OF SUCH DAMAGE.
+
+  6. 
Some of the cuBLAS library routines were written by or + derived from code written by Vasily Volkov and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2007-2009, Regents of the University of California + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the University of California, Berkeley nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 7. Some of the cuBLAS library routines were written by or + derived from code written by Davide Barbieri and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 8. 
Some of the cuBLAS library routines were derived from + code developed by the University of Tennessee and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2010 The University of Tennessee. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer listed in this license in the documentation and/or + other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 9. Some of the cuBLAS library routines were written by or + derived from code written by Jonathan Hogg and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2012, The Science and Technology Facilities Council (STFC). + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the STFC nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 10. 
Some of the cuBLAS library routines were written by or + derived from code written by Ahmad M. Abdelfattah, David + Keyes, and Hatem Ltaief, and are subject to the Apache + License, Version 2.0, as follows: + + -- (C) Copyright 2013 King Abdullah University of Science and Technology + Authors: + Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa) + David Keyes (david.keyes@kaust.edu.sa) + Hatem Ltaief (hatem.ltaief@kaust.edu.sa) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the King Abdullah University of Science and + Technology nor the names of its contributors may be used to endorse + or promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + + 11. Some of the cuSPARSE library routines were written by or + derived from code written by Li-Wen Chang and are subject + to the NCSA Open Source License as follows: + + Copyright (c) 2012, University of Illinois. + + All rights reserved. + + Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal with the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimers in the documentation and/or other materials provided + with the distribution. + * Neither the names of IMPACT Group, University of Illinois, nor + the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE + SOFTWARE. + + 12. Some of the cuRAND library routines were written by or + derived from code written by Mutsuo Saito and Makoto + Matsumoto and are subject to the following license: + + Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima + University. All rights reserved. + + Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima + University and University of Tokyo. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the Hiroshima University nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 13. Some of the cuRAND library routines were derived from + code developed by D. E. Shaw Research and are subject to + the following license: + + Copyright 2010-2011, D. E. Shaw Research. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions, and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of D. E. Shaw Research nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 14. Some of the Math library routines were written by or + derived from code developed by Norbert Juffa and are + subject to the following license: + + Copyright (c) 2015-2017, Norbert Juffa + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 15. Licensee's use of the lz4 third party component is + subject to the following terms and conditions: + + Copyright (C) 2011-2013, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 16. 
The NPP library uses code from the Boost Math Toolkit
+      and is subject to the following license:
+
+      Boost Software License - Version 1.0 - August 17th, 2003
+      . . . .
+
+      Permission is hereby granted, free of charge, to any person or
+      organization obtaining a copy of the software and accompanying
+      documentation covered by this license (the "Software") to use,
+      reproduce, display, distribute, execute, and transmit the Software,
+      and to prepare derivative works of the Software, and to permit
+      third-parties to whom the Software is furnished to do so, all
+      subject to the following:
+
+      The copyright notices in the Software and this entire statement,
+      including the above license grant, this restriction and the following
+      disclaimer, must be included in all copies of the Software, in whole
+      or in part, and all derivative works of the Software, unless such
+      copies or derivative works are solely in the form of machine-executable
+      object code generated by a source language processor.
+
+      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+      EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+      MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
+      NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+      ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
+      OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
+      FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+      OTHER DEALINGS IN THE SOFTWARE.
+
+  17. Portions of the Nsight Eclipse Edition are subject to the
+      following license:
+
+      The Eclipse Foundation makes available all content in this plug-in
+      ("Content"). Unless otherwise indicated below, the Content is provided
+      to you under the terms and conditions of the Eclipse Public License
+      Version 1.0 ("EPL"). A copy of the EPL is available at
+      http://www.eclipse.org/legal/epl-v10.html. For purposes of the EPL,
+      "Program" will mean the Content.
+
+      If you did not receive this Content directly from the Eclipse
+      Foundation, the Content is being redistributed by another party
+      ("Redistributor") and different terms and conditions may apply to your
+      use of any object code in the Content. Check the Redistributor's
+      license that was provided with the Content. If no such license exists,
+      contact the Redistributor. Unless otherwise indicated below, the terms
+      and conditions of the EPL still apply to any source code in the
+      Content and such source code may be obtained at http://www.eclipse.org.
+
+  18. Some of the cuBLAS library routines use code from
+      OpenAI, which is subject to the following license:
+
+      License URL
+      https://github.com/openai/openai-gemm/blob/master/LICENSE
+
+      License Text
+      The MIT License
+
+      Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
+
+      Permission is hereby granted, free of charge, to any person obtaining a copy
+      of this software and associated documentation files (the "Software"), to deal
+      in the Software without restriction, including without limitation the rights
+      to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+      copies of the Software, and to permit persons to whom the Software is
+      furnished to do so, subject to the following conditions:
+
+      The above copyright notice and this permission notice shall be included in
+      all copies or substantial portions of the Software. 
+
+      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+      IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+      FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+      AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+      LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+      OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+      THE SOFTWARE.
+
+  19. Licensee's use of the Visual Studio Setup Configuration
+      Samples is subject to the following license:
+
+      The MIT License (MIT)
+      Copyright (C) Microsoft Corporation. All rights reserved.
+
+      Permission is hereby granted, free of charge, to any person
+      obtaining a copy of this software and associated documentation
+      files (the "Software"), to deal in the Software without restriction,
+      including without limitation the rights to use, copy, modify, merge,
+      publish, distribute, sublicense, and/or sell copies of the Software,
+      and to permit persons to whom the Software is furnished to do so,
+      subject to the following conditions:
+
+      The above copyright notice and this permission notice shall be included
+      in all copies or substantial portions of the Software.
+
+      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+      OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+      FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+      AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+      LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+      OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+      THE SOFTWARE.
+
+  20. Licensee's use of the linmath.h header for CPU functions for
+      GL vector/matrix operations from LunarG is subject to the
+      Apache License Version 2.0.
+
+  21. The DX12-CUDA sample uses the d3dx12.h header, which is
+      subject to the MIT license. 
+ +----------------- diff --git a/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/RECORD b/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..d3d72cd0b04af2a44def94e4441c081ba2049beb --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/RECORD @@ -0,0 +1,50 @@ +nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/__pycache__/__init__.cpython-310.pyc,, +nvidia/cuda_cupti/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cuda_cupti/__pycache__/__init__.cpython-310.pyc,, +nvidia/cuda_cupti/include/Openacc/cupti_openacc.h,sha256=Z0OM5e_hbd3cxdXyn3SCHqBBQawLg4QORnlm57Cr2-M,3513 +nvidia/cuda_cupti/include/Openmp/cupti_openmp.h,sha256=E1WNmeb_7HaUSmBegtUNe4IV1i7pXeNxgzIlyKn1zrM,3491 +nvidia/cuda_cupti/include/Openmp/omp-tools.h,sha256=AmuC_xPC7VPu3B-W4PmXuCNufFawhY8PjNXePaQFAOg,37403 +nvidia/cuda_cupti/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cuda_cupti/include/__pycache__/__init__.cpython-310.pyc,, +nvidia/cuda_cupti/include/cuda_stdint.h,sha256=XbFOk9CtJjKqk7PpYNqbSVsDxAsVM8avA4rWpPi0BjQ,4093 +nvidia/cuda_cupti/include/cupti.h,sha256=JkVyAGTIMYzwm62dfVqas3nMcILhgP_Wdz6fh4_NED0,4697 +nvidia/cuda_cupti/include/cupti_activity.h,sha256=qVVazvOJZbDMzvbqgS8nmaHN4gaxAWO2HA_8D7-Vpiw,311866 +nvidia/cuda_cupti/include/cupti_callbacks.h,sha256=-a47AoM4HoU5IuCCB_L-6lZRdrkDAC4XXLJuoUqojeY,26587 +nvidia/cuda_cupti/include/cupti_checkpoint.h,sha256=rTz8JoWxqESBXyZWUhZJGm4xeYcx4OJOtJ7Ld13T_b0,5264 +nvidia/cuda_cupti/include/cupti_driver_cbid.h,sha256=Uc74JDlJN_3qI04l4gkGzYbB3Ki0l0IgZILZO0WXtVs,70346 +nvidia/cuda_cupti/include/cupti_events.h,sha256=oHIOKSsE5ZAot5tZK-sbS2K9xcgiXBXTZZDkPQuiaNw,52639 +nvidia/cuda_cupti/include/cupti_metrics.h,sha256=iLAOlDrcbHEsIIUmgq0Tp1ZOY9O3Ot3wj2-bI8iYbSs,32148 +nvidia/cuda_cupti/include/cupti_nvtx_cbid.h,sha256=_azPtR1g4qivvX7qbvHRUg0RHCWF7iEOJyHMN9qZe9E,5912 +nvidia/cuda_cupti/include/cupti_pcsampling.h,sha256=uT_DtFN0Bye6ADtxfKXUAc8BcrFefotf-VtTuKQGJx0,32395 +nvidia/cuda_cupti/include/cupti_pcsampling_util.h,sha256=gEiMBes3mtpDJqauxqUtfe0csY4J31qpdg2Cp8On95E,13060 +nvidia/cuda_cupti/include/cupti_profiler_target.h,sha256=LWNFuYyotgGhCKY7TS48uVGxjeuOAuANWSnB8yfOfvo,31596 +nvidia/cuda_cupti/include/cupti_result.h,sha256=sOBZCRuRVHvcbIyDlzyLeina5YXwIQH21rVr3FPoB6M,12026 +nvidia/cuda_cupti/include/cupti_runtime_cbid.h,sha256=ZpomdRK7Fhn_NZYiiq5b3AyNZX3gznot-aX4dk-tsZI,44182 +nvidia/cuda_cupti/include/cupti_target.h,sha256=x4Vz1Upb6m9ixmVpmGaKQldDWYQI3OZ-ocEXGzNK0EE,1263 +nvidia/cuda_cupti/include/cupti_version.h,sha256=7XDJSIWpeJU8lrp0cOyma7dXXSGK4bdT1G8akxu8D_Q,4344 +nvidia/cuda_cupti/include/generated_cudaGL_meta.h,sha256=dfd2QuaRdEjbStOKvaQLi1Md_qrpRQh8PfyZznJ8bWY,3115 +nvidia/cuda_cupti/include/generated_cudaVDPAU_meta.h,sha256=fAedsoQxaU3hIAApAWDOKsa9kgcuQw4tdyf8klLm-3k,1453 +nvidia/cuda_cupti/include/generated_cuda_gl_interop_meta.h,sha256=LXOqvQCej0sCgAT1LUKKYZ466EFxN4hIwf9oIhXOLF0,2250 +nvidia/cuda_cupti/include/generated_cuda_meta.h,sha256=qZhsMxL-CURycqC2YkkioSDiD5pA8q22GOje2bOeviU,87152 +nvidia/cuda_cupti/include/generated_cuda_runtime_api_meta.h,sha256=YCkUMRP93XtDGLEH7DOJCUuhdRcVsO1vQwF_K9AuDfI,64332 +nvidia/cuda_cupti/include/generated_cuda_vdpau_interop_meta.h,sha256=8OLqWN26aEYpTWUXtbHJvA5GYhVv3ybYVOTW7yK37z8,1367 
+nvidia/cuda_cupti/include/generated_cudart_removed_meta.h,sha256=X3I5WXmhtsJNNlgY7coJ5vg4t11G5FRR6Xo7MboIeck,5172 +nvidia/cuda_cupti/include/generated_nvtx_meta.h,sha256=YHb_RD8g3s4m8PJn7Z0wnxvUHarl7BOAX5ADr-BL3HI,7513 +nvidia/cuda_cupti/include/nvperf_common.h,sha256=MMZrDvDdtG2DSS0h2B8AR1aPyt6UmeWwH-Dc_XsxaHo,10422 +nvidia/cuda_cupti/include/nvperf_cuda_host.h,sha256=xEapxwvdl96uV-On-c8LY2lvwVNfIjq-rAgj9_dYbqo,8299 +nvidia/cuda_cupti/include/nvperf_host.h,sha256=3mcgAEbB9uaDfWheRqC8gLlTiTggc_auV8PE9dTShx4,66289 +nvidia/cuda_cupti/include/nvperf_target.h,sha256=jVR2zEO2KmMta0C-qTGuS9V6rhVyMNnRnOU4QJSiPrc,21476 +nvidia/cuda_cupti/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cuda_cupti/lib/__pycache__/__init__.cpython-310.pyc,, +nvidia/cuda_cupti/lib/libcheckpoint.so,sha256=Fib_EZWCvKRmBbxtSXaat1MUuZk91ke9ZKkN7HR7yEM,1534104 +nvidia/cuda_cupti/lib/libcupti.so.12,sha256=q8YxAOnPUWuO0folNUrlPb_o30g4rFJdjXODMsIZjcI,7419504 +nvidia/cuda_cupti/lib/libnvperf_host.so,sha256=lc7EKudwwfIlHSBLA-EtVv2y5VYeSJjAe0A4L-JHRYk,28636664 +nvidia/cuda_cupti/lib/libnvperf_target.so,sha256=-iWHyNIR-8hei4jMoLzr54yMxAyBsMN2POV6yeY_Bmk,5895416 +nvidia/cuda_cupti/lib/libpcsamplingutil.so,sha256=XGCctMdV5khc1HtLdK_imh8aepM88GJz0q6CcPJtb3k,912728 +nvidia_cuda_cupti_cu12-12.1.105.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +nvidia_cuda_cupti_cu12-12.1.105.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262 +nvidia_cuda_cupti_cu12-12.1.105.dist-info/METADATA,sha256=xrOx7eliZP6--5Pla2AJW0e8XI3H0XDb9ZEN7DXghPs,1553 +nvidia_cuda_cupti_cu12-12.1.105.dist-info/RECORD,, +nvidia_cuda_cupti_cu12-12.1.105.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106 +nvidia_cuda_cupti_cu12-12.1.105.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7 diff --git a/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/top_level.txt b/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..862f7abf232cdfbb928609856247292e81c9decb --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.1.105.dist-info/top_level.txt @@ -0,0 +1 @@ +nvidia diff --git a/llmeval-env/lib/python3.10/site-packages/packaging-24.0.dist-info/INSTALLER b/llmeval-env/lib/python3.10/site-packages/packaging-24.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/packaging-24.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/llmeval-env/lib/python3.10/site-packages/packaging-24.0.dist-info/METADATA b/llmeval-env/lib/python3.10/site-packages/packaging-24.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..10ab4390a96923dea26efffa4c42eaacb502536b --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/packaging-24.0.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: packaging +Version: 24.0 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python 
+Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. 
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/__about__.py b/llmeval-env/lib/python3.10/site-packages/portalocker/__about__.py new file mode 100644 index 0000000000000000000000000000000000000000..e45c44327dfe0627c80af76a8f9922cb5c4330b5 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/portalocker/__about__.py @@ -0,0 +1,6 @@ +__package_name__ = 'portalocker' +__author__ = 'Rick van Hattem' +__email__ = 'wolph@wol.ph' +__version__ = '2.8.2' +__description__ = '''Wraps the portalocker recipe for easy usage''' +__url__ = 'https://github.com/WoLpH/portalocker' diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/__init__.py b/llmeval-env/lib/python3.10/site-packages/portalocker/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9170e33e9c2d297d769b1d4a8d8766434df02bdc --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/portalocker/__init__.py @@ -0,0 +1,76 @@ +from . import __about__, constants, exceptions, portalocker, utils + +try: # pragma: no cover + from .redis import RedisLock +except ImportError: # pragma: no cover + RedisLock = None # type: ignore + + +#: The package name on Pypi +__package_name__ = __about__.__package_name__ +#: Current author and maintainer, view the git history for the previous ones +__author__ = __about__.__author__ +#: Current author's email address +__email__ = __about__.__email__ +#: Version number +__version__ = '2.8.2' +#: Package description for Pypi +__description__ = __about__.__description__ +#: Package homepage +__url__ = __about__.__url__ + + +#: Exception thrown when the file is already locked by someone else +AlreadyLocked = exceptions.AlreadyLocked +#: Exception thrown if an error occurred during locking +LockException = exceptions.LockException + + +#: Lock a file. Note that this is an advisory lock on Linux/Unix systems +lock = portalocker.lock +#: Unlock a file +unlock = portalocker.unlock + +#: Place an exclusive lock. +#: Only one process may hold an exclusive lock for a given file at a given +#: time. +LOCK_EX: constants.LockFlags = constants.LockFlags.EXCLUSIVE + +#: Place a shared lock. +#: More than one process may hold a shared lock for a given file at a given +#: time. +LOCK_SH: constants.LockFlags = constants.LockFlags.SHARED + +#: Acquire the lock in a non-blocking fashion. +LOCK_NB: constants.LockFlags = constants.LockFlags.NON_BLOCKING + +#: Remove an existing lock held by this process. 
diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/__main__.py b/llmeval-env/lib/python3.10/site-packages/portalocker/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..658a3ec300f2ed7db2515168fe146d229b041cd9
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/portalocker/__main__.py
@@ -0,0 +1,98 @@
+import argparse
+import logging
+import os
+import pathlib
+import re
+
+base_path = pathlib.Path(__file__).parent.parent
+src_path = base_path / 'portalocker'
+dist_path = base_path / 'dist'
+_default_output_path = base_path / 'dist' / 'portalocker.py'
+
+_RELATIVE_IMPORT_RE = re.compile(r'^from \. import (?P<names>.+)$')
+_USELESS_ASSIGNMENT_RE = re.compile(r'^(?P<name>\w+) = \1\n$')
+
+_TEXT_TEMPLATE = """'''
+{}
+'''
+
+"""
+
+logger = logging.getLogger(__name__)
+
+
+def main(argv=None):
+    parser = argparse.ArgumentParser()
+
+    subparsers = parser.add_subparsers(required=True)
+    combine_parser = subparsers.add_parser(
+        'combine',
+        help='Combine all Python files into a single unified `portalocker.py` '
+        'file for easy distribution',
+    )
+    combine_parser.add_argument(
+        '--output-file',
+        '-o',
+        type=argparse.FileType('w'),
+        default=str(_default_output_path),
+    )
+
+    combine_parser.set_defaults(func=combine)
+    args = parser.parse_args(argv)
+    args.func(args)
+
+
+def _read_file(path, seen_files):
+    if path in seen_files:
+        return
+
+    names = set()
+    seen_files.add(path)
+    for line in path.open():
+        if match := _RELATIVE_IMPORT_RE.match(line):
+            for name in match.group('names').split(','):
+                name = name.strip()
+                names.add(name)
+                yield from _read_file(src_path / f'{name}.py', seen_files)
+        else:
+            yield _clean_line(line, names)
+
+
+def _clean_line(line, names):
+    # Replace `some_import.spam` with `spam`
+    if names:
+        joined_names = '|'.join(names)
+        line = re.sub(fr'\b({joined_names})\.', '', line)
+
+    # Replace useless assignments (e.g. `spam = spam`)
+    return _USELESS_ASSIGNMENT_RE.sub('', line)
+
+
+def combine(args):
+    output_file = args.output_file
+    pathlib.Path(output_file.name).parent.mkdir(parents=True, exist_ok=True)
+
+    output_file.write(
+        _TEXT_TEMPLATE.format((base_path / 'README.rst').read_text()),
+    )
+    output_file.write(
+        _TEXT_TEMPLATE.format((base_path / 'LICENSE').read_text()),
+    )
+
+    seen_files = set()
+    for line in _read_file(src_path / '__init__.py', seen_files):
+        output_file.write(line)
+
+    output_file.flush()
+    output_file.close()
+
+    logger.info(f'Wrote combined file to {output_file.name}')
+    # Run black and ruff if available. If not then just run the file.
+    os.system(f'black {output_file.name}')
+    os.system(f'ruff --fix {output_file.name}')
+    os.system(f'python3 {output_file.name}')
+
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO)
+    main()
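A usage note on the entry point above: ``combine`` is meant to be run from a
source checkout, since it reads ``README.rst`` and ``LICENSE`` from the
directory above the package. Assuming such a checkout, it can be driven from
the shell or directly through ``main()``:

.. code-block:: python

    # Hedged sketch, equivalent to:
    #     python -m portalocker combine -o dist/portalocker.py
    # Note that argparse.FileType opens the output path at parse time, so a
    # user-supplied directory such as dist/ must already exist.
    from portalocker.__main__ import main

    main(['combine', '--output-file', 'dist/portalocker.py'])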
diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/constants.py b/llmeval-env/lib/python3.10/site-packages/portalocker/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..72733c8546f95343b152600efa5771a294f80f4c
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/portalocker/constants.py
@@ -0,0 +1,58 @@
+'''
+Locking constants
+
+Lock types:
+
+- `EXCLUSIVE` exclusive lock
+- `SHARED` shared lock
+
+Lock flags:
+
+- `NON_BLOCKING` non-blocking
+
+Manually unlock, only needed internally
+
+- `UNBLOCK` unlock
+'''
+import enum
+import os
+
+# The actual tests will execute the code anyhow so the following code can
+# safely be ignored from the coverage tests
+if os.name == 'nt':  # pragma: no cover
+    import msvcrt
+
+    #: exclusive lock
+    LOCK_EX = 0x1
+    #: shared lock
+    LOCK_SH = 0x2
+    #: non-blocking
+    LOCK_NB = 0x4
+    #: unlock
+    LOCK_UN = msvcrt.LK_UNLCK  # type: ignore
+
+elif os.name == 'posix':  # pragma: no cover
+    import fcntl
+
+    #: exclusive lock
+    LOCK_EX = fcntl.LOCK_EX
+    #: shared lock
+    LOCK_SH = fcntl.LOCK_SH
+    #: non-blocking
+    LOCK_NB = fcntl.LOCK_NB
+    #: unlock
+    LOCK_UN = fcntl.LOCK_UN
+
+else:  # pragma: no cover
+    raise RuntimeError('PortaLocker only defined for nt and posix platforms')
+
+
+class LockFlags(enum.IntFlag):
+    #: exclusive lock
+    EXCLUSIVE = LOCK_EX
+    #: shared lock
+    SHARED = LOCK_SH
+    #: non-blocking
+    NON_BLOCKING = LOCK_NB
+    #: unlock
+    UNBLOCK = LOCK_UN
diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/exceptions.py b/llmeval-env/lib/python3.10/site-packages/portalocker/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..e871d13acbf6deb057c2e09d5e0fbd9404891f76
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/portalocker/exceptions.py
@@ -0,0 +1,27 @@
+import typing
+
+
+class BaseLockException(Exception):  # noqa: N818
+    # Error codes:
+    LOCK_FAILED = 1
+
+    def __init__(
+        self,
+        *args: typing.Any,
+        fh: typing.Union[typing.IO, None, int] = None,
+        **kwargs: typing.Any,
+    ) -> None:
+        self.fh = fh
+        Exception.__init__(self, *args)
+
+
+class LockException(BaseLockException):
+    pass
+
+
+class AlreadyLocked(LockException):
+    pass
+
+
+class FileToLarge(LockException):
+    pass
diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/portalocker.py b/llmeval-env/lib/python3.10/site-packages/portalocker/portalocker.py
new file mode 100644
index 0000000000000000000000000000000000000000..90307b76ea9bfdfeb3d92a833e89d0e500b2e6c7
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/portalocker/portalocker.py
@@ -0,0 +1,117 @@
+import contextlib
+import os
+import typing
+
+from . import constants, exceptions
+
+# Alias for readability.
Due to import recursion issues we cannot do: +# from .constants import LockFlags +LockFlags = constants.LockFlags + + +if os.name == 'nt': # pragma: no cover + import msvcrt + + import pywintypes + import win32con + import win32file + import winerror + + __overlapped = pywintypes.OVERLAPPED() + + def lock(file_: typing.Union[typing.IO, int], flags: LockFlags): + # Windows locking does not support locking through `fh.fileno()` so + # we cast it to make mypy and pyright happy + file_ = typing.cast(typing.IO, file_) + + mode = 0 + if flags & LockFlags.NON_BLOCKING: + mode |= win32con.LOCKFILE_FAIL_IMMEDIATELY + + if flags & LockFlags.EXCLUSIVE: + mode |= win32con.LOCKFILE_EXCLUSIVE_LOCK + + # Save the old position so we can go back to that position but + # still lock from the beginning of the file + savepos = file_.tell() + if savepos: + file_.seek(0) + + os_fh = msvcrt.get_osfhandle(file_.fileno()) # type: ignore + try: + win32file.LockFileEx(os_fh, mode, 0, -0x10000, __overlapped) + except pywintypes.error as exc_value: + # error: (33, 'LockFileEx', 'The process cannot access the file + # because another process has locked a portion of the file.') + if exc_value.winerror == winerror.ERROR_LOCK_VIOLATION: + raise exceptions.AlreadyLocked( + exceptions.LockException.LOCK_FAILED, + exc_value.strerror, + fh=file_, + ) from exc_value + else: + # Q: Are there exceptions/codes we should be dealing with + # here? + raise + finally: + if savepos: + file_.seek(savepos) + + def unlock(file_: typing.IO): + try: + savepos = file_.tell() + if savepos: + file_.seek(0) + + os_fh = msvcrt.get_osfhandle(file_.fileno()) # type: ignore + try: + win32file.UnlockFileEx( + os_fh, + 0, + -0x10000, + __overlapped, + ) + except pywintypes.error as exc: + if exc.winerror != winerror.ERROR_NOT_LOCKED: + # Q: Are there exceptions/codes we should be + # dealing with here? 
+                    raise
+            finally:
+                if savepos:
+                    file_.seek(savepos)
+        except OSError as exc:
+            raise exceptions.LockException(
+                exceptions.LockException.LOCK_FAILED,
+                exc.strerror,
+                fh=file_,
+            ) from exc
+
+elif os.name == 'posix':  # pragma: no cover
+    import fcntl
+
+    def lock(file_: typing.Union[typing.IO, int], flags: LockFlags):
+        locking_exceptions = (IOError,)
+        with contextlib.suppress(NameError):
+            locking_exceptions += (BlockingIOError,)  # type: ignore
+        # Locking with NON_BLOCKING without EXCLUSIVE or SHARED enabled
+        # results in an error
+        if (flags & LockFlags.NON_BLOCKING) and not flags & (
+            LockFlags.SHARED | LockFlags.EXCLUSIVE
+        ):
+            raise RuntimeError(
+                'When locking in non-blocking mode the SHARED '
+                'or EXCLUSIVE flag must be specified as well',
+            )
+
+        try:
+            fcntl.flock(file_, flags)
+        except locking_exceptions as exc_value:
+            # The exception code varies on different systems so we'll catch
+            # every IO error
+            raise exceptions.LockException(exc_value, fh=file_) from exc_value
+
+    def unlock(file_: typing.IO):
+        fcntl.flock(file_.fileno(), LockFlags.UNBLOCK)
+
+else:  # pragma: no cover
+    raise RuntimeError('PortaLocker only defined for nt and posix platforms')
diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/py.typed b/llmeval-env/lib/python3.10/site-packages/portalocker/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/redis.py b/llmeval-env/lib/python3.10/site-packages/portalocker/redis.py
new file mode 100644
index 0000000000000000000000000000000000000000..59ee5ff1717a0c0806df28cf065bef6b2276ae6e
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/portalocker/redis.py
@@ -0,0 +1,236 @@
+import _thread
+import json
+import logging
+import random
+import time
+import typing
+
+from redis import client
+
+from . import exceptions, utils
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_UNAVAILABLE_TIMEOUT = 1
+DEFAULT_THREAD_SLEEP_TIME = 0.1
+
+
+class PubSubWorkerThread(client.PubSubWorkerThread):  # type: ignore
+    def run(self):
+        try:
+            super().run()
+        except Exception:  # pragma: no cover
+            _thread.interrupt_main()
+            raise
+
+
+class RedisLock(utils.LockBase):
+    '''
+    An extremely reliable Redis lock based on pubsub with a keep-alive thread
+
+    As opposed to most Redis locking systems based on key/value pairs,
+    this locking method is based on the pubsub system. The big advantage is
+    that if the connection gets killed due to network issues, crashing
+    processes or otherwise, it will still immediately unlock instead of
+    waiting for a lock timeout.
+
+    To make sure both sides of the lock know about the connection state it is
+    recommended to set the `health_check_interval` when creating the redis
+    connection.
+
+    Args:
+        channel: the redis channel to use as locking key.
+        connection: an optional redis connection if you already have one
+            or if you need to specify the redis connection
+        timeout: timeout when trying to acquire a lock
+        check_interval: check interval while waiting
+        fail_when_locked: if the initial lock attempt fails, raise
+            `AlreadyLocked` immediately instead of retrying until the
+            timeout expires.
+        thread_sleep_time: sleep time between fetching messages from redis to
+            prevent a busy/wait loop. In the case of lock conflicts this
+            increases the time it takes to resolve the conflict. This should
+            be smaller than the `check_interval` to be useful.
+ unavailable_timeout: If the conflicting lock is properly connected + this should never exceed twice your redis latency. Note that this + will increase the wait time possibly beyond your `timeout` and is + always executed if a conflict arises. + redis_kwargs: The redis connection arguments if no connection is + given. The `DEFAULT_REDIS_KWARGS` are used as default, if you want + to override these you need to explicitly specify a value (e.g. + `health_check_interval=0`) + + ''' + + redis_kwargs: typing.Dict[str, typing.Any] + thread: typing.Optional[PubSubWorkerThread] + channel: str + timeout: float + connection: typing.Optional[client.Redis] + pubsub: typing.Optional[client.PubSub] = None + close_connection: bool + + DEFAULT_REDIS_KWARGS: typing.ClassVar[typing.Dict[str, typing.Any]] = dict( + health_check_interval=10, + ) + + def __init__( + self, + channel: str, + connection: typing.Optional[client.Redis] = None, + timeout: typing.Optional[float] = None, + check_interval: typing.Optional[float] = None, + fail_when_locked: typing.Optional[bool] = False, + thread_sleep_time: float = DEFAULT_THREAD_SLEEP_TIME, + unavailable_timeout: float = DEFAULT_UNAVAILABLE_TIMEOUT, + redis_kwargs: typing.Optional[typing.Dict] = None, + ): + # We don't want to close connections given as an argument + self.close_connection = not connection + + self.thread = None + self.channel = channel + self.connection = connection + self.thread_sleep_time = thread_sleep_time + self.unavailable_timeout = unavailable_timeout + self.redis_kwargs = redis_kwargs or dict() + + for key, value in self.DEFAULT_REDIS_KWARGS.items(): + self.redis_kwargs.setdefault(key, value) + + super().__init__( + timeout=timeout, + check_interval=check_interval, + fail_when_locked=fail_when_locked, + ) + + def get_connection(self) -> client.Redis: + if not self.connection: + self.connection = client.Redis(**self.redis_kwargs) + + return self.connection + + def channel_handler(self, message): + if message.get('type') != 'message': # pragma: no cover + return + + try: + data = json.loads(message.get('data')) + except TypeError: # pragma: no cover + logger.debug('TypeError while parsing: %r', message) + return + + assert self.connection is not None + self.connection.publish(data['response_channel'], str(time.time())) + + @property + def client_name(self): + return f'{self.channel}-lock' + + def acquire( + self, + timeout: typing.Optional[float] = None, + check_interval: typing.Optional[float] = None, + fail_when_locked: typing.Optional[bool] = None, + ): + timeout = utils.coalesce(timeout, self.timeout, 0.0) + check_interval = utils.coalesce( + check_interval, + self.check_interval, + 0.0, + ) + fail_when_locked = utils.coalesce( + fail_when_locked, + self.fail_when_locked, + ) + + assert not self.pubsub, 'This lock is already active' + connection = self.get_connection() + + timeout_generator = self._timeout_generator(timeout, check_interval) + for _ in timeout_generator: # pragma: no branch + subscribers = connection.pubsub_numsub(self.channel)[0][1] + + if subscribers: + logger.debug( + 'Found %d lock subscribers for %s', + subscribers, + self.channel, + ) + + if self.check_or_kill_lock( + connection, + self.unavailable_timeout, + ): # pragma: no branch + continue + else: # pragma: no cover + subscribers = 0 + + # Note: this should not be changed to an elif because the if + # above can still end up here + if not subscribers: + connection.client_setname(self.client_name) + self.pubsub = connection.pubsub() + 
                self.pubsub.subscribe(**{self.channel: self.channel_handler})
+                self.thread = PubSubWorkerThread(
+                    self.pubsub,
+                    sleep_time=self.thread_sleep_time,
+                )
+                self.thread.start()
+
+                subscribers = connection.pubsub_numsub(self.channel)[0][1]
+                if subscribers == 1:  # pragma: no branch
+                    return self
+                else:  # pragma: no cover
+                    # Race condition, let's try again
+                    self.release()
+
+            if fail_when_locked:  # pragma: no cover
+                raise exceptions.AlreadyLocked()
+
+        raise exceptions.AlreadyLocked()
+
+    def check_or_kill_lock(self, connection, timeout):
+        # Random channel name to get messages back from the lock
+        response_channel = f'{self.channel}-{random.random()}'
+
+        pubsub = connection.pubsub()
+        pubsub.subscribe(response_channel)
+        connection.publish(
+            self.channel,
+            json.dumps(
+                dict(
+                    response_channel=response_channel,
+                    message='ping',
+                ),
+            ),
+        )
+
+        check_interval = min(self.thread_sleep_time, timeout / 10)
+        for _ in self._timeout_generator(
+            timeout,
+            check_interval,
+        ):  # pragma: no branch
+            if pubsub.get_message(timeout=check_interval):
+                pubsub.close()
+                return True
+
+        for client_ in connection.client_list('pubsub'):  # pragma: no cover
+            if client_.get('name') == self.client_name:
+                logger.warning('Killing unavailable redis client: %r', client_)
+                connection.client_kill_filter(client_.get('id'))
+        return None
+
+    def release(self):
+        if self.thread:  # pragma: no branch
+            self.thread.stop()
+            self.thread.join()
+            self.thread = None
+            time.sleep(0.01)
+
+        if self.pubsub:  # pragma: no branch
+            self.pubsub.unsubscribe(self.channel)
+            self.pubsub.close()
+            self.pubsub = None
+
+    def __del__(self):
+        self.release()
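To make the pubsub design above concrete, a hedged usage sketch: it assumes
the ``redis`` package is installed and a server is reachable with default
settings (otherwise ``RedisLock`` is ``None``, per the package ``__init__``):

.. code-block:: python

    import portalocker

    # The channel name is the cross-process (and cross-machine) lock key.
    with portalocker.RedisLock('my-app-lock', timeout=10):
        critical_section()  # hypothetical function, stands in for real work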
diff --git a/llmeval-env/lib/python3.10/site-packages/portalocker/utils.py b/llmeval-env/lib/python3.10/site-packages/portalocker/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b5682e05518c28b9f207bc069959f17726df1d5
--- /dev/null
+++ b/llmeval-env/lib/python3.10/site-packages/portalocker/utils.py
@@ -0,0 +1,563 @@
+import abc
+import atexit
+import contextlib
+import logging
+import os
+import pathlib
+import random
+import tempfile
+import time
+import typing
+import warnings
+
+from . import constants, exceptions, portalocker
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_TIMEOUT = 5
+DEFAULT_CHECK_INTERVAL = 0.25
+DEFAULT_FAIL_WHEN_LOCKED = False
+LOCK_METHOD = constants.LockFlags.EXCLUSIVE | constants.LockFlags.NON_BLOCKING
+
+__all__ = [
+    'Lock',
+    'open_atomic',
+]
+
+Filename = typing.Union[str, pathlib.Path]
+
+
+def coalesce(*args: typing.Any, test_value: typing.Any = None) -> typing.Any:
+    '''Simple coalescing function that returns the first value that is not
+    equal to the `test_value`. Or `None` if no value is valid. Usually this
+    means that the last given value is the default value.
+
+    Note that the `test_value` is compared using an identity check
+    (i.e. `value is not test_value`) so changing the `test_value` won't work
+    for all values.
+
+    >>> coalesce(None, 1)
+    1
+    >>> coalesce()
+
+    >>> coalesce(0, False, True)
+    0
+    >>> coalesce(0, False, True, test_value=0)
+    False
+
+    # This won't work because of the `is not test_value` type testing:
+    >>> coalesce([], dict(spam='eggs'), test_value=[])
+    []
+    '''
+    return next((arg for arg in args if arg is not test_value), None)
+
+
+@contextlib.contextmanager
+def open_atomic(
+    filename: Filename,
+    binary: bool = True,
+) -> typing.Iterator[typing.IO]:
+    '''Open a file for atomic writing. Instead of locking this method allows
+    you to write the entire file and move it to the actual location. Note that
+    this makes the assumption that a rename is atomic on your platform which
+    is generally the case but not a guarantee.
+
+    http://docs.python.org/library/os.html#os.rename
+
+    >>> filename = 'test_file.txt'
+    >>> if os.path.exists(filename):
+    ...     os.remove(filename)
+
+    >>> with open_atomic(filename) as fh:
+    ...     written = fh.write(b'test')
+    >>> assert os.path.exists(filename)
+    >>> os.remove(filename)
+
+    >>> import pathlib
+    >>> path_filename = pathlib.Path('test_file.txt')
+
+    >>> with open_atomic(path_filename) as fh:
+    ...     written = fh.write(b'test')
+    >>> assert path_filename.exists()
+    >>> path_filename.unlink()
+    '''
+    # `pathlib.Path` cast in case `path` is a `str`
+    path: pathlib.Path = pathlib.Path(filename)
+
+    assert not path.exists(), '%r exists' % path
+
+    # Create the parent directory if it doesn't exist
+    path.parent.mkdir(parents=True, exist_ok=True)
+
+    temp_fh = tempfile.NamedTemporaryFile(
+        mode='wb' if binary else 'w',
+        dir=str(path.parent),
+        delete=False,
+    )
+    yield temp_fh
+    temp_fh.flush()
+    os.fsync(temp_fh.fileno())
+    temp_fh.close()
+    try:
+        os.rename(temp_fh.name, path)
+    finally:
+        with contextlib.suppress(Exception):
+            os.remove(temp_fh.name)
+
+
+class LockBase(abc.ABC):  # pragma: no cover
+    #: timeout when trying to acquire a lock
+    timeout: float
+    #: check interval while waiting for `timeout`
+    check_interval: float
+    #: skip the timeout and immediately fail if the initial lock fails
+    fail_when_locked: bool
+
+    def __init__(
+        self,
+        timeout: typing.Optional[float] = None,
+        check_interval: typing.Optional[float] = None,
+        fail_when_locked: typing.Optional[bool] = None,
+    ):
+        self.timeout = coalesce(timeout, DEFAULT_TIMEOUT)
+        self.check_interval = coalesce(check_interval, DEFAULT_CHECK_INTERVAL)
+        self.fail_when_locked = coalesce(
+            fail_when_locked,
+            DEFAULT_FAIL_WHEN_LOCKED,
+        )
+
+    @abc.abstractmethod
+    def acquire(
+        self,
+        timeout: typing.Optional[float] = None,
+        check_interval: typing.Optional[float] = None,
+        fail_when_locked: typing.Optional[bool] = None,
+    ):
+        return NotImplemented
+
+    def _timeout_generator(
+        self,
+        timeout: typing.Optional[float],
+        check_interval: typing.Optional[float],
+    ) -> typing.Iterator[int]:
+        f_timeout = coalesce(timeout, self.timeout, 0.0)
+        f_check_interval = coalesce(check_interval, self.check_interval, 0.0)
+
+        yield 0
+        i = 0
+
+        start_time = time.perf_counter()
+        while start_time + f_timeout > time.perf_counter():
+            i += 1
+            yield i
+
+            # Take slow lock checks into account to stay within the interval
+            since_start_time = time.perf_counter() - start_time
+            time.sleep(max(0.001, (i * f_check_interval) - since_start_time))
+
+    @abc.abstractmethod
+    def release(self):
+        return NotImplemented
+
+    def __enter__(self):
+        return self.acquire()
+
+    def __exit__(
+        self,
+        exc_type: typing.Optional[typing.Type[BaseException]],
+        exc_value: typing.Optional[BaseException],
+        traceback: typing.Any,  # Should be typing.TracebackType
+    ) -> typing.Optional[bool]:
+        self.release()
+        return None
+
+    def __delete__(self, instance):
+        instance.release()
+class Lock(LockBase):
+    '''Lock manager with built-in timeout
+
+    Args:
+        filename: filename
+        mode: the open mode, 'a' or 'ab' should be used for writing. When mode
+            contains `w` the file will be truncated to 0 bytes.
+        timeout: timeout when trying to acquire a lock
+        check_interval: check interval while waiting
+        fail_when_locked: if the initial lock attempt fails, raise
+            immediately instead of retrying until the timeout expires.
+        **file_open_kwargs: The kwargs for the `open(...)` call
+
+    fail_when_locked is useful when multiple threads/processes can race
+    when creating a file. If set to true, the lock fails with an
+    AlreadyLocked exception as soon as the initial attempt fails, instead
+    of waiting for the timeout to expire.
+
+    Note that the file is opened first and locked later. So using 'w' as
+    mode will result in truncate _BEFORE_ the lock is checked.
+    '''
+
+    def __init__(
+        self,
+        filename: Filename,
+        mode: str = 'a',
+        timeout: typing.Optional[float] = None,
+        check_interval: float = DEFAULT_CHECK_INTERVAL,
+        fail_when_locked: bool = DEFAULT_FAIL_WHEN_LOCKED,
+        flags: constants.LockFlags = LOCK_METHOD,
+        **file_open_kwargs,
+    ):
+        if 'w' in mode:
+            truncate = True
+            mode = mode.replace('w', 'a')
+        else:
+            truncate = False
+
+        if timeout is None:
+            timeout = DEFAULT_TIMEOUT
+        elif not (flags & constants.LockFlags.NON_BLOCKING):
+            warnings.warn(
+                'timeout has no effect in blocking mode',
+                stacklevel=1,
+            )
+
+        self.fh: typing.Optional[typing.IO] = None
+        self.filename: str = str(filename)
+        self.mode: str = mode
+        self.truncate: bool = truncate
+        self.timeout: float = timeout
+        self.check_interval: float = check_interval
+        self.fail_when_locked: bool = fail_when_locked
+        self.flags: constants.LockFlags = flags
+        self.file_open_kwargs = file_open_kwargs
+
+    def acquire(
+        self,
+        timeout: typing.Optional[float] = None,
+        check_interval: typing.Optional[float] = None,
+        fail_when_locked: typing.Optional[bool] = None,
+    ) -> typing.IO:
+        '''Acquire the locked filehandle'''
+
+        fail_when_locked = coalesce(fail_when_locked, self.fail_when_locked)
+
+        if (
+            not (self.flags & constants.LockFlags.NON_BLOCKING)
+            and timeout is not None
+        ):
+            warnings.warn(
+                'timeout has no effect in blocking mode',
+                stacklevel=1,
+            )
+
+        # If we already have a filehandle, return it
+        fh: typing.Optional[typing.IO] = self.fh
+        if fh:
+            return fh
+
+        # Get a new filehandler
+        fh = self._get_fh()
+
+        def try_close():  # pragma: no cover
+            # Silently try to close the handle if possible, ignore all issues
+            if fh is not None:
+                with contextlib.suppress(Exception):
+                    fh.close()
+
+        exception = None
+        # Try till the timeout has passed
+        for _ in self._timeout_generator(timeout, check_interval):
+            exception = None
+            try:
+                # Try to lock
+                fh = self._get_lock(fh)
+                break
+            except exceptions.LockException as exc:
+                # Python will automatically remove the variable from memory
+                # unless you save it in a different location
+                exception = exc
+
+                # We already tried to get the lock
+                # If fail_when_locked is True, stop trying
+                if fail_when_locked:
+                    try_close()
+                    raise exceptions.AlreadyLocked(exception) from exc
+
+                # Wait a bit
+
+        if exception:
+            try_close()
+            # We got a timeout... reraising
+            raise exceptions.LockException(exception)
+
+        # Prepare the filehandle (truncate if needed)
+        fh = self._prepare_fh(fh)
+
+        self.fh = fh
+        return fh
+
+    def release(self):
+        '''Releases the currently locked file handle'''
+        if self.fh:
+            portalocker.unlock(self.fh)
+            self.fh.close()
+            self.fh = None
+
+    def _get_fh(self) -> typing.IO:
+        '''Get a new filehandle'''
+        return open(  # noqa: SIM115
+            self.filename,
+            self.mode,
+            **self.file_open_kwargs,
+        )
+
+    def _get_lock(self, fh: typing.IO) -> typing.IO:
+        '''
+        Try to lock the given filehandle
+
+        raises LockException if it fails'''
+        portalocker.lock(fh, self.flags)
+        return fh
+
+    def _prepare_fh(self, fh: typing.IO) -> typing.IO:
+        '''
+        Prepare the filehandle for usage
+
+        If self.truncate is set, the file is truncated to 0 bytes
+        '''
+        if self.truncate:
+            fh.seek(0)
+            fh.truncate(0)
+
+        return fh
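The interplay of ``timeout`` and ``fail_when_locked`` in ``acquire()`` above
is easiest to see side by side. A hedged sketch; it relies on the usual
advisory-lock behaviour that two separate handles to the same file conflict:

.. code-block:: python

    import portalocker

    first = portalocker.Lock('app.lock')
    first.acquire()  # succeeds and holds the lock

    second = portalocker.Lock('app.lock', fail_when_locked=True)
    try:
        second.acquire()  # fails fast instead of retrying until the timeout
    except portalocker.AlreadyLocked:
        print('held elsewhere, as expected')
    finally:
        first.release()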
+class RLock(Lock):
+    '''
+    A reentrant lock, functions in a similar way to threading.RLock in that it
+    can be acquired multiple times. When the corresponding number of release()
+    calls are made the lock will finally release the underlying file lock.
+    '''
+
+    def __init__(
+        self,
+        filename,
+        mode='a',
+        timeout=DEFAULT_TIMEOUT,
+        check_interval=DEFAULT_CHECK_INTERVAL,
+        fail_when_locked=False,
+        flags=LOCK_METHOD,
+    ):
+        super().__init__(
+            filename,
+            mode,
+            timeout,
+            check_interval,
+            fail_when_locked,
+            flags,
+        )
+        self._acquire_count = 0
+
+    def acquire(
+        self,
+        timeout: typing.Optional[float] = None,
+        check_interval: typing.Optional[float] = None,
+        fail_when_locked: typing.Optional[bool] = None,
+    ) -> typing.IO:
+        if self._acquire_count >= 1:
+            fh = self.fh
+        else:
+            fh = super().acquire(timeout, check_interval, fail_when_locked)
+        self._acquire_count += 1
+        assert fh
+        return fh
+
+    def release(self):
+        if self._acquire_count == 0:
+            raise exceptions.LockException(
+                'Cannot release more times than acquired',
+            )
+
+        if self._acquire_count == 1:
+            super().release()
+        self._acquire_count -= 1
+
+
+class TemporaryFileLock(Lock):
+    def __init__(
+        self,
+        filename='.lock',
+        timeout=DEFAULT_TIMEOUT,
+        check_interval=DEFAULT_CHECK_INTERVAL,
+        fail_when_locked=True,
+        flags=LOCK_METHOD,
+    ):
+        Lock.__init__(
+            self,
+            filename=filename,
+            mode='w',
+            timeout=timeout,
+            check_interval=check_interval,
+            fail_when_locked=fail_when_locked,
+            flags=flags,
+        )
+        atexit.register(self.release)
+
+    def release(self):
+        Lock.release(self)
+        if os.path.isfile(self.filename):  # pragma: no branch
+            os.unlink(self.filename)
+
+
+class BoundedSemaphore(LockBase):
+    '''
+    Bounded semaphore to prevent too many parallel processes from running
+
+    This class is deprecated because multiple processes that are completely
+    unrelated could end up using the same semaphore. To prevent this,
+    use `NamedBoundedSemaphore` instead. The
+    `NamedBoundedSemaphore` is a drop-in replacement for this class.
+ + >>> semaphore = BoundedSemaphore(2, directory='') + >>> str(semaphore.get_filenames()[0]) + 'bounded_semaphore.00.lock' + >>> str(sorted(semaphore.get_random_filenames())[1]) + 'bounded_semaphore.01.lock' + ''' + + lock: typing.Optional[Lock] + + def __init__( + self, + maximum: int, + name: str = 'bounded_semaphore', + filename_pattern: str = '{name}.{number:02d}.lock', + directory: str = tempfile.gettempdir(), + timeout: typing.Optional[float] = DEFAULT_TIMEOUT, + check_interval: typing.Optional[float] = DEFAULT_CHECK_INTERVAL, + fail_when_locked: typing.Optional[bool] = True, + ): + self.maximum = maximum + self.name = name + self.filename_pattern = filename_pattern + self.directory = directory + self.lock: typing.Optional[Lock] = None + super().__init__( + timeout=timeout, + check_interval=check_interval, + fail_when_locked=fail_when_locked, + ) + + if not name or name == 'bounded_semaphore': + warnings.warn( + '`BoundedSemaphore` without an explicit `name` ' + 'argument is deprecated, use NamedBoundedSemaphore', + DeprecationWarning, + stacklevel=1, + ) + + def get_filenames(self) -> typing.Sequence[pathlib.Path]: + return [self.get_filename(n) for n in range(self.maximum)] + + def get_random_filenames(self) -> typing.Sequence[pathlib.Path]: + filenames = list(self.get_filenames()) + random.shuffle(filenames) + return filenames + + def get_filename(self, number) -> pathlib.Path: + return pathlib.Path(self.directory) / self.filename_pattern.format( + name=self.name, + number=number, + ) + + def acquire( + self, + timeout: typing.Optional[float] = None, + check_interval: typing.Optional[float] = None, + fail_when_locked: typing.Optional[bool] = None, + ) -> typing.Optional[Lock]: + assert not self.lock, 'Already locked' + + filenames = self.get_filenames() + + for n in self._timeout_generator(timeout, check_interval): # pragma: + logger.debug('trying lock (attempt %d) %r', n, filenames) + # no branch + if self.try_lock(filenames): # pragma: no branch + return self.lock # pragma: no cover + + if fail_when_locked := coalesce( + fail_when_locked, + self.fail_when_locked, + ): + raise exceptions.AlreadyLocked() + + return None + + def try_lock(self, filenames: typing.Sequence[Filename]) -> bool: + filename: Filename + for filename in filenames: + logger.debug('trying lock for %r', filename) + self.lock = Lock(filename, fail_when_locked=True) + try: + self.lock.acquire() + except exceptions.AlreadyLocked: + self.lock = None + else: + logger.debug('locked %r', filename) + return True + + return False + + def release(self): # pragma: no cover + if self.lock is not None: + self.lock.release() + self.lock = None + + +class NamedBoundedSemaphore(BoundedSemaphore): + ''' + Bounded semaphore to prevent too many parallel processes from running + + It's also possible to specify a timeout when acquiring the lock to wait + for a resource to become available. This is very similar to + `threading.BoundedSemaphore` but works across multiple processes and across + multiple operating systems. + + Because this works across multiple processes it's important to give the + semaphore a name. This name is used to create the lock files. If you + don't specify a name, a random name will be generated. This means that + you can't use the same semaphore in multiple processes unless you pass the + semaphore object to the other processes. 
+ + >>> semaphore = NamedBoundedSemaphore(2, name='test') + >>> str(semaphore.get_filenames()[0]) + '...test.00.lock' + + >>> semaphore = NamedBoundedSemaphore(2) + >>> 'bounded_semaphore' in str(semaphore.get_filenames()[0]) + True + + ''' + + def __init__( + self, + maximum: int, + name: typing.Optional[str] = None, + filename_pattern: str = '{name}.{number:02d}.lock', + directory: str = tempfile.gettempdir(), + timeout: typing.Optional[float] = DEFAULT_TIMEOUT, + check_interval: typing.Optional[float] = DEFAULT_CHECK_INTERVAL, + fail_when_locked: typing.Optional[bool] = True, + ): + if name is None: + name = 'bounded_semaphore.%d' % random.randint(0, 1000000) + super().__init__( + maximum, + name, + filename_pattern, + directory, + timeout, + check_interval, + fail_when_locked, + )
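To close out the file, a hedged sketch of the semaphore pair above; note that
``NamedBoundedSemaphore`` lives in ``portalocker.utils`` and is not
re-exported by the package ``__init__`` shown earlier:

.. code-block:: python

    from portalocker import AlreadyLocked
    from portalocker.utils import NamedBoundedSemaphore

    # At most two concurrent holders of the 'worker' semaphore; a third
    # acquire() raises AlreadyLocked once the timeout expires.
    semaphore = NamedBoundedSemaphore(2, name='worker')
    try:
        semaphore.acquire(timeout=1)
        ...  # bounded work here
    except AlreadyLocked:
        print('all semaphore slots are taken')
    finally:
        semaphore.release()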