applied-ai-018 committed
Commit 66f727a · verified · 1 Parent(s): b80d94d

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.

Files changed (50)
  1. llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER +1 -0
  2. llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst +28 -0
  3. llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/METADATA +93 -0
  4. llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/RECORD +14 -0
  5. llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL +6 -0
  6. llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt +1 -0
  7. llmeval-env/lib/python3.10/site-packages/dill/__diff.py +234 -0
  8. llmeval-env/lib/python3.10/site-packages/dill/__info__.py +291 -0
  9. llmeval-env/lib/python3.10/site-packages/dill/__init__.py +119 -0
  10. llmeval-env/lib/python3.10/site-packages/dill/_dill.py +2198 -0
  11. llmeval-env/lib/python3.10/site-packages/dill/_objects.py +537 -0
  12. llmeval-env/lib/python3.10/site-packages/dill/_shims.py +193 -0
  13. llmeval-env/lib/python3.10/site-packages/dill/detect.py +284 -0
  14. llmeval-env/lib/python3.10/site-packages/dill/logger.py +285 -0
  15. llmeval-env/lib/python3.10/site-packages/dill/objtypes.py +24 -0
  16. llmeval-env/lib/python3.10/site-packages/dill/pointers.py +122 -0
  17. llmeval-env/lib/python3.10/site-packages/dill/session.py +613 -0
  18. llmeval-env/lib/python3.10/site-packages/dill/settings.py +25 -0
  19. llmeval-env/lib/python3.10/site-packages/dill/source.py +1017 -0
  20. llmeval-env/lib/python3.10/site-packages/dill/tests/__init__.py +22 -0
  21. llmeval-env/lib/python3.10/site-packages/dill/tests/__main__.py +35 -0
  22. llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc +0 -0
  23. llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_fglobals.cpython-310.pyc +0 -0
  24. llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc +0 -0
  25. llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_moduledict.cpython-310.pyc +0 -0
  26. llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_session.cpython-310.pyc +0 -0
  27. llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_source.cpython-310.pyc +0 -0
  28. llmeval-env/lib/python3.10/site-packages/dill/tests/test_abc.py +169 -0
  29. llmeval-env/lib/python3.10/site-packages/dill/tests/test_dataclasses.py +35 -0
  30. llmeval-env/lib/python3.10/site-packages/dill/tests/test_dictviews.py +39 -0
  31. llmeval-env/lib/python3.10/site-packages/dill/tests/test_diff.py +107 -0
  32. llmeval-env/lib/python3.10/site-packages/dill/tests/test_extendpickle.py +53 -0
  33. llmeval-env/lib/python3.10/site-packages/dill/tests/test_functions.py +141 -0
  34. llmeval-env/lib/python3.10/site-packages/dill/tests/test_functors.py +39 -0
  35. llmeval-env/lib/python3.10/site-packages/dill/tests/test_logger.py +70 -0
  36. llmeval-env/lib/python3.10/site-packages/dill/tests/test_mixins.py +121 -0
  37. llmeval-env/lib/python3.10/site-packages/dill/tests/test_module.py +84 -0
  38. llmeval-env/lib/python3.10/site-packages/dill/tests/test_moduledict.py +54 -0
  39. llmeval-env/lib/python3.10/site-packages/dill/tests/test_nested.py +135 -0
  40. llmeval-env/lib/python3.10/site-packages/dill/tests/test_objects.py +63 -0
  41. llmeval-env/lib/python3.10/site-packages/dill/tests/test_properties.py +62 -0
  42. llmeval-env/lib/python3.10/site-packages/dill/tests/test_registered.py +64 -0
  43. llmeval-env/lib/python3.10/site-packages/dill/tests/test_restricted.py +27 -0
  44. llmeval-env/lib/python3.10/site-packages/dill/tests/test_selected.py +125 -0
  45. llmeval-env/lib/python3.10/site-packages/dill/tests/test_session.py +280 -0
  46. llmeval-env/lib/python3.10/site-packages/dill/tests/test_source.py +154 -0
  47. llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/WHEEL +8 -0
  48. llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/top_level.txt +1 -0
  49. llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  50. llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_fork_bomb.cpython-310.pyc +0 -0
llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
+ pip
llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst ADDED
@@ -0,0 +1,28 @@
+ Copyright 2010 Pallets
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/METADATA ADDED
@@ -0,0 +1,93 @@
+ Metadata-Version: 2.1
+ Name: MarkupSafe
+ Version: 2.1.5
+ Summary: Safely add untrusted strings to HTML/XML markup.
+ Home-page: https://palletsprojects.com/p/markupsafe/
+ Maintainer: Pallets
+ Maintainer-email: [email protected]
+ License: BSD-3-Clause
+ Project-URL: Donate, https://palletsprojects.com/donate
+ Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+ Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
+ Project-URL: Source Code, https://github.com/pallets/markupsafe/
+ Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
+ Project-URL: Chat, https://discord.gg/pallets
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Environment :: Web Environment
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: BSD License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+ Classifier: Topic :: Text Processing :: Markup :: HTML
+ Requires-Python: >=3.7
+ Description-Content-Type: text/x-rst
+ License-File: LICENSE.rst
+
+ MarkupSafe
+ ==========
+
+ MarkupSafe implements a text object that escapes characters so it is
+ safe to use in HTML and XML. Characters that have special meanings are
+ replaced so that they display as the actual characters. This mitigates
+ injection attacks, meaning untrusted user input can safely be displayed
+ on a page.
+
+
+ Installing
+ ----------
+
+ Install and update using `pip`_:
+
+ .. code-block:: text
+
+ pip install -U MarkupSafe
+
+ .. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+ Examples
+ --------
+
+ .. code-block:: pycon
+
+ >>> from markupsafe import Markup, escape
+
+ >>> # escape replaces special characters and wraps in Markup
+ >>> escape("<script>alert(document.cookie);</script>")
+ Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+ >>> # wrap in Markup to mark text "safe" and prevent escaping
+ >>> Markup("<strong>Hello</strong>")
+ Markup('<strong>hello</strong>')
+
+ >>> escape(Markup("<strong>Hello</strong>"))
+ Markup('<strong>hello</strong>')
+
+ >>> # Markup is a str subclass
+ >>> # methods and operators escape their arguments
+ >>> template = Markup("Hello <em>{name}</em>")
+ >>> template.format(name='"World"')
+ Markup('Hello <em>&#34;World&#34;</em>')
+
+
+ Donate
+ ------
+
+ The Pallets organization develops and supports MarkupSafe and other
+ popular packages. In order to grow the community of contributors and
+ users, and allow the maintainers to devote more time to the projects,
+ `please donate today`_.
+
+ .. _please donate today: https://palletsprojects.com/donate
+
+
+ Links
+ -----
+
+ - Documentation: https://markupsafe.palletsprojects.com/
+ - Changes: https://markupsafe.palletsprojects.com/changes/
+ - PyPI Releases: https://pypi.org/project/MarkupSafe/
+ - Source Code: https://github.com/pallets/markupsafe/
+ - Issue Tracker: https://github.com/pallets/markupsafe/issues/
+ - Chat: https://discord.gg/pallets
llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+ MarkupSafe-2.1.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ MarkupSafe-2.1.5.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
+ MarkupSafe-2.1.5.dist-info/METADATA,sha256=2dRDPam6OZLfpX0wg1JN5P3u9arqACxVSfdGmsJU7o8,3003
+ MarkupSafe-2.1.5.dist-info/RECORD,,
+ MarkupSafe-2.1.5.dist-info/WHEEL,sha256=1FEjxEYgybphwh9S0FO9IcZ0B-NIeM2ko8OzhFZeOeQ,152
+ MarkupSafe-2.1.5.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+ markupsafe/__init__.py,sha256=r7VOTjUq7EMQ4v3p4R1LoVOGJg6ysfYRncLr34laRBs,10958
+ markupsafe/__pycache__/__init__.cpython-310.pyc,,
+ markupsafe/__pycache__/_native.cpython-310.pyc,,
+ markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713
+ markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083
+ markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so,sha256=kPt-fhZ_RG7PUbDvwmyC26ZvRJ9DvUlF3hszBIB6_xs,44240
+ markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
+ markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL ADDED
@@ -0,0 +1,6 @@
+ Wheel-Version: 1.0
+ Generator: bdist_wheel (0.42.0)
+ Root-Is-Purelib: false
+ Tag: cp310-cp310-manylinux_2_17_x86_64
+ Tag: cp310-cp310-manylinux2014_x86_64
+
llmeval-env/lib/python3.10/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ markupsafe
llmeval-env/lib/python3.10/site-packages/dill/__diff.py ADDED
@@ -0,0 +1,234 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ """
10
+ Module to show if an object has changed since it was memorised
11
+ """
12
+
13
+ import builtins
14
+ import os
15
+ import sys
16
+ import types
17
+ try:
18
+ import numpy
19
+ HAS_NUMPY = True
20
+ except ImportError:
21
+ HAS_NUMPY = False
22
+
23
+ # pypy doesn't use reference counting
24
+ getrefcount = getattr(sys, 'getrefcount', lambda x:0)
25
+
26
+ # memo of objects indexed by id to a tuple (attributes, sequence items)
27
+ # attributes is a dict indexed by attribute name to attribute id
28
+ # sequence items is either a list of ids, of a dictionary of keys to ids
29
+ memo = {}
30
+ id_to_obj = {}
31
+ # types that cannot have changing attributes
32
+ builtins_types = set((str, list, dict, set, frozenset, int))
33
+ dont_memo = set(id(i) for i in (memo, sys.modules, sys.path_importer_cache,
34
+ os.environ, id_to_obj))
35
+
36
+
37
+ def get_attrs(obj):
38
+ """
39
+ Gets all the attributes of an object though its __dict__ or return None
40
+ """
41
+ if type(obj) in builtins_types \
42
+ or type(obj) is type and obj in builtins_types:
43
+ return
44
+ return getattr(obj, '__dict__', None)
45
+
46
+
47
+ def get_seq(obj, cache={str: False, frozenset: False, list: True, set: True,
48
+ dict: True, tuple: True, type: False,
49
+ types.ModuleType: False, types.FunctionType: False,
50
+ types.BuiltinFunctionType: False}):
51
+ """
52
+ Gets all the items in a sequence or return None
53
+ """
54
+ try:
55
+ o_type = obj.__class__
56
+ except AttributeError:
57
+ o_type = type(obj)
58
+ hsattr = hasattr
59
+ if o_type in cache:
60
+ if cache[o_type]:
61
+ if hsattr(obj, "copy"):
62
+ return obj.copy()
63
+ return obj
64
+ elif HAS_NUMPY and o_type in (numpy.ndarray, numpy.ma.core.MaskedConstant):
65
+ if obj.shape and obj.size:
66
+ return obj
67
+ else:
68
+ return []
69
+ elif hsattr(obj, "__contains__") and hsattr(obj, "__iter__") \
70
+ and hsattr(obj, "__len__") and hsattr(o_type, "__contains__") \
71
+ and hsattr(o_type, "__iter__") and hsattr(o_type, "__len__"):
72
+ cache[o_type] = True
73
+ if hsattr(obj, "copy"):
74
+ return obj.copy()
75
+ return obj
76
+ else:
77
+ cache[o_type] = False
78
+ return None
79
+
80
+
81
+ def memorise(obj, force=False):
82
+ """
83
+ Adds an object to the memo, and recursively adds all the objects
84
+ attributes, and if it is a container, its items. Use force=True to update
85
+ an object already in the memo. Updating is not recursively done.
86
+ """
87
+ obj_id = id(obj)
88
+ if obj_id in memo and not force or obj_id in dont_memo:
89
+ return
90
+ id_ = id
91
+ g = get_attrs(obj)
92
+ if g is None:
93
+ attrs_id = None
94
+ else:
95
+ attrs_id = dict((key,id_(value)) for key, value in g.items())
96
+
97
+ s = get_seq(obj)
98
+ if s is None:
99
+ seq_id = None
100
+ elif hasattr(s, "items"):
101
+ seq_id = dict((id_(key),id_(value)) for key, value in s.items())
102
+ elif not hasattr(s, "__len__"): #XXX: avoid TypeError from unexpected case
103
+ seq_id = None
104
+ else:
105
+ seq_id = [id_(i) for i in s]
106
+
107
+ memo[obj_id] = attrs_id, seq_id
108
+ id_to_obj[obj_id] = obj
109
+ mem = memorise
110
+ if g is not None:
111
+ [mem(value) for key, value in g.items()]
112
+
113
+ if s is not None:
114
+ if hasattr(s, "items"):
115
+ [(mem(key), mem(item))
116
+ for key, item in s.items()]
117
+ else:
118
+ if hasattr(s, '__len__'):
119
+ [mem(item) for item in s]
120
+ else: mem(s)
121
+
122
+
123
+ def release_gone():
124
+ itop, mp, src = id_to_obj.pop, memo.pop, getrefcount
125
+ [(itop(id_), mp(id_)) for id_, obj in list(id_to_obj.items())
126
+ if src(obj) < 4] #XXX: correct for pypy?
127
+
128
+
129
+ def whats_changed(obj, seen=None, simple=False, first=True):
130
+ """
131
+ Check an object against the memo. Returns a list in the form
132
+ (attribute changes, container changed). Attribute changes is a dict of
133
+ attribute name to attribute value. container changed is a boolean.
134
+ If simple is true, just returns a boolean. None for either item means
135
+ that it has not been checked yet
136
+ """
137
+ # Special cases
138
+ if first:
139
+ # ignore the _ variable, which only appears in interactive sessions
140
+ if "_" in builtins.__dict__:
141
+ del builtins._
142
+ if seen is None:
143
+ seen = {}
144
+
145
+ obj_id = id(obj)
146
+
147
+ if obj_id in seen:
148
+ if simple:
149
+ return any(seen[obj_id])
150
+ return seen[obj_id]
151
+
152
+ # Safety checks
153
+ if obj_id in dont_memo:
154
+ seen[obj_id] = [{}, False]
155
+ if simple:
156
+ return False
157
+ return seen[obj_id]
158
+ elif obj_id not in memo:
159
+ if simple:
160
+ return True
161
+ else:
162
+ raise RuntimeError("Object not memorised " + str(obj))
163
+
164
+ seen[obj_id] = ({}, False)
165
+
166
+ chngd = whats_changed
167
+ id_ = id
168
+
169
+ # compare attributes
170
+ attrs = get_attrs(obj)
171
+ if attrs is None:
172
+ changed = {}
173
+ else:
174
+ obj_attrs = memo[obj_id][0]
175
+ obj_get = obj_attrs.get
176
+ changed = dict((key,None) for key in obj_attrs if key not in attrs)
177
+ for key, o in attrs.items():
178
+ if id_(o) != obj_get(key, None) or chngd(o, seen, True, False):
179
+ changed[key] = o
180
+
181
+ # compare sequence
182
+ items = get_seq(obj)
183
+ seq_diff = False
184
+ if (items is not None) and (hasattr(items, '__len__')):
185
+ obj_seq = memo[obj_id][1]
186
+ if (len(items) != len(obj_seq)):
187
+ seq_diff = True
188
+ elif hasattr(obj, "items"): # dict type obj
189
+ obj_get = obj_seq.get
190
+ for key, item in items.items():
191
+ if id_(item) != obj_get(id_(key)) \
192
+ or chngd(key, seen, True, False) \
193
+ or chngd(item, seen, True, False):
194
+ seq_diff = True
195
+ break
196
+ else:
197
+ for i, j in zip(items, obj_seq): # list type obj
198
+ if id_(i) != j or chngd(i, seen, True, False):
199
+ seq_diff = True
200
+ break
201
+ seen[obj_id] = changed, seq_diff
202
+ if simple:
203
+ return changed or seq_diff
204
+ return changed, seq_diff
205
+
206
+
207
+ def has_changed(*args, **kwds):
208
+ kwds['simple'] = True # ignore simple if passed in
209
+ return whats_changed(*args, **kwds)
210
+
211
+ __import__ = __import__
212
+
213
+
214
+ def _imp(*args, **kwds):
215
+ """
216
+ Replaces the default __import__, to allow a module to be memorised
217
+ before the user can change it
218
+ """
219
+ before = set(sys.modules.keys())
220
+ mod = __import__(*args, **kwds)
221
+ after = set(sys.modules.keys()).difference(before)
222
+ for m in after:
223
+ memorise(sys.modules[m])
224
+ return mod
225
+
226
+ builtins.__import__ = _imp
227
+ if hasattr(builtins, "_"):
228
+ del builtins._
229
+
230
+ # memorise all already imported modules. This implies that this must be
231
+ # imported first for any changes to be recorded
232
+ for mod in list(sys.modules.values()):
233
+ memorise(mod)
234
+ release_gone()
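
The change-tracking helpers added above (memorise, has_changed, whats_changed) carry docstrings but no usage example. A minimal, non-authoritative sketch of how they fit together follows; the direct import of the private module and the printed results are illustrative assumptions, not part of the file above::

    from dill import __diff as diff

    obj = {"a": 1}
    diff.memorise(obj)            # record ids of obj's items (and of obj itself)
    print(diff.has_changed(obj))  # False: nothing mutated since memorise()
    obj["b"] = 2
    print(diff.has_changed(obj))  # True: the dict's items no longer match the memo
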
llmeval-env/lib/python3.10/site-packages/dill/__info__.py ADDED
@@ -0,0 +1,291 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+ '''
8
+ -----------------------------
9
+ dill: serialize all of Python
10
+ -----------------------------
11
+
12
+ About Dill
13
+ ==========
14
+
15
+ ``dill`` extends Python's ``pickle`` module for serializing and de-serializing
16
+ Python objects to the majority of the built-in Python types. Serialization
17
+ is the process of converting an object to a byte stream, and the inverse
18
+ of which is converting a byte stream back to a Python object hierarchy.
19
+
20
+ ``dill`` provides the user the same interface as the ``pickle`` module, and
21
+ also includes some additional features. In addition to pickling Python
22
+ objects, ``dill`` provides the ability to save the state of an interpreter
23
+ session in a single command. Hence, it would be feasible to save an
24
+ interpreter session, close the interpreter, ship the pickled file to
25
+ another computer, open a new interpreter, unpickle the session and
26
+ thus continue from the 'saved' state of the original interpreter
27
+ session.
28
+
29
+ ``dill`` can be used to store Python objects to a file, but the primary
30
+ usage is to send Python objects across the network as a byte stream.
31
+ ``dill`` is quite flexible, and allows arbitrary user defined classes
32
+ and functions to be serialized. Thus ``dill`` is not intended to be
33
+ secure against erroneously or maliciously constructed data. It is
34
+ left to the user to decide whether the data they unpickle is from
35
+ a trustworthy source.
36
+
37
+ ``dill`` is part of ``pathos``, a Python framework for heterogeneous computing.
38
+ ``dill`` is in active development, so any user feedback, bug reports, comments,
39
+ or suggestions are highly appreciated. A list of issues is located at
40
+ https://github.com/uqfoundation/dill/issues, with a legacy list maintained at
41
+ https://uqfoundation.github.io/project/pathos/query.
42
+
43
+
44
+ Major Features
45
+ ==============
46
+
47
+ ``dill`` can pickle the following standard types:
48
+
49
+ - none, type, bool, int, float, complex, bytes, str,
50
+ - tuple, list, dict, file, buffer, builtin,
51
+ - Python classes, namedtuples, dataclasses, metaclasses,
52
+ - instances of classes,
53
+ - set, frozenset, array, functions, exceptions
54
+
55
+ ``dill`` can also pickle more 'exotic' standard types:
56
+
57
+ - functions with yields, nested functions, lambdas,
58
+ - cell, method, unboundmethod, module, code, methodwrapper,
59
+ - methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor,
60
+ - dictproxy, slice, notimplemented, ellipsis, quit
61
+
62
+ ``dill`` cannot yet pickle these standard types:
63
+
64
+ - frame, generator, traceback
65
+
66
+ ``dill`` also provides the capability to:
67
+
68
+ - save and load Python interpreter sessions
69
+ - save and extract the source code from functions and classes
70
+ - interactively diagnose pickling errors
71
+
72
+
73
+ Current Release
74
+ ===============
75
+
76
+ The latest released version of ``dill`` is available from:
77
+
78
+ https://pypi.org/project/dill
79
+
80
+ ``dill`` is distributed under a 3-clause BSD license.
81
+
82
+
83
+ Development Version
84
+ ===================
85
+
86
+ You can get the latest development version with all the shiny new features at:
87
+
88
+ https://github.com/uqfoundation
89
+
90
+ If you have a new contribution, please submit a pull request.
91
+
92
+
93
+ Installation
94
+ ============
95
+
96
+ ``dill`` can be installed with ``pip``::
97
+
98
+ $ pip install dill
99
+
100
+ To optionally include the ``objgraph`` diagnostic tool in the install::
101
+
102
+ $ pip install dill[graph]
103
+
104
+ To optionally include the ``gprof2dot`` diagnostic tool in the install::
105
+
106
+ $ pip install dill[profile]
107
+
108
+ For windows users, to optionally install session history tools::
109
+
110
+ $ pip install dill[readline]
111
+
112
+
113
+ Requirements
114
+ ============
115
+
116
+ ``dill`` requires:
117
+
118
+ - ``python`` (or ``pypy``), **>=3.8**
119
+ - ``setuptools``, **>=42**
120
+
121
+ Optional requirements:
122
+
123
+ - ``objgraph``, **>=1.7.2**
124
+ - ``gprof2dot``, **>=2022.7.29**
125
+ - ``pyreadline``, **>=1.7.1** (on windows)
126
+
127
+
128
+ Basic Usage
129
+ ===========
130
+
131
+ ``dill`` is a drop-in replacement for ``pickle``. Existing code can be
132
+ updated to allow complete pickling using::
133
+
134
+ >>> import dill as pickle
135
+
136
+ or::
137
+
138
+ >>> from dill import dumps, loads
139
+
140
+ ``dumps`` converts the object to a unique byte string, and ``loads`` performs
141
+ the inverse operation::
142
+
143
+ >>> squared = lambda x: x**2
144
+ >>> loads(dumps(squared))(3)
145
+ 9
146
+
147
+ There are a number of options to control serialization which are provided
148
+ as keyword arguments to several ``dill`` functions:
149
+
150
+ * with *protocol*, the pickle protocol level can be set. This uses the
151
+ same value as the ``pickle`` module, *DEFAULT_PROTOCOL*.
152
+ * with *byref=True*, ``dill`` to behave a lot more like pickle with
153
+ certain objects (like modules) pickled by reference as opposed to
154
+ attempting to pickle the object itself.
155
+ * with *recurse=True*, objects referred to in the global dictionary are
156
+ recursively traced and pickled, instead of the default behavior of
157
+ attempting to store the entire global dictionary.
158
+ * with *fmode*, the contents of the file can be pickled along with the file
159
+ handle, which is useful if the object is being sent over the wire to a
160
+ remote system which does not have the original file on disk. Options are
161
+ *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content
162
+ and *FILE_FMODE* for content and handle.
163
+ * with *ignore=False*, objects reconstructed with types defined in the
164
+ top-level script environment use the existing type in the environment
165
+ rather than a possibly different reconstructed type.
166
+
167
+ The default serialization can also be set globally in *dill.settings*.
168
+ Thus, we can modify how ``dill`` handles references to the global dictionary
169
+ locally or globally::
170
+
171
+ >>> import dill.settings
172
+ >>> dumps(absolute) == dumps(absolute, recurse=True)
173
+ False
174
+ >>> dill.settings['recurse'] = True
175
+ >>> dumps(absolute) == dumps(absolute, recurse=True)
176
+ True
177
+
178
+ ``dill`` also includes source code inspection, as an alternate to pickling::
179
+
180
+ >>> import dill.source
181
+ >>> print(dill.source.getsource(squared))
182
+ squared = lambda x:x**2
183
+
184
+ To aid in debugging pickling issues, use *dill.detect* which provides
185
+ tools like pickle tracing::
186
+
187
+ >>> import dill.detect
188
+ >>> with dill.detect.trace():
189
+ >>> dumps(squared)
190
+ ┬ F1: <function <lambda> at 0x7fe074f8c280>
191
+ ├┬ F2: <function _create_function at 0x7fe074c49c10>
192
+ │└ # F2 [34 B]
193
+ ├┬ Co: <code object <lambda> at 0x7fe07501eb30, file "<stdin>", line 1>
194
+ │├┬ F2: <function _create_code at 0x7fe074c49ca0>
195
+ ││└ # F2 [19 B]
196
+ │└ # Co [87 B]
197
+ ├┬ D1: <dict object at 0x7fe0750d4680>
198
+ │└ # D1 [22 B]
199
+ ├┬ D2: <dict object at 0x7fe074c5a1c0>
200
+ │└ # D2 [2 B]
201
+ ├┬ D2: <dict object at 0x7fe074f903c0>
202
+ │├┬ D2: <dict object at 0x7fe074f8ebc0>
203
+ ││└ # D2 [2 B]
204
+ │└ # D2 [23 B]
205
+ └ # F1 [180 B]
206
+
207
+ With trace, we see how ``dill`` stored the lambda (``F1``) by first storing
208
+ ``_create_function``, the underlying code object (``Co``) and ``_create_code``
209
+ (which is used to handle code objects), then we handle the reference to
210
+ the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that
211
+ save the lambda object's state. A ``#`` marks when the object is actually stored.
212
+
213
+
214
+ More Information
215
+ ================
216
+
217
+ Probably the best way to get started is to look at the documentation at
218
+ http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that
219
+ demonstrate how ``dill`` can serialize different Python objects. You can
220
+ run the test suite with ``python -m dill.tests``. The contents of any
221
+ pickle file can be examined with ``undill``. As ``dill`` conforms to
222
+ the ``pickle`` interface, the examples and documentation found at
223
+ http://docs.python.org/library/pickle.html also apply to ``dill``
224
+ if one will ``import dill as pickle``. The source code is also generally
225
+ well documented, so further questions may be resolved by inspecting the
226
+ code itself. Please feel free to submit a ticket on github, or ask a
227
+ question on stackoverflow (**@Mike McKerns**).
228
+ If you would like to share how you use ``dill`` in your work, please send
229
+ an email (to **mmckerns at uqfoundation dot org**).
230
+
231
+
232
+ Citation
233
+ ========
234
+
235
+ If you use ``dill`` to do research that leads to publication, we ask that you
236
+ acknowledge use of ``dill`` by citing the following in your publication::
237
+
238
+ M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis,
239
+ "Building a framework for predictive science", Proceedings of
240
+ the 10th Python in Science Conference, 2011;
241
+ http://arxiv.org/pdf/1202.1056
242
+
243
+ Michael McKerns and Michael Aivazis,
244
+ "pathos: a framework for heterogeneous computing", 2010- ;
245
+ https://uqfoundation.github.io/project/pathos
246
+
247
+ Please see https://uqfoundation.github.io/project/pathos or
248
+ http://arxiv.org/pdf/1202.1056 for further information.
249
+
250
+ '''
251
+
252
+ __version__ = '0.3.8'
253
+ __author__ = 'Mike McKerns'
254
+
255
+ __license__ = '''
256
+ Copyright (c) 2004-2016 California Institute of Technology.
257
+ Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
258
+ All rights reserved.
259
+
260
+ This software is available subject to the conditions and terms laid
261
+ out below. By downloading and using this software you are agreeing
262
+ to the following conditions.
263
+
264
+ Redistribution and use in source and binary forms, with or without
265
+ modification, are permitted provided that the following conditions
266
+ are met:
267
+
268
+ - Redistributions of source code must retain the above copyright
269
+ notice, this list of conditions and the following disclaimer.
270
+
271
+ - Redistributions in binary form must reproduce the above copyright
272
+ notice, this list of conditions and the following disclaimer in the
273
+ documentation and/or other materials provided with the distribution.
274
+
275
+ - Neither the names of the copyright holders nor the names of any of
276
+ the contributors may be used to endorse or promote products derived
277
+ from this software without specific prior written permission.
278
+
279
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
280
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
281
+ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
282
+ PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
283
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
284
+ EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
285
+ PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
286
+ OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
287
+ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
288
+ OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
289
+ ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
290
+
291
+ '''
llmeval-env/lib/python3.10/site-packages/dill/__init__.py ADDED
@@ -0,0 +1,119 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ # author, version, license, and long description
10
+ try: # the package is installed
11
+ from .__info__ import __version__, __author__, __doc__, __license__
12
+ except: # pragma: no cover
13
+ import os
14
+ import sys
15
+ parent = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
16
+ sys.path.append(parent)
17
+ # get distribution meta info
18
+ from version import (__version__, __author__,
19
+ get_license_text, get_readme_as_rst)
20
+ __license__ = get_license_text(os.path.join(parent, 'LICENSE'))
21
+ __license__ = "\n%s" % __license__
22
+ __doc__ = get_readme_as_rst(os.path.join(parent, 'README.md'))
23
+ del os, sys, parent, get_license_text, get_readme_as_rst
24
+
25
+
26
+ from ._dill import (
27
+ dump, dumps, load, loads, copy,
28
+ Pickler, Unpickler, register, pickle, pickles, check,
29
+ DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, HANDLE_FMODE, CONTENTS_FMODE, FILE_FMODE,
30
+ PickleError, PickleWarning, PicklingError, PicklingWarning, UnpicklingError,
31
+ UnpicklingWarning,
32
+ )
33
+ from .session import (
34
+ dump_module, load_module, load_module_asdict,
35
+ dump_session, load_session # backward compatibility
36
+ )
37
+ from . import detect, logger, session, source, temp
38
+
39
+ # get global settings
40
+ from .settings import settings
41
+
42
+ # make sure "trace" is turned off
43
+ logger.trace(False)
44
+
45
+ objects = {}
46
+ # local import of dill._objects
47
+ #from . import _objects
48
+ #objects.update(_objects.succeeds)
49
+ #del _objects
50
+
51
+ # local import of dill.objtypes
52
+ from . import objtypes as types
53
+
54
+ def load_types(pickleable=True, unpickleable=True):
55
+ """load pickleable and/or unpickleable types to ``dill.types``
56
+
57
+ ``dill.types`` is meant to mimic the ``types`` module, providing a
58
+ registry of object types. By default, the module is empty (for import
59
+ speed purposes). Use the ``load_types`` function to load selected object
60
+ types to the ``dill.types`` module.
61
+
62
+ Args:
63
+ pickleable (bool, default=True): if True, load pickleable types.
64
+ unpickleable (bool, default=True): if True, load unpickleable types.
65
+
66
+ Returns:
67
+ None
68
+ """
69
+ from importlib import reload
70
+ # local import of dill.objects
71
+ from . import _objects
72
+ if pickleable:
73
+ objects.update(_objects.succeeds)
74
+ else:
75
+ [objects.pop(obj,None) for obj in _objects.succeeds]
76
+ if unpickleable:
77
+ objects.update(_objects.failures)
78
+ else:
79
+ [objects.pop(obj,None) for obj in _objects.failures]
80
+ objects.update(_objects.registered)
81
+ del _objects
82
+ # reset contents of types to 'empty'
83
+ [types.__dict__.pop(obj) for obj in list(types.__dict__.keys()) \
84
+ if obj.find('Type') != -1]
85
+ # add corresponding types from objects to types
86
+ reload(types)
87
+
88
+ def extend(use_dill=True):
89
+ '''add (or remove) dill types to/from the pickle registry
90
+
91
+ by default, ``dill`` populates its types to ``pickle.Pickler.dispatch``.
92
+ Thus, all ``dill`` types are available upon calling ``'import pickle'``.
93
+ To drop all ``dill`` types from the ``pickle`` dispatch, *use_dill=False*.
94
+
95
+ Args:
96
+ use_dill (bool, default=True): if True, extend the dispatch table.
97
+
98
+ Returns:
99
+ None
100
+ '''
101
+ from ._dill import _revert_extension, _extend
102
+ if use_dill: _extend()
103
+ else: _revert_extension()
104
+ return
105
+
106
+ extend()
107
+
108
+
109
+ def license():
110
+ """print license"""
111
+ print (__license__)
112
+ return
113
+
114
+ def citation():
115
+ """print citation"""
116
+ print (__doc__[-491:-118])
117
+ return
118
+
119
+ # end of file
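
The registry helpers defined in __init__.py above (load_types, extend, and the objects dict) can be exercised directly; a brief sketch under the assumption that dill is installed as added here (the exact count printed depends on the dill version)::

    import dill

    dill.load_types()           # fill dill.types and the dill.objects registry
    print(len(dill.objects))    # number of registered example objects
    dill.extend(False)          # drop dill's types from pickle.Pickler.dispatch
    dill.extend(True)           # restore them (the default after 'import dill')
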
llmeval-env/lib/python3.10/site-packages/dill/_dill.py ADDED
@@ -0,0 +1,2198 @@
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2015 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ dill: a utility for serialization of python objects
10
+
11
+ The primary functions in `dill` are :func:`dump` and
12
+ :func:`dumps` for serialization ("pickling") to a
13
+ file or to a string, respectively, and :func:`load`
14
+ and :func:`loads` for deserialization ("unpickling"),
15
+ similarly, from a file or from a string. Other notable
16
+ functions are :func:`~dill.dump_module` and
17
+ :func:`~dill.load_module`, which are used to save and
18
+ restore module objects, including an intepreter session.
19
+
20
+ Based on code written by Oren Tirosh and Armin Ronacher.
21
+ Extended to a (near) full set of the builtin types (in types module),
22
+ and coded to the pickle interface, by <[email protected]>.
23
+ Initial port to python3 by Jonathan Dobson, continued by mmckerns.
24
+ Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns.
25
+ Tested against CH16+ Std. Lib. ... TBD.
26
+ """
27
+
28
+ from __future__ import annotations
29
+
30
+ __all__ = [
31
+ 'dump','dumps','load','loads','copy',
32
+ 'Pickler','Unpickler','register','pickle','pickles','check',
33
+ 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE',
34
+ 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError',
35
+ 'UnpicklingWarning',
36
+ ]
37
+
38
+ __module__ = 'dill'
39
+
40
+ import warnings
41
+ from .logger import adapter as logger
42
+ from .logger import trace as _trace
43
+ log = logger # backward compatibility (see issue #582)
44
+
45
+ import os
46
+ import sys
47
+ diff = None
48
+ _use_diff = False
49
+ OLD38 = (sys.hexversion < 0x3080000)
50
+ OLD39 = (sys.hexversion < 0x3090000)
51
+ OLD310 = (sys.hexversion < 0x30a0000)
52
+ OLD312a7 = (sys.hexversion < 0x30c00a7)
53
+ #XXX: get types from .objtypes ?
54
+ import builtins as __builtin__
55
+ from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler
56
+ from pickle import GLOBAL, POP
57
+ from _thread import LockType
58
+ from _thread import RLock as RLockType
59
+ #from io import IOBase
60
+ from types import CodeType, FunctionType, MethodType, GeneratorType, \
61
+ TracebackType, FrameType, ModuleType, BuiltinMethodType
62
+ BufferType = memoryview #XXX: unregistered
63
+ ClassType = type # no 'old-style' classes
64
+ EllipsisType = type(Ellipsis)
65
+ #FileType = IOBase
66
+ NotImplementedType = type(NotImplemented)
67
+ SliceType = slice
68
+ TypeType = type # 'new-style' classes #XXX: unregistered
69
+ XRangeType = range
70
+ from types import MappingProxyType as DictProxyType, new_class
71
+ from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError
72
+ import __main__ as _main_module
73
+ import marshal
74
+ import gc
75
+ # import zlib
76
+ import abc
77
+ import dataclasses
78
+ from weakref import ReferenceType, ProxyType, CallableProxyType
79
+ from collections import OrderedDict
80
+ from enum import Enum, EnumMeta
81
+ from functools import partial
82
+ from operator import itemgetter, attrgetter
83
+ GENERATOR_FAIL = False
84
+ import importlib.machinery
85
+ EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES)
86
+ try:
87
+ import ctypes
88
+ HAS_CTYPES = True
89
+ # if using `pypy`, pythonapi is not found
90
+ IS_PYPY = not hasattr(ctypes, 'pythonapi')
91
+ except ImportError:
92
+ HAS_CTYPES = False
93
+ IS_PYPY = False
94
+ NumpyUfuncType = None
95
+ NumpyDType = None
96
+ NumpyArrayType = None
97
+ try:
98
+ if not importlib.machinery.PathFinder().find_spec('numpy'):
99
+ raise ImportError("No module named 'numpy'")
100
+ NumpyUfuncType = True
101
+ NumpyDType = True
102
+ NumpyArrayType = True
103
+ except ImportError:
104
+ pass
105
+ def __hook__():
106
+ global NumpyArrayType, NumpyDType, NumpyUfuncType
107
+ from numpy import ufunc as NumpyUfuncType
108
+ from numpy import ndarray as NumpyArrayType
109
+ from numpy import dtype as NumpyDType
110
+ return True
111
+ if NumpyArrayType: # then has numpy
112
+ def ndarraysubclassinstance(obj_type):
113
+ if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
114
+ return False
115
+ # anything below here is a numpy array (or subclass) instance
116
+ __hook__() # import numpy (so the following works!!!)
117
+ # verify that __reduce__ has not been overridden
118
+ if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
119
+ or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
120
+ return False
121
+ return True
122
+ def numpyufunc(obj_type):
123
+ return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
124
+ def numpydtype(obj_type):
125
+ if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
126
+ return False
127
+ # anything below here is a numpy dtype
128
+ __hook__() # import numpy (so the following works!!!)
129
+ return obj_type is type(NumpyDType) # handles subclasses
130
+ else:
131
+ def ndarraysubclassinstance(obj): return False
132
+ def numpyufunc(obj): return False
133
+ def numpydtype(obj): return False
134
+
135
+ from types import GetSetDescriptorType, ClassMethodDescriptorType, \
136
+ WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \
137
+ MethodWrapperType #XXX: unused
138
+
139
+ # make sure to add these 'hand-built' types to _typemap
140
+ CellType = type((lambda x: lambda y: x)(0).__closure__[0])
141
+ PartialType = type(partial(int, base=2))
142
+ SuperType = type(super(Exception, TypeError()))
143
+ ItemGetterType = type(itemgetter(0))
144
+ AttrGetterType = type(attrgetter('__repr__'))
145
+
146
+ try:
147
+ from functools import _lru_cache_wrapper as LRUCacheType
148
+ except ImportError:
149
+ LRUCacheType = None
150
+
151
+ if not isinstance(LRUCacheType, type):
152
+ LRUCacheType = None
153
+
154
+ def get_file_type(*args, **kwargs):
155
+ open = kwargs.pop("open", __builtin__.open)
156
+ f = open(os.devnull, *args, **kwargs)
157
+ t = type(f)
158
+ f.close()
159
+ return t
160
+
161
+ IS_PYODIDE = sys.platform == 'emscripten'
162
+
163
+ FileType = get_file_type('rb', buffering=0)
164
+ TextWrapperType = get_file_type('r', buffering=-1)
165
+ BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1)
166
+ BufferedReaderType = get_file_type('rb', buffering=-1)
167
+ BufferedWriterType = get_file_type('wb', buffering=-1)
168
+ try:
169
+ from _pyio import open as _open
170
+ PyTextWrapperType = get_file_type('r', buffering=-1, open=_open)
171
+ PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
172
+ PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
173
+ PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
174
+ except ImportError:
175
+ PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None
176
+ from io import BytesIO as StringIO
177
+ InputType = OutputType = None
178
+ from socket import socket as SocketType
179
+ #FIXME: additionally calls ForkingPickler.register several times
180
+ from multiprocessing.reduction import _reduce_socket as reduce_socket
181
+ try: #pragma: no cover
182
+ IS_IPYTHON = __IPYTHON__ # is True
183
+ ExitType = None # IPython.core.autocall.ExitAutocall
184
+ IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython')
185
+ except NameError:
186
+ IS_IPYTHON = False
187
+ try: ExitType = type(exit) # apparently 'exit' can be removed
188
+ except NameError: ExitType = None
189
+ IPYTHON_SINGLETONS = ()
190
+
191
+ import inspect
192
+ import typing
193
+
194
+
195
+ ### Shims for different versions of Python and dill
196
+ class Sentinel(object):
197
+ """
198
+ Create a unique sentinel object that is pickled as a constant.
199
+ """
200
+ def __init__(self, name, module_name=None):
201
+ self.name = name
202
+ if module_name is None:
203
+ # Use the calling frame's module
204
+ self.__module__ = inspect.currentframe().f_back.f_globals['__name__']
205
+ else:
206
+ self.__module__ = module_name # pragma: no cover
207
+ def __repr__(self):
208
+ return self.__module__ + '.' + self.name # pragma: no cover
209
+ def __copy__(self):
210
+ return self # pragma: no cover
211
+ def __deepcopy__(self, memo):
212
+ return self # pragma: no cover
213
+ def __reduce__(self):
214
+ return self.name
215
+ def __reduce_ex__(self, protocol):
216
+ return self.name
217
+
218
+ from . import _shims
219
+ from ._shims import Reduce, Getattr
220
+
221
+ ### File modes
222
+ #: Pickles the file handle, preserving mode. The position of the unpickled
223
+ #: object is as for a new file handle.
224
+ HANDLE_FMODE = 0
225
+ #: Pickles the file contents, creating a new file if on load the file does
226
+ #: not exist. The position = min(pickled position, EOF) and mode is chosen
227
+ #: as such that "best" preserves behavior of the original file.
228
+ CONTENTS_FMODE = 1
229
+ #: Pickles the entire file (handle and contents), preserving mode and position.
230
+ FILE_FMODE = 2
231
+
232
+ ### Shorthands (modified from python2.5/lib/pickle.py)
233
+ def copy(obj, *args, **kwds):
234
+ """
235
+ Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).
236
+
237
+ See :func:`dumps` and :func:`loads` for keyword arguments.
238
+ """
239
+ ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
240
+ return loads(dumps(obj, *args, **kwds), ignore=ignore)
241
+
242
+ def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
243
+ """
244
+ Pickle an object to a file.
245
+
246
+ See :func:`dumps` for keyword arguments.
247
+ """
248
+ from .settings import settings
249
+ protocol = settings['protocol'] if protocol is None else int(protocol)
250
+ _kwds = kwds.copy()
251
+ _kwds.update(dict(byref=byref, fmode=fmode, recurse=recurse))
252
+ Pickler(file, protocol, **_kwds).dump(obj)
253
+ return
254
+
255
+ def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
256
+ """
257
+ Pickle an object to a string.
258
+
259
+ *protocol* is the pickler protocol, as defined for Python *pickle*.
260
+
261
+ If *byref=True*, then dill behaves a lot more like pickle as certain
262
+ objects (like modules) are pickled by reference as opposed to attempting
263
+ to pickle the object itself.
264
+
265
+ If *recurse=True*, then objects referred to in the global dictionary
266
+ are recursively traced and pickled, instead of the default behavior
267
+ of attempting to store the entire global dictionary. This is needed for
268
+ functions defined via *exec()*.
269
+
270
+ *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
271
+ or :const:`FILE_FMODE`) indicates how file handles will be pickled.
272
+ For example, when pickling a data file handle for transfer to a remote
273
+ compute service, *FILE_FMODE* will include the file contents in the
274
+ pickle and cursor position so that a remote method can operate
275
+ transparently on an object with an open file handle.
276
+
277
+ Default values for keyword arguments can be set in :mod:`dill.settings`.
278
+ """
279
+ file = StringIO()
280
+ dump(obj, file, protocol, byref, fmode, recurse, **kwds)#, strictio)
281
+ return file.getvalue()
282
+
283
+ def load(file, ignore=None, **kwds):
284
+ """
285
+ Unpickle an object from a file.
286
+
287
+ See :func:`loads` for keyword arguments.
288
+ """
289
+ return Unpickler(file, ignore=ignore, **kwds).load()
290
+
291
+ def loads(str, ignore=None, **kwds):
292
+ """
293
+ Unpickle an object from a string.
294
+
295
+ If *ignore=False* then objects whose class is defined in the module
296
+ *__main__* are updated to reference the existing class in *__main__*,
297
+ otherwise they are left to refer to the reconstructed type, which may
298
+ be different.
299
+
300
+ Default values for keyword arguments can be set in :mod:`dill.settings`.
301
+ """
302
+ file = StringIO(str)
303
+ return load(file, ignore, **kwds)
304
+
305
+ # def dumpzs(obj, protocol=None):
306
+ # """pickle an object to a compressed string"""
307
+ # return zlib.compress(dumps(obj, protocol))
308
+
309
+ # def loadzs(str):
310
+ # """unpickle an object from a compressed string"""
311
+ # return loads(zlib.decompress(str))
312
+
313
+ ### End: Shorthands ###
314
+
315
+ class MetaCatchingDict(dict):
316
+ def get(self, key, default=None):
317
+ try:
318
+ return self[key]
319
+ except KeyError:
320
+ return default
321
+
322
+ def __missing__(self, key):
323
+ if issubclass(key, type):
324
+ return save_type
325
+ else:
326
+ raise KeyError()
327
+
328
+ class PickleWarning(Warning, PickleError):
329
+ pass
330
+
331
+ class PicklingWarning(PickleWarning, PicklingError):
332
+ pass
333
+
334
+ class UnpicklingWarning(PickleWarning, UnpicklingError):
335
+ pass
336
+
337
+ ### Extend the Picklers
338
+ class Pickler(StockPickler):
339
+ """python's Pickler extended to interpreter sessions"""
340
+ dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
341
+ = MetaCatchingDict(StockPickler.dispatch.copy())
342
+ """The dispatch table, a dictionary of serializing functions used
343
+ by Pickler to save objects of specific types. Use :func:`pickle`
344
+ or :func:`register` to associate types to custom functions.
345
+
346
+ :meta hide-value:
347
+ """
348
+ _session = False
349
+ from .settings import settings
350
+
351
+ def __init__(self, file, *args, **kwds):
352
+ settings = Pickler.settings
353
+ _byref = kwds.pop('byref', None)
354
+ #_strictio = kwds.pop('strictio', None)
355
+ _fmode = kwds.pop('fmode', None)
356
+ _recurse = kwds.pop('recurse', None)
357
+ StockPickler.__init__(self, file, *args, **kwds)
358
+ self._main = _main_module
359
+ self._diff_cache = {}
360
+ self._byref = settings['byref'] if _byref is None else _byref
361
+ self._strictio = False #_strictio
362
+ self._fmode = settings['fmode'] if _fmode is None else _fmode
363
+ self._recurse = settings['recurse'] if _recurse is None else _recurse
364
+ self._postproc = OrderedDict()
365
+ self._file = file
366
+
367
+ def save(self, obj, save_persistent_id=True):
368
+ # numpy hack
369
+ obj_type = type(obj)
370
+ if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
371
+ # register if the object is a numpy ufunc
372
+ # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
373
+ if numpyufunc(obj_type):
374
+ @register(obj_type)
375
+ def save_numpy_ufunc(pickler, obj):
376
+ logger.trace(pickler, "Nu: %s", obj)
377
+ name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
378
+ StockPickler.save_global(pickler, obj, name=name)
379
+ logger.trace(pickler, "# Nu")
380
+ return
381
+ # NOTE: the above 'save' performs like:
382
+ # import copy_reg
383
+ # def udump(f): return f.__name__
384
+ # def uload(name): return getattr(numpy, name)
385
+ # copy_reg.pickle(NumpyUfuncType, udump, uload)
386
+ # register if the object is a numpy dtype
387
+ if numpydtype(obj_type):
388
+ @register(obj_type)
389
+ def save_numpy_dtype(pickler, obj):
390
+ logger.trace(pickler, "Dt: %s", obj)
391
+ pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
392
+ logger.trace(pickler, "# Dt")
393
+ return
394
+ # NOTE: the above 'save' performs like:
395
+ # import copy_reg
396
+ # def uload(name): return type(NumpyDType(name))
397
+ # def udump(f): return uload, (f.type,)
398
+ # copy_reg.pickle(NumpyDTypeType, udump, uload)
399
+ # register if the object is a subclassed numpy array instance
400
+ if ndarraysubclassinstance(obj_type):
401
+ @register(obj_type)
402
+ def save_numpy_array(pickler, obj):
403
+ logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
404
+ npdict = getattr(obj, '__dict__', None)
405
+ f, args, state = obj.__reduce__()
406
+ pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
407
+ logger.trace(pickler, "# Nu")
408
+ return
409
+ # end numpy hack
410
+
411
+ if GENERATOR_FAIL and obj_type is GeneratorType:
412
+ msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
413
+ raise PicklingError(msg)
414
+ StockPickler.save(self, obj, save_persistent_id)
415
+
416
+ save.__doc__ = StockPickler.save.__doc__
417
+
418
+ def dump(self, obj): #NOTE: if settings change, need to update attributes
419
+ logger.trace_setup(self)
420
+ StockPickler.dump(self, obj)
421
+ dump.__doc__ = StockPickler.dump.__doc__
422
+
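A brief usage sketch of the extended pickler above. It assumes dill is importable and, as in current releases, exposes `Pickler`/`Unpickler` at the package level; the closure-building helper is made up for illustration. `recurse` and `byref` are the same keywords popped from `kwds` in `__init__` above.

import io
import dill

def make_adder(n):          # hypothetical helper returning a closure
    def add(x):
        return x + n
    return add

buf = io.BytesIO()
# recurse=True traces the globals a function actually uses; byref=False
# stores objects by value instead of by module reference.
dill.Pickler(buf, recurse=True, byref=False).dump(make_adder(3))

buf.seek(0)
restored = dill.Unpickler(buf).load()
assert restored(4) == 7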
423
+ class Unpickler(StockUnpickler):
424
+ """python's Unpickler extended to interpreter sessions and more types"""
425
+ from .settings import settings
426
+ _session = False
427
+
428
+ def find_class(self, module, name):
429
+ if (module, name) == ('__builtin__', '__main__'):
430
+ return self._main.__dict__ #XXX: above set w/save_module_dict
431
+ elif (module, name) == ('__builtin__', 'NoneType'):
432
+ return type(None) #XXX: special case: NoneType missing
433
+ if module == 'dill.dill': module = 'dill._dill'
434
+ return StockUnpickler.find_class(self, module, name)
435
+
436
+ def __init__(self, *args, **kwds):
437
+ settings = Pickler.settings
438
+ _ignore = kwds.pop('ignore', None)
439
+ StockUnpickler.__init__(self, *args, **kwds)
440
+ self._main = _main_module
441
+ self._ignore = settings['ignore'] if _ignore is None else _ignore
442
+
443
+ def load(self): #NOTE: if settings change, need to update attributes
444
+ obj = StockUnpickler.load(self)
445
+ if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
446
+ if not self._ignore:
447
+ # point obj class to main
448
+ try: obj.__class__ = getattr(self._main, type(obj).__name__)
449
+ except (AttributeError,TypeError): pass # defined in a file
450
+ #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
451
+ return obj
452
+ load.__doc__ = StockUnpickler.load.__doc__
453
+ pass
454
+
455
+ '''
456
+ def dispatch_table():
457
+ """get the dispatch table of registered types"""
458
+ return Pickler.dispatch
459
+ '''
460
+
461
+ pickle_dispatch_copy = StockPickler.dispatch.copy()
462
+
463
+ def pickle(t, func):
464
+ """expose :attr:`~Pickler.dispatch` table for user-created extensions"""
465
+ Pickler.dispatch[t] = func
466
+ return
467
+
468
+ def register(t):
469
+ """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
470
+ def proxy(func):
471
+ Pickler.dispatch[t] = func
472
+ return func
473
+ return proxy
474
+
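A short sketch of extending the dispatch table with the helpers above, in the same style the rest of this module uses for dill's own types. `Point` and its reducer are hypothetical; only `register`, `save_reduce`, and `dill.dumps`/`dill.loads` come from dill.

import dill
from dill._dill import register   # the decorator defined just above

class Point:                       # hypothetical user-defined type
    def __init__(self, x, y):
        self.x, self.y = x, y

@register(Point)
def save_point(pickler, obj):
    # Reduce a Point to (callable, args); dill calls Point(x, y) on load.
    pickler.save_reduce(Point, (obj.x, obj.y), obj=obj)

p2 = dill.loads(dill.dumps(Point(1, 2)))
assert (p2.x, p2.y) == (1, 2)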
475
+ def _revert_extension():
476
+ """drop dill-registered types from pickle's dispatch table"""
477
+ for type, func in list(StockPickler.dispatch.items()):
478
+ if func.__module__ == __name__:
479
+ del StockPickler.dispatch[type]
480
+ if type in pickle_dispatch_copy:
481
+ StockPickler.dispatch[type] = pickle_dispatch_copy[type]
482
+
483
+ def use_diff(on=True):
484
+ """
485
+ Reduces size of pickles by only including objects that have changed.
486
+
487
+ Decreases pickle size but increases CPU time needed.
488
+ Also helps avoid some unpickleable objects.
489
+ MUST be called at start of script, otherwise changes will not be recorded.
490
+ """
491
+ global _use_diff, diff
492
+ _use_diff = on
493
+ if _use_diff and diff is None:
494
+ try:
495
+ from . import diff as d
496
+ except ImportError:
497
+ import diff as d
498
+ diff = d
499
+
500
+ def _create_typemap():
501
+ import types
502
+ d = dict(list(__builtin__.__dict__.items()) + \
503
+ list(types.__dict__.items())).items()
504
+ for key, value in d:
505
+ if getattr(value, '__module__', None) == 'builtins' \
506
+ and type(value) is type:
507
+ yield key, value
508
+ return
509
+ _reverse_typemap = dict(_create_typemap())
510
+ _reverse_typemap.update({
511
+ 'PartialType': PartialType,
512
+ 'SuperType': SuperType,
513
+ 'ItemGetterType': ItemGetterType,
514
+ 'AttrGetterType': AttrGetterType,
515
+ })
516
+ if sys.hexversion < 0x30800a2:
517
+ _reverse_typemap.update({
518
+ 'CellType': CellType,
519
+ })
520
+
521
+ # "Incidental" implementation specific types. Unpickling these types in another
522
+ # implementation of Python (PyPy -> CPython) is not guaranteed to work
523
+
524
+ # This dictionary should contain all types that appear in Python implementations
525
+ # but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types
526
+ x=OrderedDict()
527
+ _incedental_reverse_typemap = {
528
+ 'FileType': FileType,
529
+ 'BufferedRandomType': BufferedRandomType,
530
+ 'BufferedReaderType': BufferedReaderType,
531
+ 'BufferedWriterType': BufferedWriterType,
532
+ 'TextWrapperType': TextWrapperType,
533
+ 'PyBufferedRandomType': PyBufferedRandomType,
534
+ 'PyBufferedReaderType': PyBufferedReaderType,
535
+ 'PyBufferedWriterType': PyBufferedWriterType,
536
+ 'PyTextWrapperType': PyTextWrapperType,
537
+ }
538
+
539
+ _incedental_reverse_typemap.update({
540
+ "DictKeysType": type({}.keys()),
541
+ "DictValuesType": type({}.values()),
542
+ "DictItemsType": type({}.items()),
543
+
544
+ "OdictKeysType": type(x.keys()),
545
+ "OdictValuesType": type(x.values()),
546
+ "OdictItemsType": type(x.items()),
547
+ })
548
+
549
+ if ExitType:
550
+ _incedental_reverse_typemap['ExitType'] = ExitType
551
+ if InputType:
552
+ _incedental_reverse_typemap['InputType'] = InputType
553
+ _incedental_reverse_typemap['OutputType'] = OutputType
554
+
555
+ '''
556
+ try:
557
+ import symtable
558
+ _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table)
559
+ except: #FIXME: fails to pickle
560
+ pass
561
+
562
+ if sys.hexversion >= 0x30a00a0:
563
+ _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines())
564
+ '''
565
+
566
+ if sys.hexversion >= 0x30b00b0:
567
+ from types import GenericAlias
568
+ _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
569
+ '''
570
+ _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
571
+ '''
572
+
573
+ try:
574
+ import winreg
575
+ _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType
576
+ except ImportError:
577
+ pass
578
+
579
+ _reverse_typemap.update(_incedental_reverse_typemap)
580
+ _incedental_types = set(_incedental_reverse_typemap.values())
581
+
582
+ del x
583
+
584
+ _typemap = dict((v, k) for k, v in _reverse_typemap.items())
585
+
586
+ def _unmarshal(string):
587
+ return marshal.loads(string)
588
+
589
+ def _load_type(name):
590
+ return _reverse_typemap[name]
591
+
592
+ def _create_type(typeobj, *args):
593
+ return typeobj(*args)
594
+
595
+ def _create_function(fcode, fglobals, fname=None, fdefaults=None,
596
+ fclosure=None, fdict=None, fkwdefaults=None):
597
+ # same as FunctionType, but enables passing __dict__ to the new function;
599
+ # __dict__ is the storehouse for attributes added after function creation
599
+ func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure)
600
+ if fdict is not None:
601
+ func.__dict__.update(fdict) #XXX: better copy? option to copy?
602
+ if fkwdefaults is not None:
603
+ func.__kwdefaults__ = fkwdefaults
604
+ # 'recurse' only stores referenced modules/objects in fglobals,
605
+ # thus we need to make sure that we have __builtins__ as well
606
+ if "__builtins__" not in func.__globals__:
607
+ func.__globals__["__builtins__"] = globals()["__builtins__"]
608
+ # assert id(fglobals) == id(func.__globals__)
609
+ return func
610
+
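A minimal sketch of the reconstruction `_create_function` performs: build a new function from an existing code object, a caller-supplied globals mapping, and defaults, supplying `__builtins__` explicitly just as the code above does. `template`, `clone`, and `offset` are illustrative names only.

from types import FunctionType

def template(a, b=10):
    return a + b + offset           # 'offset' resolves through __globals__

g = {"offset": 100, "__builtins__": __builtins__}   # mirrors the fix-up above
clone = FunctionType(template.__code__, g, "clone", template.__defaults__, None)
assert clone(1) == 111              # 1 + 10 + 100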
611
+ class match:
612
+ """
613
+ Make available a limited structural pattern matching-like syntax for Python < 3.10
614
+
615
+ Patterns can currently only be tuples (without types).
616
+ Inspired by the package pattern-matching-PEP634.
617
+
618
+ Usage:
619
+ >>> with match(args) as m:
620
+ >>> if m.case(('x', 'y')):
621
+ >>> # use m.x and m.y
622
+ >>> elif m.case(('x', 'y', 'z')):
623
+ >>> # use m.x, m.y and m.z
624
+
625
+ Equivalent native code for Python >= 3.10:
626
+ >>> match args:
627
+ >>> case (x, y):
628
+ >>> # use x and y
629
+ >>> case (x, y, z):
630
+ >>> # use x, y and z
631
+ """
632
+ def __init__(self, value):
633
+ self.value = value
634
+ self._fields = None
635
+ def __enter__(self):
636
+ return self
637
+ def __exit__(self, *exc_info):
638
+ return False
639
+ def case(self, args): # *args, **kwargs):
640
+ """just handles tuple patterns"""
641
+ if len(self.value) != len(args): # + len(kwargs):
642
+ return False
643
+ #if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())):
644
+ # return False
645
+ self.args = args # (*args, *kwargs)
646
+ return True
647
+ @property
648
+ def fields(self):
649
+ # Only bind names to values if necessary.
650
+ if self._fields is None:
651
+ self._fields = dict(zip(self.args, self.value))
652
+ return self._fields
653
+ def __getattr__(self, item):
654
+ return self.fields[item]
655
+
656
+ ALL_CODE_PARAMS = [
657
+ # Version New attribute CodeType parameters
658
+ ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
659
+ ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
660
+ ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
661
+ ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
662
+ ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
663
+ ]
664
+ for version, new_attr, params in ALL_CODE_PARAMS:
665
+ if hasattr(CodeType, new_attr):
666
+ CODE_VERSION = version
667
+ CODE_PARAMS = params.split()
668
+ break
669
+ ENCODE_PARAMS = set(CODE_PARAMS).intersection(
670
+ ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])
671
+
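The table above is selected by probing which attribute first appears on `CodeType` in the running interpreter. A self-contained version of the same probe (the printed labels are illustrative):

from types import CodeType

if hasattr(CodeType, "co_endlinetable"):
    print("3.11 alpha style code objects (20 constructor params)")
elif hasattr(CodeType, "co_exceptiontable"):
    print("3.11 style code objects (18 params)")
elif hasattr(CodeType, "co_linetable"):
    print("3.10 style code objects (16 params)")
elif hasattr(CodeType, "co_posonlyargcount"):
    print("3.8/3.9 style code objects (16 params)")
else:
    print("3.7 style code objects (15 params)")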
672
+ def _create_code(*args):
673
+ if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
674
+ LNOTAB, *args = args
675
+ else: # from < 3.10 (or pre-LNOTAB storage)
676
+ LNOTAB = b''
677
+
678
+ with match(args) as m:
679
+ # Python 3.11/3.12a (18 members)
680
+ if m.case((
681
+ 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
682
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
683
+ 'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
684
+ )):
685
+ if CODE_VERSION == (3,11):
686
+ return CodeType(
687
+ *args[:6],
688
+ args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
689
+ *args[7:14],
690
+ args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
691
+ args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
692
+ args[16],
693
+ args[17],
694
+ )
695
+ fields = m.fields
696
+ # Python 3.10 or 3.8/3.9 (16 members)
697
+ elif m.case((
698
+ 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
699
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
700
+ 'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
701
+ )):
702
+ if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
703
+ return CodeType(
704
+ *args[:6],
705
+ args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
706
+ *args[7:13],
707
+ args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
708
+ args[14],
709
+ args[15],
710
+ )
711
+ fields = m.fields
712
+ if CODE_VERSION >= (3,10):
713
+ fields['linetable'] = m.LNOTAB_OR_LINETABLE
714
+ else:
715
+ fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
716
+ # Python 3.7 (15 args)
717
+ elif m.case((
718
+ 'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
719
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
720
+ 'lnotab', 'freevars', 'cellvars' # args[12:]
721
+ )):
722
+ if CODE_VERSION == (3,7):
723
+ return CodeType(
724
+ *args[:5],
725
+ args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
726
+ *args[6:12],
727
+ args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
728
+ args[13],
729
+ args[14],
730
+ )
731
+ fields = m.fields
732
+ # Python 3.11a (20 members)
733
+ elif m.case((
734
+ 'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
735
+ 'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
736
+ 'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
737
+ )):
738
+ if CODE_VERSION == (3,11,'a'):
739
+ return CodeType(
740
+ *args[:6],
741
+ args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
742
+ *args[7:14],
743
+ *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
744
+ args[18],
745
+ args[19],
746
+ )
747
+ fields = m.fields
748
+ else:
749
+ raise UnpicklingError("pattern match for code object failed")
750
+
751
+ # The args format doesn't exactly match this interpreter's version; fill in defaults below.
752
+ fields.setdefault('posonlyargcount', 0) # from python <= 3.7
753
+ fields.setdefault('lnotab', LNOTAB) # from python >= 3.10
754
+ fields.setdefault('linetable', b'') # from python <= 3.9
755
+ fields.setdefault('qualname', fields['name']) # from python <= 3.10
756
+ fields.setdefault('exceptiontable', b'') # from python <= 3.10
757
+ fields.setdefault('endlinetable', None) # from python != 3.11a
758
+ fields.setdefault('columntable', None) # from python != 3.11a
759
+
760
+ args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
761
+ for k in CODE_PARAMS)
762
+ return CodeType(*args)
763
+
764
+ def _create_ftype(ftypeobj, func, args, kwds):
765
+ if kwds is None:
766
+ kwds = {}
767
+ if args is None:
768
+ args = ()
769
+ return ftypeobj(func, *args, **kwds)
770
+
771
+ def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
772
+ if not argz:
773
+ return typing.Tuple[()].copy_with(())
774
+ if argz == ((),):
775
+ return typing.Tuple[()]
776
+ return typing.Tuple[argz]
777
+
778
+ def _create_lock(locked, *args): #XXX: ignores 'blocking'
779
+ from threading import Lock
780
+ lock = Lock()
781
+ if locked:
782
+ if not lock.acquire(False):
783
+ raise UnpicklingError("Cannot acquire lock")
784
+ return lock
785
+
786
+ def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
787
+ lock = RLockType()
788
+ if owner is not None:
789
+ lock._acquire_restore((count, owner))
790
+ if owner and not lock._is_owned():
791
+ raise UnpicklingError("Cannot acquire lock")
792
+ return lock
793
+
794
+ # thanks to matsjoyce for adding all the different file modes
795
+ def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
796
+ # only pickles the handle, not the file contents... good? or StringIO(data)?
797
+ # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
798
+ # NOTE: handle special cases first (are there more special cases?)
799
+ names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
800
+ '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
801
+ if name in list(names.keys()):
802
+ f = names[name] #XXX: safer "f=sys.stdin"
803
+ elif name == '<tmpfile>':
804
+ f = os.tmpfile()
805
+ elif name == '<fdopen>':
806
+ import tempfile
807
+ f = tempfile.TemporaryFile(mode)
808
+ else:
809
+ try:
810
+ exists = os.path.exists(name)
811
+ except Exception:
812
+ exists = False
813
+ if not exists:
814
+ if strictio:
815
+ raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
816
+ elif "r" in mode and fmode != FILE_FMODE:
817
+ name = '<fdopen>' # or os.devnull?
818
+ current_size = 0 # or maintain position?
819
+ else:
820
+ current_size = os.path.getsize(name)
821
+
822
+ if position > current_size:
823
+ if strictio:
824
+ raise ValueError("invalid buffer size")
825
+ elif fmode == CONTENTS_FMODE:
826
+ position = current_size
827
+ # try to open the file by name
828
+ # NOTE: has different fileno
829
+ try:
830
+ #FIXME: missing: *buffering*, encoding, softspace
831
+ if fmode == FILE_FMODE:
832
+ f = open(name, mode if "w" in mode else "w")
833
+ f.write(fdata)
834
+ if "w" not in mode:
835
+ f.close()
836
+ f = open(name, mode)
837
+ elif name == '<fdopen>': # file did not exist
838
+ import tempfile
839
+ f = tempfile.TemporaryFile(mode)
840
+ # treat x mode as w mode
841
+ elif fmode == CONTENTS_FMODE \
842
+ and ("w" in mode or "x" in mode):
843
+ # stop truncation when opening
844
+ flags = os.O_CREAT
845
+ if "+" in mode:
846
+ flags |= os.O_RDWR
847
+ else:
848
+ flags |= os.O_WRONLY
849
+ f = os.fdopen(os.open(name, flags), mode)
850
+ # set name to the correct value
851
+ r = getattr(f, "buffer", f)
852
+ r = getattr(r, "raw", r)
853
+ r.name = name
854
+ assert f.name == name
855
+ else:
856
+ f = open(name, mode)
857
+ except (IOError, FileNotFoundError):
858
+ err = sys.exc_info()[1]
859
+ raise UnpicklingError(err)
860
+ if closed:
861
+ f.close()
862
+ elif position >= 0 and fmode != HANDLE_FMODE:
863
+ f.seek(position)
864
+ return f
865
+
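A hedged example of how the file modes handled above surface through the public API, assuming the package-level constants (`dill.HANDLE_FMODE`, `dill.CONTENTS_FMODE`, `dill.FILE_FMODE`) are exported as in current dill releases; the temporary file and its contents are illustrative.

import os
import tempfile
import dill

path = os.path.join(tempfile.mkdtemp(), "note.txt")
with open(path, "w") as f:
    f.write("hello")

handle = open(path, "r")
handle.read(2)                       # advance the position to 2

# HANDLE_FMODE (the default) pickles only the handle; the file is reopened
# by name on load. FILE_FMODE would embed the file's contents as well.
data = dill.dumps(handle, fmode=dill.HANDLE_FMODE)
restored = dill.loads(data)
assert restored.name == path
handle.close()
restored.close()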
866
+ def _create_stringi(value, position, closed):
867
+ f = StringIO(value)
868
+ if closed: f.close()
869
+ else: f.seek(position)
870
+ return f
871
+
872
+ def _create_stringo(value, position, closed):
873
+ f = StringIO()
874
+ if closed: f.close()
875
+ else:
876
+ f.write(value)
877
+ f.seek(position)
878
+ return f
879
+
880
+ class _itemgetter_helper(object):
881
+ def __init__(self):
882
+ self.items = []
883
+ def __getitem__(self, item):
884
+ self.items.append(item)
885
+ return
886
+
887
+ class _attrgetter_helper(object):
888
+ def __init__(self, attrs, index=None):
889
+ self.attrs = attrs
890
+ self.index = index
891
+ def __getattribute__(self, attr):
892
+ attrs = object.__getattribute__(self, "attrs")
893
+ index = object.__getattribute__(self, "index")
894
+ if index is None:
895
+ index = len(attrs)
896
+ attrs.append(attr)
897
+ else:
898
+ attrs[index] = ".".join([attrs[index], attr])
899
+ return type(self)(attrs, index)
900
+
901
+ class _dictproxy_helper(dict):
902
+ def __ror__(self, a):
903
+ return a
904
+
905
+ _dictproxy_helper_instance = _dictproxy_helper()
906
+
907
+ __d = {}
908
+ try:
909
+ # In CPython 3.9 and later, this trick can be used to exploit the
910
+ # implementation of the __or__ function of MappingProxyType to get the true
911
+ # mapping referenced by the proxy. It may work for other implementations,
912
+ # but is not guaranteed.
913
+ MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
914
+ except Exception:
915
+ MAPPING_PROXY_TRICK = False
916
+ del __d
917
+
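A standalone illustration of the trick checked above: on CPython 3.9+ the `|` operator on a `mappingproxy` is applied to the proxy's underlying dict, and a dict subclass whose `__ror__` returns its other operand therefore hands back that underlying dict itself. Names here are illustrative.

from types import MappingProxyType

class _Helper(dict):
    # Mirrors _dictproxy_helper above: reflected | returns the other operand.
    def __ror__(self, other):
        return other

d = {"a": 1}
proxy = MappingProxyType(d)
try:
    recovered = proxy | _Helper()    # CPython >= 3.9 only
    assert recovered is d            # the original dict, not a copy
except TypeError:
    recovered = dict(proxy)          # older interpreters: settle for a copy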
918
+ # _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
919
+ # whose _create_cell functions do not have a default value.
920
+ # _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
921
+ # to _create_cell) once breaking changes are allowed.
922
+ _CELL_REF = None
923
+ _CELL_EMPTY = Sentinel('_CELL_EMPTY')
924
+
925
+ def _create_cell(contents=None):
926
+ if contents is not _CELL_EMPTY:
927
+ value = contents
928
+ return (lambda: value).__closure__[0]
929
+
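The one-line closure trick in `_create_cell` is worth seeing in isolation: a lambda that closes over a local variable carries a real `cell` object in its `__closure__`. The helper name below is illustrative.

def make_cell(value):
    # The lambda closes over the local 'value', so its __closure__ holds
    # a genuine cell wrapping that value.
    return (lambda: value).__closure__[0]

cell = make_cell(42)
assert type(cell).__name__ == "cell"
assert cell.cell_contents == 42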
930
+ def _create_weakref(obj, *args):
931
+ from weakref import ref
932
+ if obj is None: # it's dead
933
+ from collections import UserDict
934
+ return ref(UserDict(), *args)
935
+ return ref(obj, *args)
936
+
937
+ def _create_weakproxy(obj, callable=False, *args):
938
+ from weakref import proxy
939
+ if obj is None: # it's dead
940
+ if callable: return proxy(lambda x:x, *args)
941
+ from collections import UserDict
942
+ return proxy(UserDict(), *args)
943
+ return proxy(obj, *args)
944
+
945
+ def _eval_repr(repr_str):
946
+ return eval(repr_str)
947
+
948
+ def _create_array(f, args, state, npdict=None):
949
+ #array = numpy.core.multiarray._reconstruct(*args)
950
+ array = f(*args)
951
+ array.__setstate__(state)
952
+ if npdict is not None: # we also have saved state in __dict__
953
+ array.__dict__.update(npdict)
954
+ return array
955
+
956
+ def _create_dtypemeta(scalar_type):
957
+ if NumpyDType is True: __hook__() # a bit hacky I think
958
+ if scalar_type is None:
959
+ return NumpyDType
960
+ return type(NumpyDType(scalar_type))
961
+
962
+ def _create_namedtuple(name, fieldnames, modulename, defaults=None):
963
+ class_ = _import_module(modulename + '.' + name, safe=True)
964
+ if class_ is not None:
965
+ return class_
966
+ import collections
967
+ t = collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename)
968
+ return t
969
+
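For reference, the fallback above is just the standard-library call shown below; the class name, fields, defaults, and module are illustrative.

import collections

Point = collections.namedtuple("Point", ["x", "y"], defaults=[0], module="my_module")
assert Point(1) == (1, 0)                 # 'defaults' fills the rightmost fields
assert Point.__module__ == "my_module"    # 'module' sets where the class claims to live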
970
+ def _create_capsule(pointer, name, context, destructor):
971
+ attr_found = False
972
+ try:
973
+ # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
974
+ uname = name.decode('utf8')
975
+ for i in range(1, uname.count('.')+1):
976
+ names = uname.rsplit('.', i)
977
+ try:
978
+ module = __import__(names[0])
979
+ except ImportError:
980
+ pass
981
+ obj = module
982
+ for attr in names[1:]:
983
+ obj = getattr(obj, attr)
984
+ capsule = obj
985
+ attr_found = True
986
+ break
987
+ except Exception:
988
+ pass
989
+
990
+ if attr_found:
991
+ if _PyCapsule_IsValid(capsule, name):
992
+ return capsule
993
+ raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
994
+ else:
995
+ #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
996
+ capsule = _PyCapsule_New(pointer, name, destructor)
997
+ _PyCapsule_SetContext(capsule, context)
998
+ return capsule
999
+
1000
+ def _getattr(objclass, name, repr_str):
1001
+ # hack to grab the reference directly
1002
+ try: #XXX: works only for __builtin__ ?
1003
+ attr = repr_str.split("'")[3]
1004
+ return eval(attr+'.__dict__["'+name+'"]')
1005
+ except Exception:
1006
+ try:
1007
+ attr = objclass.__dict__
1008
+ if type(attr) is DictProxyType:
1009
+ attr = attr[name]
1010
+ else:
1011
+ attr = getattr(objclass,name)
1012
+ except (AttributeError, KeyError):
1013
+ attr = getattr(objclass,name)
1014
+ return attr
1015
+
1016
+ def _get_attr(self, name):
1017
+ # stop recursive pickling
1018
+ return getattr(self, name, None) or getattr(__builtin__, name)
1019
+
1020
+ def _import_module(import_name, safe=False):
1021
+ try:
1022
+ if import_name.startswith('__runtime__.'):
1023
+ return sys.modules[import_name]
1024
+ elif '.' in import_name:
1025
+ items = import_name.split('.')
1026
+ module = '.'.join(items[:-1])
1027
+ obj = items[-1]
1028
+ submodule = getattr(__import__(module, None, None, [obj]), obj)
1029
+ if isinstance(submodule, (ModuleType, type)):
1030
+ return submodule
1031
+ return __import__(import_name, None, None, [obj])
1032
+ else:
1033
+ return __import__(import_name)
1034
+ except (ImportError, AttributeError, KeyError):
1035
+ if safe:
1036
+ return None
1037
+ raise
1038
+
1039
+ # https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333
1040
+ def _getattribute(obj, name):
1041
+ for subpath in name.split('.'):
1042
+ if subpath == '<locals>':
1043
+ raise AttributeError("Can't get local attribute {!r} on {!r}"
1044
+ .format(name, obj))
1045
+ try:
1046
+ parent = obj
1047
+ obj = getattr(obj, subpath)
1048
+ except AttributeError:
1049
+ raise AttributeError("Can't get attribute {!r} on {!r}"
1050
+ .format(name, obj))
1051
+ return obj, parent
1052
+
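A quick usage sketch of the dotted lookup above, importing it from `dill._dill` where it is defined; `collections.OrderedDict.fromkeys` is just a convenient target.

import collections
from dill._dill import _getattribute

found, parent = _getattribute(collections, "OrderedDict.fromkeys")
assert parent is collections.OrderedDict
assert found.__name__ == "fromkeys"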
1053
+ def _locate_function(obj, pickler=None):
1054
+ module_name = getattr(obj, '__module__', None)
1055
+ if module_name in ['__main__', None] or \
1056
+ pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
1057
+ return False
1058
+ if hasattr(obj, '__qualname__'):
1059
+ module = _import_module(module_name, safe=True)
1060
+ try:
1061
+ found, _ = _getattribute(module, obj.__qualname__)
1062
+ return found is obj
1063
+ except AttributeError:
1064
+ return False
1065
+ else:
1066
+ found = _import_module(module_name + '.' + obj.__name__, safe=True)
1067
+ return found is obj
1068
+
1069
+
1070
+ def _setitems(dest, source):
1071
+ for k, v in source.items():
1072
+ dest[k] = v
1073
+
1074
+
1075
+ def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
1076
+ if obj is Getattr.NO_DEFAULT:
1077
+ obj = Reduce(reduction) # pragma: no cover
1078
+
1079
+ if is_pickler_dill is None:
1080
+ is_pickler_dill = is_dill(pickler, child=True)
1081
+ if is_pickler_dill:
1082
+ # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
1083
+ # if not hasattr(pickler, 'x'): pickler.x = 0
1084
+ # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
1085
+ # pickler.x += 1
1086
+ if postproc_list is None:
1087
+ postproc_list = []
1088
+
1089
+ # Recursive object not supported. Default to a global instead.
1090
+ if id(obj) in pickler._postproc:
1091
+ name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
1092
+ warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
1093
+ pickler.save_global(obj)
1094
+ return
1095
+ pickler._postproc[id(obj)] = postproc_list
1096
+
1097
+ # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
1098
+ pickler.save_reduce(*reduction, obj=obj)
1099
+
1100
+ if is_pickler_dill:
1101
+ # pickler.x -= 1
1102
+ # print(pickler.x*' ', 'pop', obj, id(obj))
1103
+ postproc = pickler._postproc.pop(id(obj))
1104
+ # assert postproc_list == postproc, 'Stack tampered!'
1105
+ for reduction in reversed(postproc):
1106
+ if reduction[0] is _setitems:
1107
+ # use the internal machinery of pickle.py to speed up
1108
+ # updating a dictionary in postproc
1109
+ dest, source = reduction[1]
1110
+ if source:
1111
+ pickler.write(pickler.get(pickler.memo[id(dest)][0]))
1112
+ pickler._batch_setitems(iter(source.items()))
1113
+ else:
1114
+ # Updating with an empty dictionary. Same as doing nothing.
1115
+ continue
1116
+ else:
1117
+ pickler.save_reduce(*reduction)
1118
+ # pop None created by calling preprocessing step off stack
1119
+ pickler.write(POP)
1120
+
1121
+ #@register(CodeType)
1122
+ #def save_code(pickler, obj):
1123
+ # logger.trace(pickler, "Co: %s", obj)
1124
+ # pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj)
1125
+ # logger.trace(pickler, "# Co")
1126
+ # return
1127
+
1128
+ # The following function is based on 'save_codeobject' from 'cloudpickle'
1129
+ # Copyright (c) 2012, Regents of the University of California.
1130
+ # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1131
+ # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
1132
+ @register(CodeType)
1133
+ def save_code(pickler, obj):
1134
+ logger.trace(pickler, "Co: %s", obj)
1135
+ if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
1136
+ args = (
1137
+ obj.co_lnotab, # for < python 3.10 [not counted in args]
1138
+ obj.co_argcount, obj.co_posonlyargcount,
1139
+ obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
1140
+ obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
1141
+ obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
1142
+ obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
1143
+ obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
1144
+ obj.co_cellvars
1145
+ )
1146
+ elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
1147
+ with warnings.catch_warnings():
1148
+ if not OLD312a7: # issue 597
1149
+ warnings.filterwarnings('ignore', category=DeprecationWarning)
1150
+ args = (
1151
+ obj.co_lnotab, # for < python 3.10 [not counted in args]
1152
+ obj.co_argcount, obj.co_posonlyargcount,
1153
+ obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
1154
+ obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
1155
+ obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
1156
+ obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
1157
+ obj.co_freevars, obj.co_cellvars
1158
+ )
1159
+ elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
1160
+ args = (
1161
+ obj.co_lnotab, # for < python 3.10 [not counted in args]
1162
+ obj.co_argcount, obj.co_posonlyargcount,
1163
+ obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
1164
+ obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
1165
+ obj.co_varnames, obj.co_filename, obj.co_name,
1166
+ obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
1167
+ obj.co_cellvars
1168
+ )
1169
+ elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
1170
+ args = (
1171
+ obj.co_argcount, obj.co_posonlyargcount,
1172
+ obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
1173
+ obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
1174
+ obj.co_varnames, obj.co_filename, obj.co_name,
1175
+ obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
1176
+ obj.co_cellvars
1177
+ )
1178
+ else: # python 3.7 (15 args)
1179
+ args = (
1180
+ obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
1181
+ obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
1182
+ obj.co_names, obj.co_varnames, obj.co_filename,
1183
+ obj.co_name, obj.co_firstlineno, obj.co_lnotab,
1184
+ obj.co_freevars, obj.co_cellvars
1185
+ )
1186
+
1187
+ pickler.save_reduce(_create_code, args, obj=obj)
1188
+ logger.trace(pickler, "# Co")
1189
+ return
1190
+
1191
+ def _repr_dict(obj):
1192
+ """Make a short string representation of a dictionary."""
1193
+ return "<%s object at %#012x>" % (type(obj).__name__, id(obj))
1194
+
1195
+ @register(dict)
1196
+ def save_module_dict(pickler, obj):
1197
+ if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
1198
+ not (pickler._session and pickler._first_pass):
1199
+ logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
1200
+ pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
1201
+ logger.trace(pickler, "# D1")
1202
+ elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
1203
+ logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
1204
+ pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
1205
+ logger.trace(pickler, "# D3")
1206
+ elif '__name__' in obj and obj != _main_module.__dict__ \
1207
+ and type(obj['__name__']) is str \
1208
+ and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
1209
+ logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
1210
+ pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
1211
+ logger.trace(pickler, "# D4")
1212
+ else:
1213
+ logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
1214
+ if is_dill(pickler, child=False) and pickler._session:
1215
+ # we only care about session the first pass thru
1216
+ pickler._first_pass = False
1217
+ StockPickler.save_dict(pickler, obj)
1218
+ logger.trace(pickler, "# D2")
1219
+ return
1220
+
1221
+
1222
+ if not OLD310 and MAPPING_PROXY_TRICK:
1223
+ def save_dict_view(dicttype):
1224
+ def save_dict_view_for_function(func):
1225
+ def _save_dict_view(pickler, obj):
1226
+ logger.trace(pickler, "Dkvi: <%s>", obj)
1227
+ mapping = obj.mapping | _dictproxy_helper_instance
1228
+ pickler.save_reduce(func, (mapping,), obj=obj)
1229
+ logger.trace(pickler, "# Dkvi")
1230
+ return _save_dict_view
1231
+ return [
1232
+ (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
1233
+ for funcname in ('keys', 'values', 'items')
1234
+ ]
1235
+ else:
1236
+ # The following functions are based on 'cloudpickle'
1237
+ # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
1238
+ # Copyright (c) 2012, Regents of the University of California.
1239
+ # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1240
+ # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
1241
+ def save_dict_view(dicttype):
1242
+ def save_dict_keys(pickler, obj):
1243
+ logger.trace(pickler, "Dk: <%s>", obj)
1244
+ dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
1245
+ pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
1246
+ logger.trace(pickler, "# Dk")
1247
+
1248
+ def save_dict_values(pickler, obj):
1249
+ logger.trace(pickler, "Dv: <%s>", obj)
1250
+ dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
1251
+ pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
1252
+ logger.trace(pickler, "# Dv")
1253
+
1254
+ def save_dict_items(pickler, obj):
1255
+ logger.trace(pickler, "Di: <%s>", obj)
1256
+ pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
1257
+ logger.trace(pickler, "# Di")
1258
+
1259
+ return (
1260
+ ('keys', save_dict_keys),
1261
+ ('values', save_dict_values),
1262
+ ('items', save_dict_items)
1263
+ )
1264
+
1265
+ for __dicttype in (
1266
+ dict,
1267
+ OrderedDict
1268
+ ):
1269
+ __obj = __dicttype()
1270
+ for __funcname, __savefunc in save_dict_view(__dicttype):
1271
+ __tview = type(getattr(__obj, __funcname)())
1272
+ if __tview not in Pickler.dispatch:
1273
+ Pickler.dispatch[__tview] = __savefunc
1274
+ del __dicttype, __obj, __funcname, __tview, __savefunc
1275
+
1276
+
1277
+ @register(ClassType)
1278
+ def save_classobj(pickler, obj): #FIXME: enable pickler._byref
1279
+ if not _locate_function(obj, pickler):
1280
+ logger.trace(pickler, "C1: %s", obj)
1281
+ pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
1282
+ obj.__dict__), obj=obj)
1283
+ #XXX: or obj.__dict__.copy()), obj=obj) ?
1284
+ logger.trace(pickler, "# C1")
1285
+ else:
1286
+ logger.trace(pickler, "C2: %s", obj)
1287
+ name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
1288
+ StockPickler.save_global(pickler, obj, name=name)
1289
+ logger.trace(pickler, "# C2")
1290
+ return
1291
+
1292
+ @register(typing._GenericAlias)
1293
+ def save_generic_alias(pickler, obj):
1294
+ args = obj.__args__
1295
+ if type(obj.__reduce__()) is str:
1296
+ logger.trace(pickler, "Ga0: %s", obj)
1297
+ StockPickler.save_global(pickler, obj, name=obj.__reduce__())
1298
+ logger.trace(pickler, "# Ga0")
1299
+ elif obj.__origin__ is tuple and (not args or args == ((),)):
1300
+ logger.trace(pickler, "Ga1: %s", obj)
1301
+ pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
1302
+ logger.trace(pickler, "# Ga1")
1303
+ else:
1304
+ logger.trace(pickler, "Ga2: %s", obj)
1305
+ StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
1306
+ logger.trace(pickler, "# Ga2")
1307
+ return
1308
+
1309
+ @register(LockType)
1310
+ def save_lock(pickler, obj):
1311
+ logger.trace(pickler, "Lo: %s", obj)
1312
+ pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj)
1313
+ logger.trace(pickler, "# Lo")
1314
+ return
1315
+
1316
+ @register(RLockType)
1317
+ def save_rlock(pickler, obj):
1318
+ logger.trace(pickler, "RL: %s", obj)
1319
+ r = obj.__repr__() # don't use _release_save as it unlocks the lock
1320
+ count = int(r.split('count=')[1].split()[0].rstrip('>'))
1321
+ owner = int(r.split('owner=')[1].split()[0])
1322
+ pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
1323
+ logger.trace(pickler, "# RL")
1324
+ return
1325
+
1326
+ #@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
1327
+ def save_socket(pickler, obj):
1328
+ logger.trace(pickler, "So: %s", obj)
1329
+ pickler.save_reduce(*reduce_socket(obj))
1330
+ logger.trace(pickler, "# So")
1331
+ return
1332
+
1333
+ def _save_file(pickler, obj, open_):
1334
+ if obj.closed:
1335
+ position = 0
1336
+ else:
1337
+ obj.flush()
1338
+ if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
1339
+ position = -1
1340
+ else:
1341
+ position = obj.tell()
1342
+ if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
1343
+ f = open_(obj.name, "r")
1344
+ fdata = f.read()
1345
+ f.close()
1346
+ else:
1347
+ fdata = ""
1348
+ if is_dill(pickler, child=True):
1349
+ strictio = pickler._strictio
1350
+ fmode = pickler._fmode
1351
+ else:
1352
+ strictio = False
1353
+ fmode = 0 # HANDLE_FMODE
1354
+ pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
1355
+ obj.closed, open_, strictio,
1356
+ fmode, fdata), obj=obj)
1357
+ return
1358
+
1359
+
1360
+ @register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
1361
+ @register(BufferedReaderType)
1362
+ @register(BufferedWriterType)
1363
+ @register(TextWrapperType)
1364
+ def save_file(pickler, obj):
1365
+ logger.trace(pickler, "Fi: %s", obj)
1366
+ f = _save_file(pickler, obj, open)
1367
+ logger.trace(pickler, "# Fi")
1368
+ return f
1369
+
1370
+ if BufferedRandomType:
1371
+ @register(BufferedRandomType)
1372
+ def save_file(pickler, obj):
1373
+ logger.trace(pickler, "Fi: %s", obj)
1374
+ f = _save_file(pickler, obj, open)
1375
+ logger.trace(pickler, "# Fi")
1376
+ return f
1377
+
1378
+ if PyTextWrapperType:
1379
+ @register(PyBufferedReaderType)
1380
+ @register(PyBufferedWriterType)
1381
+ @register(PyTextWrapperType)
1382
+ def save_file(pickler, obj):
1383
+ logger.trace(pickler, "Fi: %s", obj)
1384
+ f = _save_file(pickler, obj, _open)
1385
+ logger.trace(pickler, "# Fi")
1386
+ return f
1387
+
1388
+ if PyBufferedRandomType:
1389
+ @register(PyBufferedRandomType)
1390
+ def save_file(pickler, obj):
1391
+ logger.trace(pickler, "Fi: %s", obj)
1392
+ f = _save_file(pickler, obj, _open)
1393
+ logger.trace(pickler, "# Fi")
1394
+ return f
1395
+
1396
+
1397
+ # The following two functions are based on 'saveCStringIoInput'
1398
+ # and 'saveCStringIoOutput' from spickle
1399
+ # Copyright (c) 2011 by science+computing ag
1400
+ # License: http://www.apache.org/licenses/LICENSE-2.0
1401
+ if InputType:
1402
+ @register(InputType)
1403
+ def save_stringi(pickler, obj):
1404
+ logger.trace(pickler, "Io: %s", obj)
1405
+ if obj.closed:
1406
+ value = ''; position = 0
1407
+ else:
1408
+ value = obj.getvalue(); position = obj.tell()
1409
+ pickler.save_reduce(_create_stringi, (value, position, \
1410
+ obj.closed), obj=obj)
1411
+ logger.trace(pickler, "# Io")
1412
+ return
1413
+
1414
+ @register(OutputType)
1415
+ def save_stringo(pickler, obj):
1416
+ logger.trace(pickler, "Io: %s", obj)
1417
+ if obj.closed:
1418
+ value = ''; position = 0
1419
+ else:
1420
+ value = obj.getvalue(); position = obj.tell()
1421
+ pickler.save_reduce(_create_stringo, (value, position, \
1422
+ obj.closed), obj=obj)
1423
+ logger.trace(pickler, "# Io")
1424
+ return
1425
+
1426
+ if LRUCacheType is not None:
1427
+ from functools import lru_cache
1428
+ @register(LRUCacheType)
1429
+ def save_lru_cache(pickler, obj):
1430
+ logger.trace(pickler, "LRU: %s", obj)
1431
+ if OLD39:
1432
+ kwargs = obj.cache_info()
1433
+ args = (kwargs.maxsize,)
1434
+ else:
1435
+ kwargs = obj.cache_parameters()
1436
+ args = (kwargs['maxsize'], kwargs['typed'])
1437
+ if args != lru_cache.__defaults__:
1438
+ wrapper = Reduce(lru_cache, args, is_callable=True)
1439
+ else:
1440
+ wrapper = lru_cache
1441
+ pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
1442
+ logger.trace(pickler, "# LRU")
1443
+ return
1444
+
1445
+ @register(SuperType)
1446
+ def save_super(pickler, obj):
1447
+ logger.trace(pickler, "Su: %s", obj)
1448
+ pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
1449
+ logger.trace(pickler, "# Su")
1450
+ return
1451
+
1452
+ if IS_PYPY:
1453
+ @register(MethodType)
1454
+ def save_instancemethod0(pickler, obj):
1455
+ code = getattr(obj.__func__, '__code__', None)
1456
+ if code is not None and type(code) is not CodeType \
1457
+ and getattr(obj.__self__, obj.__name__) == obj:
1458
+ # Some PyPy builtin functions have no module name
1459
+ logger.trace(pickler, "Me2: %s", obj)
1460
+ # TODO: verify that this works for all PyPy builtin methods
1461
+ pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
1462
+ logger.trace(pickler, "# Me2")
1463
+ return
1464
+
1465
+ logger.trace(pickler, "Me1: %s", obj)
1466
+ pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
1467
+ logger.trace(pickler, "# Me1")
1468
+ return
1469
+ else:
1470
+ @register(MethodType)
1471
+ def save_instancemethod0(pickler, obj):
1472
+ logger.trace(pickler, "Me1: %s", obj)
1473
+ pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
1474
+ logger.trace(pickler, "# Me1")
1475
+ return
1476
+
1477
+ if not IS_PYPY:
1478
+ @register(MemberDescriptorType)
1479
+ @register(GetSetDescriptorType)
1480
+ @register(MethodDescriptorType)
1481
+ @register(WrapperDescriptorType)
1482
+ @register(ClassMethodDescriptorType)
1483
+ def save_wrapper_descriptor(pickler, obj):
1484
+ logger.trace(pickler, "Wr: %s", obj)
1485
+ pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
1486
+ obj.__repr__()), obj=obj)
1487
+ logger.trace(pickler, "# Wr")
1488
+ return
1489
+ else:
1490
+ @register(MemberDescriptorType)
1491
+ @register(GetSetDescriptorType)
1492
+ def save_wrapper_descriptor(pickler, obj):
1493
+ logger.trace(pickler, "Wr: %s", obj)
1494
+ pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
1495
+ obj.__repr__()), obj=obj)
1496
+ logger.trace(pickler, "# Wr")
1497
+ return
1498
+
1499
+ @register(CellType)
1500
+ def save_cell(pickler, obj):
1501
+ try:
1502
+ f = obj.cell_contents
1503
+ except ValueError: # cell is empty
1504
+ logger.trace(pickler, "Ce3: %s", obj)
1505
+ # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
1506
+ # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
1507
+ # _shims.py. This object is not present in Python 3 because the cell's
1508
+ # contents can be deleted in newer versions of Python. The reduce object
1509
+ # will instead unpickle to None if unpickled in Python 3.
1510
+
1511
+ # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
1512
+ # be replaced by () OR the delattr function can be removed depending on
1513
+ # whichever is more convenient.
1514
+ pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
1515
+ # Call the function _delattr on the cell's cell_contents attribute
1516
+ # The result of this function call will be None
1517
+ pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
1518
+ # pop None created by calling _delattr off stack
1519
+ pickler.write(POP)
1520
+ logger.trace(pickler, "# Ce3")
1521
+ return
1522
+ if is_dill(pickler, child=True):
1523
+ if id(f) in pickler._postproc:
1524
+ # Already seen. Add to its postprocessing.
1525
+ postproc = pickler._postproc[id(f)]
1526
+ else:
1527
+ # Haven't seen it. Add to the highest possible object and set its
1528
+ # value as late as possible to prevent a cycle.
1529
+ postproc = next(iter(pickler._postproc.values()), None)
1530
+ if postproc is not None:
1531
+ logger.trace(pickler, "Ce2: %s", obj)
1532
+ # _CELL_REF is defined in _shims.py to support older versions of
1533
+ # dill. When breaking changes are made to dill, (_CELL_REF,) can
1534
+ # be replaced by ()
1535
+ pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
1536
+ postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
1537
+ logger.trace(pickler, "# Ce2")
1538
+ return
1539
+ logger.trace(pickler, "Ce1: %s", obj)
1540
+ pickler.save_reduce(_create_cell, (f,), obj=obj)
1541
+ logger.trace(pickler, "# Ce1")
1542
+ return
1543
+
1544
+ if MAPPING_PROXY_TRICK:
1545
+ @register(DictProxyType)
1546
+ def save_dictproxy(pickler, obj):
1547
+ logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
1548
+ mapping = obj | _dictproxy_helper_instance
1549
+ pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
1550
+ logger.trace(pickler, "# Mp")
1551
+ return
1552
+ else:
1553
+ @register(DictProxyType)
1554
+ def save_dictproxy(pickler, obj):
1555
+ logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
1556
+ pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
1557
+ logger.trace(pickler, "# Mp")
1558
+ return
1559
+
1560
+ @register(SliceType)
1561
+ def save_slice(pickler, obj):
1562
+ logger.trace(pickler, "Sl: %s", obj)
1563
+ pickler.save_reduce(slice, (obj.start, obj.stop, obj.step), obj=obj)
1564
+ logger.trace(pickler, "# Sl")
1565
+ return
1566
+
1567
+ @register(XRangeType)
1568
+ @register(EllipsisType)
1569
+ @register(NotImplementedType)
1570
+ def save_singleton(pickler, obj):
1571
+ logger.trace(pickler, "Si: %s", obj)
1572
+ pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
1573
+ logger.trace(pickler, "# Si")
1574
+ return
1575
+
1576
+ def _proxy_helper(obj): # a dead proxy returns a reference to None
1577
+ """get memory address of proxy's reference object"""
1578
+ _repr = repr(obj)
1579
+ try: _str = str(obj)
1580
+ except ReferenceError: # it's a dead proxy
1581
+ return id(None)
1582
+ if _str == _repr: return id(obj) # it's a repr
1583
+ try: # either way, it's a proxy from here
1584
+ address = int(_str.rstrip('>').split(' at ')[-1], base=16)
1585
+ except ValueError: # special case: proxy of a 'type'
1586
+ if not IS_PYPY:
1587
+ address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
1588
+ else:
1589
+ objects = iter(gc.get_objects())
1590
+ for _obj in objects:
1591
+ if repr(_obj) == _str: return id(_obj)
1592
+ # all bad below... nothing found so throw ReferenceError
1593
+ msg = "Cannot reference object for proxy at '%s'" % id(obj)
1594
+ raise ReferenceError(msg)
1595
+ return address
1596
+
1597
+ def _locate_object(address, module=None):
1598
+ """get object located at the given memory address (inverse of id(obj))"""
1599
+ special = [None, True, False] #XXX: more...?
1600
+ for obj in special:
1601
+ if address == id(obj): return obj
1602
+ if module:
1603
+ objects = iter(module.__dict__.values())
1604
+ else: objects = iter(gc.get_objects())
1605
+ for obj in objects:
1606
+ if address == id(obj): return obj
1607
+ # all bad below... nothing found so throw ReferenceError or TypeError
1608
+ try: address = hex(address)
1609
+ except TypeError:
1610
+ raise TypeError("'%s' is not a valid memory address" % str(address))
1611
+ raise ReferenceError("Cannot reference object at '%s'" % address)
1612
+
1613
+ @register(ReferenceType)
1614
+ def save_weakref(pickler, obj):
1615
+ refobj = obj()
1616
+ logger.trace(pickler, "R1: %s", obj)
1617
+ #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
1618
+ pickler.save_reduce(_create_weakref, (refobj,), obj=obj)
1619
+ logger.trace(pickler, "# R1")
1620
+ return
1621
+
1622
+ @register(ProxyType)
1623
+ @register(CallableProxyType)
1624
+ def save_weakproxy(pickler, obj):
1625
+ # Must do string substitution here and use %r to avoid ReferenceError.
1626
+ logger.trace(pickler, "R2: %r" % obj)
1627
+ refobj = _locate_object(_proxy_helper(obj))
1628
+ pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj)
1629
+ logger.trace(pickler, "# R2")
1630
+ return
1631
+
1632
+ def _is_builtin_module(module):
1633
+ if not hasattr(module, "__file__"): return True
1634
+ if module.__file__ is None: return False
1635
+ # If a module's file name starts with one of these prefixes, it should be a builtin
1636
+ # module, and so should always be pickled as a reference.
1637
+ names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
1638
+ rp = os.path.realpath
1639
+ # See https://github.com/uqfoundation/dill/issues/566
1640
+ return (
1641
+ any(
1642
+ module.__file__.startswith(getattr(sys, name))
1643
+ or rp(module.__file__).startswith(rp(getattr(sys, name)))
1644
+ for name in names
1645
+ if hasattr(sys, name)
1646
+ )
1647
+ or module.__file__.endswith(EXTENSION_SUFFIXES)
1648
+ or 'site-packages' in module.__file__
1649
+ )
1650
+
1651
+ def _is_imported_module(module):
1652
+ return getattr(module, '__loader__', None) is not None or module in sys.modules.values()
1653
+
1654
+ @register(ModuleType)
1655
+ def save_module(pickler, obj):
1656
+ if False: #_use_diff:
1657
+ if obj.__name__.split('.', 1)[0] != "dill":
1658
+ try:
1659
+ changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
1660
+ except RuntimeError: # not a memoized module, probably part of dill
1661
+ pass
1662
+ else:
1663
+ logger.trace(pickler, "M2: %s with diff", obj)
1664
+ logger.info("Diff: %s", changed.keys())
1665
+ pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
1666
+ state=changed)
1667
+ logger.trace(pickler, "# M2")
1668
+ return
1669
+
1670
+ logger.trace(pickler, "M1: %s", obj)
1671
+ pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
1672
+ logger.trace(pickler, "# M1")
1673
+ else:
1674
+ builtin_mod = _is_builtin_module(obj)
1675
+ is_session_main = is_dill(pickler, child=True) and obj is pickler._main
1676
+ if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
1677
+ or is_session_main):
1678
+ logger.trace(pickler, "M1: %s", obj)
1679
+ # Hack for handling module-type objects in load_module().
1680
+ mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
1681
+ # Second references are saved as __builtin__.__main__ in save_module_dict().
1682
+ main_dict = obj.__dict__.copy()
1683
+ for item in ('__builtins__', '__loader__'):
1684
+ main_dict.pop(item, None)
1685
+ for item in IPYTHON_SINGLETONS: #pragma: no cover
1686
+ if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
1687
+ del main_dict[item]
1688
+ pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
1689
+ logger.trace(pickler, "# M1")
1690
+ elif obj.__name__ == "dill._dill":
1691
+ logger.trace(pickler, "M2: %s", obj)
1692
+ pickler.save_global(obj, name="_dill")
1693
+ logger.trace(pickler, "# M2")
1694
+ else:
1695
+ logger.trace(pickler, "M2: %s", obj)
1696
+ pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
1697
+ logger.trace(pickler, "# M2")
1698
+ return
1699
+
1700
+ # The following function is based on '_extract_class_dict' from 'cloudpickle'
1701
+ # Copyright (c) 2012, Regents of the University of California.
1702
+ # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1703
+ # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
1704
+ def _get_typedict_type(cls, clsdict, attrs, postproc_list):
1705
+ """Retrieve a copy of the dict of a class without the inherited methods"""
1706
+ if len(cls.__bases__) == 1:
1707
+ inherited_dict = cls.__bases__[0].__dict__
1708
+ else:
1709
+ inherited_dict = {}
1710
+ for base in reversed(cls.__bases__):
1711
+ inherited_dict.update(base.__dict__)
1712
+ to_remove = []
1713
+ for name, value in dict.items(clsdict):
1714
+ try:
1715
+ base_value = inherited_dict[name]
1716
+ if value is base_value and hasattr(value, '__qualname__'):
1717
+ to_remove.append(name)
1718
+ except KeyError:
1719
+ pass
1720
+ for name in to_remove:
1721
+ dict.pop(clsdict, name)
1722
+
1723
+ if issubclass(type(cls), type):
1724
+ clsdict.pop('__dict__', None)
1725
+ clsdict.pop('__weakref__', None)
1726
+ # clsdict.pop('__prepare__', None)
1727
+ return clsdict, attrs
1728
+
1729
+ def _get_typedict_abc(obj, _dict, attrs, postproc_list):
1730
+ if hasattr(abc, '_get_dump'):
1731
+ (registry, _, _, _) = abc._get_dump(obj)
1732
+ register = obj.register
1733
+ postproc_list.extend((register, (reg(),)) for reg in registry)
1734
+ elif hasattr(obj, '_abc_registry'):
1735
+ registry = obj._abc_registry
1736
+ register = obj.register
1737
+ postproc_list.extend((register, (reg,)) for reg in registry)
1738
+ else:
1739
+ raise PicklingError("Cannot find registry of ABC %s" % (obj,))
1740
+
1741
+ if '_abc_registry' in _dict:
1742
+ _dict.pop('_abc_registry', None)
1743
+ _dict.pop('_abc_cache', None)
1744
+ _dict.pop('_abc_negative_cache', None)
1745
+ # _dict.pop('_abc_negative_cache_version', None)
1746
+ else:
1747
+ _dict.pop('_abc_impl', None)
1748
+ return _dict, attrs
1749
+
1750
+ @register(TypeType)
1751
+ def save_type(pickler, obj, postproc_list=None):
1752
+ if obj in _typemap:
1753
+ logger.trace(pickler, "T1: %s", obj)
1754
+ # if obj in _incedental_types:
1755
+ # warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
1756
+ pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
1757
+ logger.trace(pickler, "# T1")
1758
+ elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
1759
+ # special case: namedtuples
1760
+ logger.trace(pickler, "T6: %s", obj)
1761
+
1762
+ obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
1763
+ if obj.__name__ != obj_name:
1764
+ if postproc_list is None:
1765
+ postproc_list = []
1766
+ postproc_list.append((setattr, (obj, '__qualname__', obj_name)))
1767
+
1768
+ if not obj._field_defaults:
1769
+ _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
1770
+ else:
1771
+ defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
1772
+ _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
1773
+ logger.trace(pickler, "# T6")
1774
+ return
1775
+
1776
+ # special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta
1777
+ elif obj is type(None):
1778
+ logger.trace(pickler, "T7: %s", obj)
1779
+ #XXX: pickler.save_reduce(type, (None,), obj=obj)
1780
+ pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
1781
+ logger.trace(pickler, "# T7")
1782
+ elif obj is NotImplementedType:
1783
+ logger.trace(pickler, "T7: %s", obj)
1784
+ pickler.save_reduce(type, (NotImplemented,), obj=obj)
1785
+ logger.trace(pickler, "# T7")
1786
+ elif obj is EllipsisType:
1787
+ logger.trace(pickler, "T7: %s", obj)
1788
+ pickler.save_reduce(type, (Ellipsis,), obj=obj)
1789
+ logger.trace(pickler, "# T7")
1790
+ elif obj is EnumMeta:
1791
+ logger.trace(pickler, "T7: %s", obj)
1792
+ pickler.write(GLOBAL + b'enum\nEnumMeta\n')
1793
+ logger.trace(pickler, "# T7")
1794
+
1795
+ else:
1796
+ _byref = getattr(pickler, '_byref', None)
1797
+ obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
1798
+ incorrectly_named = not _locate_function(obj, pickler)
1799
+ if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
1800
+ if postproc_list is None:
1801
+ postproc_list = []
1802
+
1803
+ # thanks to Tom Stepleton for pointing out pickler._session is unneeded
1804
+ logger.trace(pickler, "T2: %s", obj)
1805
+ _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict
1806
+
1807
+ #print (_dict)
1808
+ #print ("%s\n%s" % (type(obj), obj.__name__))
1809
+ #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
1810
+ slots = _dict.get('__slots__', ())
1811
+ if type(slots) == str:
1812
+ # __slots__ accepts a single string
1813
+ slots = (slots,)
1814
+
1815
+ for name in slots:
1816
+ _dict.pop(name, None)
1817
+
1818
+ if isinstance(obj, abc.ABCMeta):
1819
+ logger.trace(pickler, "ABC: %s", obj)
1820
+ _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
1821
+ logger.trace(pickler, "# ABC")
1822
+
1823
+ qualname = getattr(obj, '__qualname__', None)
1824
+ if attrs is not None:
1825
+ for k, v in attrs.items():
1826
+ postproc_list.append((setattr, (obj, k, v)))
1827
+ # TODO: Consider using the state argument to save_reduce?
1828
+ if qualname is not None:
1829
+ postproc_list.append((setattr, (obj, '__qualname__', qualname)))
1830
+
1831
+ if not hasattr(obj, '__orig_bases__'):
1832
+ _save_with_postproc(pickler, (_create_type, (
1833
+ type(obj), obj.__name__, obj.__bases__, _dict
1834
+ )), obj=obj, postproc_list=postproc_list)
1835
+ else:
1836
+ # This case will always work, but might be overkill.
1837
+ _metadict = {
1838
+ 'metaclass': type(obj)
1839
+ }
1840
+
1841
+ if _dict:
1842
+ _dict_update = PartialType(_setitems, source=_dict)
1843
+ else:
1844
+ _dict_update = None
1845
+
1846
+ _save_with_postproc(pickler, (new_class, (
1847
+ obj.__name__, obj.__orig_bases__, _metadict, _dict_update
1848
+ )), obj=obj, postproc_list=postproc_list)
1849
+ logger.trace(pickler, "# T2")
1850
+ else:
1851
+ obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
1852
+ logger.trace(pickler, "T4: %s", obj)
1853
+ if incorrectly_named:
1854
+ warnings.warn(
1855
+ "Cannot locate reference to %r." % (obj,),
1856
+ PicklingWarning,
1857
+ stacklevel=3,
1858
+ )
1859
+ if obj_recursive:
1860
+ warnings.warn(
1861
+ "Cannot pickle %r: %s.%s has recursive self-references that "
1862
+ "trigger a RecursionError." % (obj, obj.__module__, obj_name),
1863
+ PicklingWarning,
1864
+ stacklevel=3,
1865
+ )
1866
+ #print (obj.__dict__)
1867
+ #print ("%s\n%s" % (type(obj), obj.__name__))
1868
+ #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
1869
+ StockPickler.save_global(pickler, obj, name=obj_name)
1870
+ logger.trace(pickler, "# T4")
1871
+ return
1872
+
1873
+ @register(property)
1874
+ @register(abc.abstractproperty)
1875
+ def save_property(pickler, obj):
1876
+ logger.trace(pickler, "Pr: %s", obj)
1877
+ pickler.save_reduce(type(obj), (obj.fget, obj.fset, obj.fdel, obj.__doc__),
1878
+ obj=obj)
1879
+ logger.trace(pickler, "# Pr")
1880
+
1881
+ @register(staticmethod)
1882
+ @register(classmethod)
1883
+ @register(abc.abstractstaticmethod)
1884
+ @register(abc.abstractclassmethod)
1885
+ def save_classmethod(pickler, obj):
1886
+ logger.trace(pickler, "Cm: %s", obj)
1887
+ orig_func = obj.__func__
1888
+
1889
+ # if type(obj.__dict__) is dict:
1890
+ # if obj.__dict__:
1891
+ # state = obj.__dict__
1892
+ # else:
1893
+ # state = None
1894
+ # else:
1895
+ # state = (None, {'__dict__', obj.__dict__})
1896
+
1897
+ pickler.save_reduce(type(obj), (orig_func,), obj=obj)
1898
+ logger.trace(pickler, "# Cm")
1899
+
1900
+ @register(FunctionType)
1901
+ def save_function(pickler, obj):
1902
+ if not _locate_function(obj, pickler):
1903
+ if type(obj.__code__) is not CodeType:
1904
+ # Some PyPy builtin functions have no module name, and thus are not
1905
+ # able to be located
1906
+ module_name = getattr(obj, '__module__', None)
1907
+ if module_name is None:
1908
+ module_name = __builtin__.__name__
1909
+ module = _import_module(module_name, safe=True)
1910
+ _pypy_builtin = False
1911
+ try:
1912
+ found, _ = _getattribute(module, obj.__qualname__)
1913
+ if getattr(found, '__func__', None) is obj:
1914
+ _pypy_builtin = True
1915
+ except AttributeError:
1916
+ pass
1917
+
1918
+ if _pypy_builtin:
1919
+ logger.trace(pickler, "F3: %s", obj)
1920
+ pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
1921
+ logger.trace(pickler, "# F3")
1922
+ return
1923
+
1924
+ logger.trace(pickler, "F1: %s", obj)
1925
+ _recurse = getattr(pickler, '_recurse', None)
1926
+ _postproc = getattr(pickler, '_postproc', None)
1927
+ _main_modified = getattr(pickler, '_main_modified', None)
1928
+ _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
1929
+ postproc_list = []
1930
+ if _recurse:
1931
+ # recurse to get all globals referred to by obj
1932
+ from .detect import globalvars
1933
+ globs_copy = globalvars(obj, recurse=True, builtin=True)
1934
+
1935
+ # Add the name of the module to the globs dictionary to prevent
1936
+ # the duplication of the dictionary. Pickle the unpopulated
1937
+ # globals dictionary and set the remaining items after the function
1938
+ # is created to correctly handle recursion.
1939
+ globs = {'__name__': obj.__module__}
1940
+ else:
1941
+ globs_copy = obj.__globals__
1942
+
1943
+ # If the globals is the __dict__ from the module being saved as a
1944
+ # session, substitute it by the dictionary being actually saved.
1945
+ if _main_modified and globs_copy is _original_main.__dict__:
1946
+ globs_copy = getattr(pickler, '_main', _original_main).__dict__
1947
+ globs = globs_copy
1948
+ # If the globals is a module __dict__, do not save it in the pickle.
1949
+ elif globs_copy is not None and obj.__module__ is not None and \
1950
+ getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
1951
+ globs = globs_copy
1952
+ else:
1953
+ globs = {'__name__': obj.__module__}
1954
+
1955
+ if globs_copy is not None and globs is not globs_copy:
1956
+ # In the case that the globals are copied, we need to ensure that
1957
+ # the globals dictionary is updated once all objects in the
1958
+ # dictionary have been created.
1959
+ glob_ids = {id(g) for g in globs_copy.values()}
1960
+ for stack_element in _postproc:
1961
+ if stack_element in glob_ids:
1962
+ _postproc[stack_element].append((_setitems, (globs, globs_copy)))
1963
+ break
1964
+ else:
1965
+ postproc_list.append((_setitems, (globs, globs_copy)))
1966
+
1967
+ closure = obj.__closure__
1968
+ state_dict = {}
1969
+ for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
1970
+ fattr = getattr(obj, fattrname, None)
1971
+ if fattr is not None:
1972
+ state_dict[fattrname] = fattr
1973
+ if obj.__qualname__ != obj.__name__:
1974
+ state_dict['__qualname__'] = obj.__qualname__
1975
+ if '__name__' not in globs or obj.__module__ != globs['__name__']:
1976
+ state_dict['__module__'] = obj.__module__
1977
+
1978
+ state = obj.__dict__
1979
+ if type(state) is not dict:
1980
+ state_dict['__dict__'] = state
1981
+ state = None
1982
+ if state_dict:
1983
+ state = state, state_dict
1984
+
1985
+ _save_with_postproc(pickler, (_create_function, (
1986
+ obj.__code__, globs, obj.__name__, obj.__defaults__,
1987
+ closure
1988
+ ), state), obj=obj, postproc_list=postproc_list)
1989
+
1990
+ # Lift closure cell update to earliest function (#458)
1991
+ if _postproc:
1992
+ topmost_postproc = next(iter(_postproc.values()), None)
1993
+ if closure and topmost_postproc:
1994
+ for cell in closure:
1995
+ possible_postproc = (setattr, (cell, 'cell_contents', obj))
1996
+ try:
1997
+ topmost_postproc.remove(possible_postproc)
1998
+ except ValueError:
1999
+ continue
2000
+
2001
+ # Change the value of the cell
2002
+ pickler.save_reduce(*possible_postproc)
2003
+ # pop None created by calling preprocessing step off stack
2004
+ pickler.write(POP)
2005
+
2006
+ logger.trace(pickler, "# F1")
2007
+ else:
2008
+ logger.trace(pickler, "F2: %s", obj)
2009
+ name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
2010
+ StockPickler.save_global(pickler, obj, name=name)
2011
+ logger.trace(pickler, "# F2")
2012
+ return
2013
+
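+ # A minimal, hypothetical sketch of the _recurse branch above as exposed through the
+ # public setting dill.settings['recurse']; CONST and f are assumed example names.
+ # >>> import dill
+ # >>> CONST = 1
+ # >>> f = lambda x: x + CONST
+ # >>> dill.settings['recurse'] = True   # pickle only the globals that f refers to
+ # >>> dill.loads(dill.dumps(f))(1)
+ # 2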
2014
+ if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
2015
+ _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
2016
+ _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
2017
+ _PyCapsule_New.restype = ctypes.py_object
2018
+ _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
2019
+ _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
2020
+ _PyCapsule_GetPointer.restype = ctypes.c_void_p
2021
+ _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
2022
+ _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
2023
+ _PyCapsule_GetDestructor.restype = ctypes.c_void_p
2024
+ _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
2025
+ _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
2026
+ _PyCapsule_GetContext.restype = ctypes.c_void_p
2027
+ _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
2028
+ _PyCapsule_GetName.argtypes = (ctypes.py_object,)
2029
+ _PyCapsule_GetName.restype = ctypes.c_char_p
2030
+ _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
2031
+ _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
2032
+ _PyCapsule_IsValid.restype = ctypes.c_bool
2033
+ _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
2034
+ _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
2035
+ _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
2036
+ _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
2037
+ _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
2038
+ _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
2039
+ _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
2040
+ _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
2041
+ #from _socket import CAPI as _testcapsule
2042
+ _testcapsule_name = b'dill._dill._testcapsule'
2043
+ _testcapsule = _PyCapsule_New(
2044
+ ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
2045
+ ctypes.c_char_p(_testcapsule_name),
2046
+ None
2047
+ )
2048
+ PyCapsuleType = type(_testcapsule)
2049
+ @register(PyCapsuleType)
2050
+ def save_capsule(pickler, obj):
2051
+ logger.trace(pickler, "Cap: %s", obj)
2052
+ name = _PyCapsule_GetName(obj)
2053
+ #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
2054
+ pointer = _PyCapsule_GetPointer(obj, name)
2055
+ context = _PyCapsule_GetContext(obj)
2056
+ destructor = _PyCapsule_GetDestructor(obj)
2057
+ pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
2058
+ logger.trace(pickler, "# Cap")
2059
+ _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
2060
+ _reverse_typemap['PyCapsuleType'] = PyCapsuleType
2061
+ _incedental_types.add(PyCapsuleType)
2062
+ else:
2063
+ _testcapsule = None
2064
+
2065
+
2066
+ #############################
2067
+ # A quick fix for issue #500
2068
+ # This should be removed when a better solution is found.
2069
+
2070
+ if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
2071
+ @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
2072
+ def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
2073
+ logger.trace(pickler, "DcHDF: %s", obj)
2074
+ pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
2075
+ logger.trace(pickler, "# DcHDF")
2076
+
2077
+ if hasattr(dataclasses, "MISSING"):
2078
+ @register(type(dataclasses.MISSING))
2079
+ def save_dataclasses_MISSING_TYPE(pickler, obj):
2080
+ logger.trace(pickler, "DcM: %s", obj)
2081
+ pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
2082
+ logger.trace(pickler, "# DcM")
2083
+
2084
+ if hasattr(dataclasses, "KW_ONLY"):
2085
+ @register(type(dataclasses.KW_ONLY))
2086
+ def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
2087
+ logger.trace(pickler, "DcKWO: %s", obj)
2088
+ pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
2089
+ logger.trace(pickler, "# DcKWO")
2090
+
2091
+ if hasattr(dataclasses, "_FIELD_BASE"):
2092
+ @register(dataclasses._FIELD_BASE)
2093
+ def save_dataclasses_FIELD_BASE(pickler, obj):
2094
+ logger.trace(pickler, "DcFB: %s", obj)
2095
+ pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
2096
+ logger.trace(pickler, "# DcFB")
2097
+
2098
+ #############################
2099
+
2100
+ # quick sanity checking
2101
+ def pickles(obj,exact=False,safe=False,**kwds):
2102
+ """
2103
+ Quick check if object pickles with dill.
2104
+
2105
+ If *exact=True* then an equality test is done to check if the reconstructed
2106
+ object matches the original object.
2107
+
2108
+ If *safe=True* then any exception raised in copy will signal that the
2109
+ object is not picklable, otherwise only pickling errors will be trapped.
2110
+
2111
+ Additional keyword arguments are the same as for :func:`dumps` and :func:`loads`.
2112
+ """
2113
+ if safe: exceptions = (Exception,) # RuntimeError, ValueError
2114
+ else:
2115
+ exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
2116
+ try:
2117
+ pik = copy(obj, **kwds)
2118
+ #FIXME: should check types match first, then check content if "exact"
2119
+ try:
2120
+ #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
2121
+ result = bool(pik.all() == obj.all())
2122
+ except (AttributeError, TypeError):
2123
+ warnings.filterwarnings('ignore') #FIXME: be specific
2124
+ result = pik == obj
2125
+ if warnings.filters: del warnings.filters[0]
2126
+ if hasattr(result, 'toarray'): # for unusual types like sparse matrix
2127
+ result = result.toarray().all()
2128
+ if result: return True
2129
+ if not exact:
2130
+ result = type(pik) == type(obj)
2131
+ if result: return result
2132
+ # class instances might have been dumped with byref=False
2133
+ return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
2134
+ return False
2135
+ except exceptions:
2136
+ return False
2137
+
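+ # A minimal, hypothetical sketch of `pickles` usage; the example objects below are
+ # assumptions, not taken from the source.
+ # >>> import dill
+ # >>> dill.pickles([1, 2, 3], exact=True)  # equality test on the reconstructed copy
+ # True
+ # >>> dill.pickles(lambda x: x)            # a type-level match suffices when exact=False
+ # True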
2138
+ def check(obj, *args, **kwds):
2139
+ """
2140
+ Check pickling of an object across another process.
2141
+
2142
+ *python* is the path to the python interpreter (defaults to sys.executable)
2143
+
2144
+ Set *verbose=True* to print the unpickled object in the other process.
2145
+
2146
+ Additional keyword arguments are the same as for :func:`dumps` and :func:`loads`.
2147
+ """
2148
+ # == undocumented ==
2149
+ # python -- the string path or executable name of the selected python
2150
+ # verbose -- if True, be verbose about printing warning messages
2151
+ # all other args and kwds are passed to dill.dumps #FIXME: ignore on load
2152
+ verbose = kwds.pop('verbose', False)
2153
+ python = kwds.pop('python', None)
2154
+ if python is None:
2155
+ import sys
2156
+ python = sys.executable
2157
+ # type check
2158
+ isinstance(python, str)
2159
+ import subprocess
2160
+ fail = True
2161
+ try:
2162
+ _obj = dumps(obj, *args, **kwds)
2163
+ fail = False
2164
+ finally:
2165
+ if fail and verbose:
2166
+ print("DUMP FAILED")
2167
+ #FIXME: fails if python interpreter path contains spaces
2168
+ # Use the following instead (which also processes the 'ignore' keyword):
2169
+ # ignore = kwds.pop('ignore', None)
2170
+ # unpickle = "dill.loads(%s, ignore=%s)"%(repr(_obj), repr(ignore))
2171
+ # cmd = [python, "-c", "import dill; print(%s)"%unpickle]
2172
+ # msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
2173
+ msg = "%s -c import dill; print(dill.loads(%s))" % (python, repr(_obj))
2174
+ msg = "SUCCESS" if not subprocess.call(msg.split(None,2)) else "LOAD FAILED"
2175
+ if verbose:
2176
+ print(msg)
2177
+ return
2178
+
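+ # A minimal, hypothetical sketch of `check` usage; the dict is an assumed example.
+ # The child interpreter prints the unpickled object, and verbose=True additionally
+ # reports "SUCCESS" or "LOAD FAILED" in this process.
+ # >>> import dill
+ # >>> dill.check({'a': 1}, verbose=True)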
2179
+ # use to protect against missing attributes
2180
+ def is_dill(pickler, child=None):
2181
+ "check the dill-ness of your pickler"
2182
+ if child is False or not hasattr(pickler.__class__, 'mro'):
2183
+ return 'dill' in pickler.__module__
2184
+ return Pickler in pickler.__class__.mro()
2185
+
2186
+ def _extend():
2187
+ """extend pickle with all of dill's registered types"""
2188
+ # need to have pickle not choke on _main_module? use is_dill(pickler)
2189
+ for t,func in Pickler.dispatch.items():
2190
+ try:
2191
+ StockPickler.dispatch[t] = func
2192
+ except Exception: #TypeError, PicklingError, UnpicklingError
2193
+ logger.trace(pickler, "skip: %s", t)
2194
+ return
2195
+
2196
+ del diff, _use_diff, use_diff
2197
+
2198
+ # EOF
llmeval-env/lib/python3.10/site-packages/dill/_objects.py ADDED
@@ -0,0 +1,537 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ all Python Standard Library objects (currently: CH 1-15 @ 2.7)
10
+ and some other common objects (i.e. numpy.ndarray)
11
+ """
12
+
13
+ __all__ = ['registered','failures','succeeds']
14
+
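+ # A minimal, hypothetical sketch of how these registries might be queried; the
+ # exact contents vary with the Python version and platform.
+ # >>> from dill._objects import registered, failures, succeeds
+ # >>> 'LoggerType' in succeeds
+ # True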
15
+ # helper imports
16
+ import warnings; warnings.filterwarnings("ignore", category=DeprecationWarning)
17
+ import sys
18
+ import queue as Queue
19
+ import dbm as anydbm
20
+ from io import BytesIO as StringIO
21
+ import re
22
+ import array
23
+ import collections
24
+ import codecs
25
+ import struct
26
+ import dataclasses
27
+ import datetime
28
+ import calendar
29
+ import weakref
30
+ import pprint
31
+ import decimal
32
+ import numbers
33
+ import functools
34
+ import itertools
35
+ import operator
36
+ import tempfile
37
+ import shelve
38
+ import zlib
39
+ import gzip
40
+ import zipfile
41
+ import tarfile
42
+ import csv
43
+ import hashlib
44
+ import hmac
45
+ import os
46
+ import logging
47
+ import logging.handlers
48
+ import optparse
49
+ #import __hello__
50
+ import threading
51
+ import socket
52
+ import contextlib
53
+ try:
54
+ import bz2
55
+ import sqlite3
56
+ import dbm.ndbm as dbm
57
+ HAS_ALL = True
58
+ except ImportError: # Ubuntu
59
+ HAS_ALL = False
60
+ try:
61
+ #import curses
62
+ #from curses import textpad, panel
63
+ HAS_CURSES = True
64
+ except ImportError: # Windows
65
+ HAS_CURSES = False
66
+ try:
67
+ import ctypes
68
+ HAS_CTYPES = True
69
+ # if using `pypy`, pythonapi is not found
70
+ IS_PYPY = not hasattr(ctypes, 'pythonapi')
71
+ except ImportError: # MacPorts
72
+ HAS_CTYPES = False
73
+ IS_PYPY = False
74
+
75
+ # helper objects
76
+ class _class:
77
+ def _method(self):
78
+ pass
79
+ # @classmethod
80
+ # def _clsmethod(cls): #XXX: test me
81
+ # pass
82
+ # @staticmethod
83
+ # def _static(self): #XXX: test me
84
+ # pass
85
+ class _class2:
86
+ def __call__(self):
87
+ pass
88
+ _instance2 = _class2()
89
+ class _newclass(object):
90
+ def _method(self):
91
+ pass
92
+ # @classmethod
93
+ # def _clsmethod(cls): #XXX: test me
94
+ # pass
95
+ # @staticmethod
96
+ # def _static(self): #XXX: test me
97
+ # pass
98
+ class _newclass2(object):
99
+ __slots__ = ['descriptor']
100
+ def _function(x): yield x
101
+ def _function2():
102
+ try: raise
103
+ except Exception:
104
+ from sys import exc_info
105
+ e, er, tb = exc_info()
106
+ return er, tb
107
+ if HAS_CTYPES:
108
+ class _Struct(ctypes.Structure):
109
+ pass
110
+ _Struct._fields_ = [("_field", ctypes.c_int),("next", ctypes.POINTER(_Struct))]
111
+ _filedescrip, _tempfile = tempfile.mkstemp('r') # deleted in cleanup
112
+ if sys.hexversion < 0x30d00a1:
113
+ _tmpf = tempfile.TemporaryFile('w') # emits OSError 9 in python 3.13
114
+ else:
115
+ _tmpf = tempfile.NamedTemporaryFile('w').file # for > python 3.9
116
+
117
+ # objects used by dill for type declaration
118
+ registered = d = {}
119
+ # objects dill fails to pickle
120
+ failures = x = {}
121
+ # all other type objects
122
+ succeeds = a = {}
123
+
124
+ # types module (part of CH 8)
125
+ a['BooleanType'] = bool(1)
126
+ a['BuiltinFunctionType'] = len
127
+ a['BuiltinMethodType'] = a['BuiltinFunctionType']
128
+ a['BytesType'] = _bytes = codecs.latin_1_encode('\x00')[0] # bytes(1)
129
+ a['ClassType'] = _class
130
+ a['ComplexType'] = complex(1)
131
+ a['DictType'] = _dict = {}
132
+ a['DictionaryType'] = a['DictType']
133
+ a['FloatType'] = float(1)
134
+ a['FunctionType'] = _function
135
+ a['InstanceType'] = _instance = _class()
136
+ a['IntType'] = _int = int(1)
137
+ a['ListType'] = _list = []
138
+ a['NoneType'] = None
139
+ a['ObjectType'] = object()
140
+ a['StringType'] = _str = str(1)
141
+ a['TupleType'] = _tuple = ()
142
+ a['TypeType'] = type
143
+ a['LongType'] = _int
144
+ a['UnicodeType'] = _str
145
+ # built-in constants (CH 4)
146
+ a['CopyrightType'] = copyright
147
+ # built-in types (CH 5)
148
+ a['ClassObjectType'] = _newclass # <type 'type'>
149
+ a['ClassInstanceType'] = _newclass() # <type 'class'>
150
+ a['SetType'] = _set = set()
151
+ a['FrozenSetType'] = frozenset()
152
+ # built-in exceptions (CH 6)
153
+ a['ExceptionType'] = _exception = _function2()[0]
154
+ # string services (CH 7)
155
+ a['SREPatternType'] = _srepattern = re.compile('')
156
+ # data types (CH 8)
157
+ a['ArrayType'] = array.array("f")
158
+ a['DequeType'] = collections.deque([0])
159
+ a['DefaultDictType'] = collections.defaultdict(_function, _dict)
160
+ a['TZInfoType'] = datetime.tzinfo()
161
+ a['DateTimeType'] = datetime.datetime.today()
162
+ a['CalendarType'] = calendar.Calendar()
163
+ # numeric and mathematical types (CH 9)
164
+ a['DecimalType'] = decimal.Decimal(1)
165
+ a['CountType'] = itertools.count(0)
166
+ # data compression and archiving (CH 12)
167
+ a['TarInfoType'] = tarfile.TarInfo()
168
+ # generic operating system services (CH 15)
169
+ a['LoggerType'] = _logger = logging.getLogger()
170
+ a['FormatterType'] = logging.Formatter() # pickle ok
171
+ a['FilterType'] = logging.Filter() # pickle ok
172
+ a['LogRecordType'] = logging.makeLogRecord(_dict) # pickle ok
173
+ a['OptionParserType'] = _oparser = optparse.OptionParser() # pickle ok
174
+ a['OptionGroupType'] = optparse.OptionGroup(_oparser,"foo") # pickle ok
175
+ a['OptionType'] = optparse.Option('--foo') # pickle ok
176
+ if HAS_CTYPES:
177
+ z = x if IS_PYPY else a
178
+ z['CCharType'] = _cchar = ctypes.c_char()
179
+ z['CWCharType'] = ctypes.c_wchar() # fail == 2.6
180
+ z['CByteType'] = ctypes.c_byte()
181
+ z['CUByteType'] = ctypes.c_ubyte()
182
+ z['CShortType'] = ctypes.c_short()
183
+ z['CUShortType'] = ctypes.c_ushort()
184
+ z['CIntType'] = ctypes.c_int()
185
+ z['CUIntType'] = ctypes.c_uint()
186
+ z['CLongType'] = ctypes.c_long()
187
+ z['CULongType'] = ctypes.c_ulong()
188
+ z['CLongLongType'] = ctypes.c_longlong()
189
+ z['CULongLongType'] = ctypes.c_ulonglong()
190
+ z['CFloatType'] = ctypes.c_float()
191
+ z['CDoubleType'] = ctypes.c_double()
192
+ z['CSizeTType'] = ctypes.c_size_t()
193
+ del z
194
+ a['CLibraryLoaderType'] = ctypes.cdll
195
+ a['StructureType'] = _Struct
196
+ # if not IS_PYPY:
197
+ # a['BigEndianStructureType'] = ctypes.BigEndianStructure()
198
+ #NOTE: also LittleEndianStructureType and UnionType... abstract classes
199
+ #NOTE: remember that ctypesobj.contents creates a new python object
200
+ #NOTE: ctypes.c_int._objects is memberdescriptor for object's __dict__
201
+ #NOTE: base class of all ctypes data types is non-public _CData
202
+
203
+ import fractions
204
+ import io
205
+ from io import StringIO as TextIO
206
+ # built-in functions (CH 2)
207
+ a['ByteArrayType'] = bytearray([1])
208
+ # numeric and mathematical types (CH 9)
209
+ a['FractionType'] = fractions.Fraction()
210
+ a['NumberType'] = numbers.Number()
211
+ # generic operating system services (CH 15)
212
+ a['IOBaseType'] = io.IOBase()
213
+ a['RawIOBaseType'] = io.RawIOBase()
214
+ a['TextIOBaseType'] = io.TextIOBase()
215
+ a['BufferedIOBaseType'] = io.BufferedIOBase()
216
+ a['UnicodeIOType'] = TextIO() # the new StringIO
217
+ a['LoggerAdapterType'] = logging.LoggerAdapter(_logger,_dict) # pickle ok
218
+ if HAS_CTYPES:
219
+ z = x if IS_PYPY else a
220
+ z['CBoolType'] = ctypes.c_bool(1)
221
+ z['CLongDoubleType'] = ctypes.c_longdouble()
222
+ del z
223
+ import argparse
224
+ # data types (CH 8)
225
+ a['OrderedDictType'] = collections.OrderedDict(_dict)
226
+ a['CounterType'] = collections.Counter(_dict)
227
+ if HAS_CTYPES:
228
+ z = x if IS_PYPY else a
229
+ z['CSSizeTType'] = ctypes.c_ssize_t()
230
+ del z
231
+ # generic operating system services (CH 15)
232
+ a['NullHandlerType'] = logging.NullHandler() # pickle ok # new 2.7
233
+ a['ArgParseFileType'] = argparse.FileType() # pickle ok
234
+
235
+ # -- pickle fails on all below here -----------------------------------------
236
+ # types module (part of CH 8)
237
+ a['CodeType'] = compile('','','exec')
238
+ a['DictProxyType'] = type.__dict__
239
+ a['DictProxyType2'] = _newclass.__dict__
240
+ a['EllipsisType'] = Ellipsis
241
+ a['ClosedFileType'] = open(os.devnull, 'wb', buffering=0).close()
242
+ a['GetSetDescriptorType'] = array.array.typecode
243
+ a['LambdaType'] = _lambda = lambda x: lambda y: x #XXX: works when not imported!
244
+ a['MemberDescriptorType'] = _newclass2.descriptor
245
+ if not IS_PYPY:
246
+ a['MemberDescriptorType2'] = datetime.timedelta.days
247
+ a['MethodType'] = _method = _class()._method #XXX: works when not imported!
248
+ a['ModuleType'] = datetime
249
+ a['NotImplementedType'] = NotImplemented
250
+ a['SliceType'] = slice(1)
251
+ a['UnboundMethodType'] = _class._method #XXX: works when not imported!
252
+ d['TextWrapperType'] = open(os.devnull, 'r') # same as mode='w','w+','r+'
253
+ d['BufferedRandomType'] = open(os.devnull, 'r+b') # same as mode='w+b'
254
+ d['BufferedReaderType'] = open(os.devnull, 'rb') # (default: buffering=-1)
255
+ d['BufferedWriterType'] = open(os.devnull, 'wb')
256
+ try: # oddities: deprecated
257
+ from _pyio import open as _open
258
+ d['PyTextWrapperType'] = _open(os.devnull, 'r', buffering=-1)
259
+ d['PyBufferedRandomType'] = _open(os.devnull, 'r+b', buffering=-1)
260
+ d['PyBufferedReaderType'] = _open(os.devnull, 'rb', buffering=-1)
261
+ d['PyBufferedWriterType'] = _open(os.devnull, 'wb', buffering=-1)
262
+ except ImportError:
263
+ pass
264
+ # other (concrete) object types
265
+ z = d if sys.hexversion < 0x30800a2 else a
266
+ z['CellType'] = (_lambda)(0).__closure__[0]
267
+ del z
268
+ a['XRangeType'] = _xrange = range(1)
269
+ a['MethodDescriptorType'] = type.__dict__['mro']
270
+ a['WrapperDescriptorType'] = type.__repr__
271
+ #a['WrapperDescriptorType2'] = type.__dict__['__module__']#XXX: GetSetDescriptor
272
+ a['ClassMethodDescriptorType'] = type.__dict__['__prepare__']
273
+ # built-in functions (CH 2)
274
+ _methodwrap = (1).__lt__
275
+ a['MethodWrapperType'] = _methodwrap
276
+ a['StaticMethodType'] = staticmethod(_method)
277
+ a['ClassMethodType'] = classmethod(_method)
278
+ a['PropertyType'] = property()
279
+ d['SuperType'] = super(Exception, _exception)
280
+ # string services (CH 7)
281
+ _in = _bytes
282
+ a['InputType'] = _cstrI = StringIO(_in)
283
+ a['OutputType'] = _cstrO = StringIO()
284
+ # data types (CH 8)
285
+ a['WeakKeyDictionaryType'] = weakref.WeakKeyDictionary()
286
+ a['WeakValueDictionaryType'] = weakref.WeakValueDictionary()
287
+ a['ReferenceType'] = weakref.ref(_instance)
288
+ a['DeadReferenceType'] = weakref.ref(_class())
289
+ a['ProxyType'] = weakref.proxy(_instance)
290
+ a['DeadProxyType'] = weakref.proxy(_class())
291
+ a['CallableProxyType'] = weakref.proxy(_instance2)
292
+ a['DeadCallableProxyType'] = weakref.proxy(_class2())
293
+ a['QueueType'] = Queue.Queue()
294
+ # numeric and mathematical types (CH 9)
295
+ d['PartialType'] = functools.partial(int,base=2)
296
+ a['IzipType'] = zip('0','1')
297
+ a['ChainType'] = itertools.chain('0','1')
298
+ d['ItemGetterType'] = operator.itemgetter(0)
299
+ d['AttrGetterType'] = operator.attrgetter('__repr__')
300
+ # file and directory access (CH 10)
301
+ _fileW = _cstrO
302
+ # data persistence (CH 11)
303
+ if HAS_ALL:
304
+ x['ConnectionType'] = _conn = sqlite3.connect(':memory:')
305
+ x['CursorType'] = _conn.cursor()
306
+ a['ShelveType'] = shelve.Shelf({})
307
+ # data compression and archiving (CH 12)
308
+ if HAS_ALL:
309
+ x['BZ2FileType'] = bz2.BZ2File(os.devnull)
310
+ x['BZ2CompressorType'] = bz2.BZ2Compressor()
311
+ x['BZ2DecompressorType'] = bz2.BZ2Decompressor()
312
+ #x['ZipFileType'] = _zip = zipfile.ZipFile(os.devnull,'w')
313
+ #_zip.write(_tempfile,'x') [causes annoying warning/error printed on import]
314
+ #a['ZipInfoType'] = _zip.getinfo('x')
315
+ a['TarFileType'] = tarfile.open(fileobj=_fileW,mode='w')
316
+ # file formats (CH 13)
317
+ x['DialectType'] = csv.get_dialect('excel')
318
+ if sys.hexversion < 0x30d00a1:
319
+ import xdrlib
320
+ a['PackerType'] = xdrlib.Packer()
321
+ # optional operating system services (CH 16)
322
+ a['LockType'] = threading.Lock()
323
+ a['RLockType'] = threading.RLock()
324
+ # generic operating system services (CH 15) # also closed/open and r/w/etc...
325
+ a['NamedLoggerType'] = _logger = logging.getLogger(__name__)
326
+ #a['FrozenModuleType'] = __hello__ #FIXME: prints "Hello world..."
327
+ # interprocess communication (CH 17)
328
+ x['SocketType'] = _socket = socket.socket()
329
+ x['SocketPairType'] = socket.socketpair()[0]
330
+ # python runtime services (CH 27)
331
+ a['GeneratorContextManagerType'] = contextlib.contextmanager(max)([1])
332
+
333
+ try: # ipython
334
+ __IPYTHON__ is True # is ipython
335
+ except NameError:
336
+ # built-in constants (CH 4)
337
+ a['QuitterType'] = quit
338
+ d['ExitType'] = a['QuitterType']
339
+ try: # numpy #FIXME: slow... 0.05 to 0.1 sec to import numpy
340
+ from numpy import ufunc as _numpy_ufunc
341
+ from numpy import array as _numpy_array
342
+ from numpy import int32 as _numpy_int32
343
+ a['NumpyUfuncType'] = _numpy_ufunc
344
+ a['NumpyArrayType'] = _numpy_array
345
+ a['NumpyInt32Type'] = _numpy_int32
346
+ except ImportError:
347
+ pass
348
+ # numeric and mathematical types (CH 9)
349
+ a['ProductType'] = itertools.product('0','1')
350
+ # generic operating system services (CH 15)
351
+ a['FileHandlerType'] = logging.FileHandler(os.devnull)
352
+ a['RotatingFileHandlerType'] = logging.handlers.RotatingFileHandler(os.devnull)
353
+ a['SocketHandlerType'] = logging.handlers.SocketHandler('localhost',514)
354
+ a['MemoryHandlerType'] = logging.handlers.MemoryHandler(1)
355
+ # data types (CH 8)
356
+ a['WeakSetType'] = weakref.WeakSet() # 2.7
357
+ # generic operating system services (CH 15) [errors when dill is imported]
358
+ #a['ArgumentParserType'] = _parser = argparse.ArgumentParser('PROG')
359
+ #a['NamespaceType'] = _parser.parse_args() # pickle ok
360
+ #a['SubParsersActionType'] = _parser.add_subparsers()
361
+ #a['MutuallyExclusiveGroupType'] = _parser.add_mutually_exclusive_group()
362
+ #a['ArgumentGroupType'] = _parser.add_argument_group()
363
+
364
+ # -- dill fails in some versions below here ---------------------------------
365
+ # types module (part of CH 8)
366
+ d['FileType'] = open(os.devnull, 'rb', buffering=0) # same 'wb','wb+','rb+'
367
+ # built-in functions (CH 2)
368
+ # Iterators:
369
+ a['ListIteratorType'] = iter(_list) # empty vs non-empty
370
+ a['SetIteratorType'] = iter(_set) #XXX: empty vs non-empty #FIXME: list_iterator
371
+ a['TupleIteratorType']= iter(_tuple) # empty vs non-empty
372
+ a['XRangeIteratorType'] = iter(_xrange) # empty vs non-empty
373
+ a["BytesIteratorType"] = iter(b'')
374
+ a["BytearrayIteratorType"] = iter(bytearray(b''))
375
+ z = x if IS_PYPY else a
376
+ z["CallableIteratorType"] = iter(iter, None)
377
+ del z
378
+ x["MemoryIteratorType"] = iter(memoryview(b''))
379
+ a["ListReverseiteratorType"] = reversed([])
380
+ X = a['OrderedDictType']
381
+ d["OdictKeysType"] = X.keys()
382
+ d["OdictValuesType"] = X.values()
383
+ d["OdictItemsType"] = X.items()
384
+ a["OdictIteratorType"] = iter(X.keys()) #FIXME: list_iterator
385
+ del X
386
+ #FIXME: list_iterator
387
+ a['DictionaryItemIteratorType'] = iter(type.__dict__.items())
388
+ a['DictionaryKeyIteratorType'] = iter(type.__dict__.keys())
389
+ a['DictionaryValueIteratorType'] = iter(type.__dict__.values())
390
+ if sys.hexversion >= 0x30800a0:
391
+ a["DictReversekeyiteratorType"] = reversed({}.keys())
392
+ a["DictReversevalueiteratorType"] = reversed({}.values())
393
+ a["DictReverseitemiteratorType"] = reversed({}.items())
394
+
395
+ try:
396
+ import symtable
397
+ #FIXME: fails to pickle
398
+ x["SymtableEntryType"] = symtable.symtable("", "string", "exec")._table
399
+ except ImportError:
400
+ pass
401
+
402
+ if sys.hexversion >= 0x30a00a0 and not IS_PYPY:
403
+ x['LineIteratorType'] = compile('3', '', 'eval').co_lines()
404
+
405
+ if sys.hexversion >= 0x30b00b0:
406
+ from types import GenericAlias
407
+ d["GenericAliasIteratorType"] = iter(GenericAlias(list, (int,)))
408
+ x['PositionsIteratorType'] = compile('3', '', 'eval').co_positions()
409
+
410
+ # data types (CH 8)
411
+ a['PrettyPrinterType'] = pprint.PrettyPrinter()
412
+ # numeric and mathematical types (CH 9)
413
+ a['CycleType'] = itertools.cycle('0')
414
+ # file and directory access (CH 10)
415
+ a['TemporaryFileType'] = _tmpf
416
+ # data compression and archiving (CH 12)
417
+ x['GzipFileType'] = gzip.GzipFile(fileobj=_fileW)
418
+ # generic operating system services (CH 15)
419
+ a['StreamHandlerType'] = logging.StreamHandler()
420
+ # numeric and mathematical types (CH 9)
421
+ a['PermutationsType'] = itertools.permutations('0')
422
+ a['CombinationsType'] = itertools.combinations('0',1)
423
+ a['RepeatType'] = itertools.repeat(0)
424
+ a['CompressType'] = itertools.compress('0',[1])
425
+ #XXX: ...and etc
426
+
427
+ # -- dill fails on all below here -------------------------------------------
428
+ # types module (part of CH 8)
429
+ x['GeneratorType'] = _generator = _function(1) #XXX: priority
430
+ x['FrameType'] = _generator.gi_frame #XXX: inspect.currentframe()
431
+ x['TracebackType'] = _function2()[1] #(see: inspect.getouterframes,getframeinfo)
432
+ # other (concrete) object types
433
+ # (also: Capsule / CObject ?)
434
+ # built-in functions (CH 2)
435
+ # built-in types (CH 5)
436
+ # string services (CH 7)
437
+ x['StructType'] = struct.Struct('c')
438
+ x['CallableIteratorType'] = _srepattern.finditer('')
439
+ x['SREMatchType'] = _srepattern.match('')
440
+ x['SREScannerType'] = _srepattern.scanner('')
441
+ x['StreamReader'] = codecs.StreamReader(_cstrI) #XXX: ... and etc
442
+ # python object persistence (CH 11)
443
+ # x['DbShelveType'] = shelve.open('foo','n')#,protocol=2) #XXX: delete foo
444
+ if HAS_ALL:
445
+ z = a if IS_PYPY else x
446
+ z['DbmType'] = dbm.open(_tempfile,'n')
447
+ del z
448
+ # x['DbCursorType'] = _dbcursor = anydbm.open('foo','n') #XXX: delete foo
449
+ # x['DbType'] = _dbcursor.db
450
+ # data compression and archiving (CH 12)
451
+ x['ZlibCompressType'] = zlib.compressobj()
452
+ x['ZlibDecompressType'] = zlib.decompressobj()
453
+ # file formats (CH 13)
454
+ x['CSVReaderType'] = csv.reader(_cstrI)
455
+ x['CSVWriterType'] = csv.writer(_cstrO)
456
+ x['CSVDictReaderType'] = csv.DictReader(_cstrI)
457
+ x['CSVDictWriterType'] = csv.DictWriter(_cstrO,{})
458
+ # cryptographic services (CH 14)
459
+ x['HashType'] = hashlib.md5()
460
+ if (sys.hexversion < 0x30800a1):
461
+ x['HMACType'] = hmac.new(_in)
462
+ else:
463
+ x['HMACType'] = hmac.new(_in, digestmod='md5')
464
+ # generic operating system services (CH 15)
465
+ if HAS_CURSES: pass
466
+ #x['CursesWindowType'] = _curwin = curses.initscr() #FIXME: messes up tty
467
+ #x['CursesTextPadType'] = textpad.Textbox(_curwin)
468
+ #x['CursesPanelType'] = panel.new_panel(_curwin)
469
+ if HAS_CTYPES:
470
+ x['CCharPType'] = ctypes.c_char_p()
471
+ x['CWCharPType'] = ctypes.c_wchar_p()
472
+ x['CVoidPType'] = ctypes.c_void_p()
473
+ if sys.platform[:3] == 'win':
474
+ x['CDLLType'] = _cdll = ctypes.cdll.msvcrt
475
+ else:
476
+ x['CDLLType'] = _cdll = ctypes.CDLL(None)
477
+ if not IS_PYPY:
478
+ x['PyDLLType'] = _pydll = ctypes.pythonapi
479
+ x['FuncPtrType'] = _cdll._FuncPtr()
480
+ x['CCharArrayType'] = ctypes.create_string_buffer(1)
481
+ x['CWCharArrayType'] = ctypes.create_unicode_buffer(1)
482
+ x['CParamType'] = ctypes.byref(_cchar)
483
+ x['LPCCharType'] = ctypes.pointer(_cchar)
484
+ x['LPCCharObjType'] = _lpchar = ctypes.POINTER(ctypes.c_char)
485
+ x['NullPtrType'] = _lpchar()
486
+ x['NullPyObjectType'] = ctypes.py_object()
487
+ x['PyObjectType'] = ctypes.py_object(lambda :None)
488
+ z = a if IS_PYPY else x
489
+ z['FieldType'] = _field = _Struct._field
490
+ z['CFUNCTYPEType'] = _cfunc = ctypes.CFUNCTYPE(ctypes.c_char)
491
+ if sys.hexversion < 0x30c00b3:
492
+ x['CFunctionType'] = _cfunc(str)
493
+ del z
494
+ # numeric and mathematical types (CH 9)
495
+ a['MethodCallerType'] = operator.methodcaller('mro') # 2.6
496
+ # built-in types (CH 5)
497
+ x['MemoryType'] = memoryview(_in) # 2.7
498
+ x['MemoryType2'] = memoryview(bytearray(_in)) # 2.7
499
+ d['DictItemsType'] = _dict.items() # 2.7
500
+ d['DictKeysType'] = _dict.keys() # 2.7
501
+ d['DictValuesType'] = _dict.values() # 2.7
502
+ # generic operating system services (CH 15)
503
+ a['RawTextHelpFormatterType'] = argparse.RawTextHelpFormatter('PROG')
504
+ a['RawDescriptionHelpFormatterType'] = argparse.RawDescriptionHelpFormatter('PROG')
505
+ a['ArgDefaultsHelpFormatterType'] = argparse.ArgumentDefaultsHelpFormatter('PROG')
506
+ z = a if IS_PYPY else x
507
+ z['CmpKeyType'] = _cmpkey = functools.cmp_to_key(_methodwrap) # 2.7, >=3.2
508
+ z['CmpKeyObjType'] = _cmpkey('0') #2.7, >=3.2
509
+ del z
510
+ # oddities: removed, etc
511
+ x['BufferType'] = x['MemoryType']
512
+
513
+ from dill._dill import _testcapsule
514
+ if _testcapsule is not None:
515
+ d['PyCapsuleType'] = _testcapsule
516
+ del _testcapsule
517
+
518
+ if hasattr(dataclasses, '_HAS_DEFAULT_FACTORY'):
519
+ a['DataclassesHasDefaultFactoryType'] = dataclasses._HAS_DEFAULT_FACTORY
520
+
521
+ if hasattr(dataclasses, 'MISSING'):
522
+ a['DataclassesMissingType'] = dataclasses.MISSING
523
+
524
+ if hasattr(dataclasses, 'KW_ONLY'):
525
+ a['DataclassesKWOnlyType'] = dataclasses.KW_ONLY
526
+
527
+ if hasattr(dataclasses, '_FIELD_BASE'):
528
+ a['DataclassesFieldBaseType'] = dataclasses._FIELD
529
+
530
+ # -- cleanup ----------------------------------------------------------------
531
+ a.update(d) # registered also succeed
532
+ if sys.platform[:3] == 'win':
533
+ os.close(_filedescrip) # required on win32
534
+ os.remove(_tempfile)
535
+
536
+
537
+ # EOF
llmeval-env/lib/python3.10/site-packages/dill/_shims.py ADDED
@@ -0,0 +1,193 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Author: Anirudh Vegesana ([email protected])
5
+ # Copyright (c) 2021-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ Provides shims for compatibility between versions of dill and Python.
10
+
11
+ Compatibility shims should be provided in this file. Here are two simple example
12
+ use cases.
13
+
14
+ Deprecation of constructor function:
15
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16
+ Assume that we were transitioning _import_module in _dill.py to
17
+ the builtin function importlib.import_module when present.
18
+
19
+ @move_to(_dill)
20
+ def _import_module(import_name):
21
+ ... # code already in _dill.py
22
+
23
+ _import_module = Getattr(importlib, 'import_module', Getattr(_dill, '_import_module', None))
24
+
25
+ The code will attempt to find import_module in the importlib module. If not
26
+ present, it will use the _import_module function in _dill.
27
+
28
+ Emulate new Python behavior in older Python versions:
29
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
30
+ CellType.cell_contents behaves differently in Python 3.6 and 3.7. It is
31
+ read-only in Python 3.6 and writable and deletable in 3.7.
32
+
33
+ if _dill.OLD37 and _dill.HAS_CTYPES and ...:
34
+ @move_to(_dill)
35
+ def _setattr(object, name, value):
36
+ if type(object) is _dill.CellType and name == 'cell_contents':
37
+ _PyCell_Set.argtypes = (ctypes.py_object, ctypes.py_object)
38
+ _PyCell_Set(object, value)
39
+ else:
40
+ setattr(object, name, value)
41
+ ... # more cases below
42
+
43
+ _setattr = Getattr(_dill, '_setattr', setattr)
44
+
45
+ _dill._setattr will be used when present to emulate Python 3.7 functionality in
46
+ older versions of Python while defaulting to the standard setattr in 3.7+.
47
+
48
+ See this PR for the discussion that led to this system:
49
+ https://github.com/uqfoundation/dill/pull/443
50
+ """
51
+
52
+ import inspect
53
+ import sys
54
+
55
+ _dill = sys.modules['dill._dill']
56
+
57
+
58
+ class Reduce(object):
59
+ """
60
+ Reduce objects are wrappers used for compatibility enforcement during
61
+ unpickle-time. They should only be used in calls to pickler.save and
62
+ other Reduce objects. They are only evaluated within unpickler.load.
63
+
64
+ Pickling a Reduce object makes the two implementations equivalent:
65
+
66
+ pickler.save(Reduce(*reduction))
67
+
68
+ pickler.save_reduce(*reduction, obj=reduction)
69
+ """
70
+ __slots__ = ['reduction']
71
+ def __new__(cls, *reduction, **kwargs):
72
+ """
73
+ Args:
74
+ *reduction: a tuple that matches the format given here:
75
+ https://docs.python.org/3/library/pickle.html#object.__reduce__
76
+ is_callable: a bool to indicate that the object created by
77
+ unpickling `reduction` is callable. If true, the current Reduce
78
+ is allowed to be used as the function in further save_reduce calls
79
+ or Reduce objects.
80
+ """
81
+ is_callable = kwargs.get('is_callable', False) # Pleases Py2. Can be removed later
82
+ if is_callable:
83
+ self = object.__new__(_CallableReduce)
84
+ else:
85
+ self = object.__new__(Reduce)
86
+ self.reduction = reduction
87
+ return self
88
+ def __repr__(self):
89
+ return 'Reduce%s' % (self.reduction,)
90
+ def __copy__(self):
91
+ return self # pragma: no cover
92
+ def __deepcopy__(self, memo):
93
+ return self # pragma: no cover
94
+ def __reduce__(self):
95
+ return self.reduction
96
+ def __reduce_ex__(self, protocol):
97
+ return self.__reduce__()
98
+
99
+ class _CallableReduce(Reduce):
100
+ # A version of Reduce for functions. Used to trick pickler.save_reduce into
101
+ # thinking that Reduce objects of functions are themselves meaningful functions.
102
+ def __call__(self, *args, **kwargs):
103
+ reduction = self.__reduce__()
104
+ func = reduction[0]
105
+ f_args = reduction[1]
106
+ obj = func(*f_args)
107
+ return obj(*args, **kwargs)
108
+
109
+ __NO_DEFAULT = _dill.Sentinel('Getattr.NO_DEFAULT')
110
+
111
+ def Getattr(object, name, default=__NO_DEFAULT):
112
+ """
113
+ A Reduce object that represents the getattr operation. When unpickled, the
114
+ Getattr will access an attribute 'name' of 'object' and return the value
115
+ stored there. If the attribute doesn't exist, the default value will be
116
+ returned if present.
117
+
118
+ The following statements are equivalent:
119
+
120
+ Getattr(collections, 'OrderedDict')
121
+ Getattr(collections, 'spam', None)
122
+ Getattr(*args)
123
+
124
+ Reduce(getattr, (collections, 'OrderedDict'))
125
+ Reduce(getattr, (collections, 'spam', None))
126
+ Reduce(getattr, args)
127
+
128
+ During unpickling, the first two will result in collections.OrderedDict and
129
+ None respectively because the first attribute exists and the second one does
130
+ not, forcing it to use the default value given in the third argument.
131
+ """
132
+
133
+ if default is Getattr.NO_DEFAULT:
134
+ reduction = (getattr, (object, name))
135
+ else:
136
+ reduction = (getattr, (object, name, default))
137
+
138
+ return Reduce(*reduction, is_callable=callable(default))
139
+
140
+ Getattr.NO_DEFAULT = __NO_DEFAULT
141
+ del __NO_DEFAULT
142
+
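+ # A minimal, hypothetical sketch showing that a Getattr reduction is resolved only
+ # at unpickle time; collections/'OrderedDict' are assumed example arguments.
+ # >>> import collections, dill
+ # >>> deferred = Getattr(collections, 'OrderedDict')
+ # >>> dill.loads(dill.dumps(deferred)) is collections.OrderedDict
+ # True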
143
+ def move_to(module, name=None):
144
+ def decorator(func):
145
+ if name is None:
146
+ fname = func.__name__
147
+ else:
148
+ fname = name
149
+ module.__dict__[fname] = func
150
+ func.__module__ = module.__name__
151
+ return func
152
+ return decorator
153
+
154
+ def register_shim(name, default):
155
+ """
156
+ An easier to understand and more compact way of "softly" defining a function.
157
+ These two pieces of code are equivalent:
158
+
159
+ if _dill.OLD3X:
160
+ def _create_class():
161
+ ...
162
+ _create_class = register_shim('_create_class', types.new_class)
163
+
164
+ if _dill.OLD3X:
165
+ @move_to(_dill)
166
+ def _create_class():
167
+ ...
168
+ _create_class = Getattr(_dill, '_create_class', types.new_class)
169
+
170
+ Intuitively, it creates a function or object in the versions of dill/python
171
+ that require special reimplementations, and uses a core library or default
172
+ implementation if that function or object does not exist.
173
+ """
174
+ func = globals().get(name)
175
+ if func is not None:
176
+ _dill.__dict__[name] = func
177
+ func.__module__ = _dill.__name__
178
+
179
+ if default is Getattr.NO_DEFAULT:
180
+ reduction = (getattr, (_dill, name))
181
+ else:
182
+ reduction = (getattr, (_dill, name, default))
183
+
184
+ return Reduce(*reduction, is_callable=callable(default))
185
+
186
+ ######################
187
+ ## Compatibility Shims are defined below
188
+ ######################
189
+
190
+ _CELL_EMPTY = register_shim('_CELL_EMPTY', None)
191
+
192
+ _setattr = register_shim('_setattr', setattr)
193
+ _delattr = register_shim('_delattr', delattr)
llmeval-env/lib/python3.10/site-packages/dill/detect.py ADDED
@@ -0,0 +1,284 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ Methods for detecting objects leading to pickling failures.
10
+ """
11
+
12
+ import dis
13
+ from inspect import ismethod, isfunction, istraceback, isframe, iscode
14
+
15
+ from .pointers import parent, reference, at, parents, children
16
+ from .logger import trace
17
+
18
+ __all__ = ['baditems','badobjects','badtypes','code','errors','freevars',
19
+ 'getmodule','globalvars','nestedcode','nestedglobals','outermost',
20
+ 'referredglobals','referrednested','trace','varnames']
21
+
22
+ def getmodule(object, _filename=None, force=False):
23
+ """get the module of the object"""
24
+ from inspect import getmodule as getmod
25
+ module = getmod(object, _filename)
26
+ if module or not force: return module
27
+ import builtins
28
+ from .source import getname
29
+ name = getname(object, force=True)
30
+ return builtins if name in vars(builtins).keys() else None
31
+
32
+ def outermost(func): # is analogous to getsource(func,enclosing=True)
33
+ """get outermost enclosing object (i.e. the outer function in a closure)
34
+
35
+ NOTE: this is the object-equivalent of getsource(func, enclosing=True)
36
+ """
37
+ if ismethod(func):
38
+ _globals = func.__func__.__globals__ or {}
39
+ elif isfunction(func):
40
+ _globals = func.__globals__ or {}
41
+ else:
42
+ return #XXX: or raise? no matches
43
+ _globals = _globals.items()
44
+ # get the enclosing source
45
+ from .source import getsourcelines
46
+ try: lines,lnum = getsourcelines(func, enclosing=True)
47
+ except Exception: #TypeError, IOError
48
+ lines,lnum = [],None
49
+ code = ''.join(lines)
50
+ # get all possible names,objects that are named in the enclosing source
51
+ _locals = ((name,obj) for (name,obj) in _globals if name in code)
52
+ # now only save the objects that generate the enclosing block
53
+ for name,obj in _locals: #XXX: don't really need 'name'
54
+ try:
55
+ if getsourcelines(obj) == (lines,lnum): return obj
56
+ except Exception: #TypeError, IOError
57
+ pass
58
+ return #XXX: or raise? no matches
59
+
60
+ def nestedcode(func, recurse=True): #XXX: or return dict of {co_name: co} ?
61
+ """get the code objects for any nested functions (e.g. in a closure)"""
62
+ func = code(func)
63
+ if not iscode(func): return [] #XXX: or raise? no matches
64
+ nested = set()
65
+ for co in func.co_consts:
66
+ if co is None: continue
67
+ co = code(co)
68
+ if co:
69
+ nested.add(co)
70
+ if recurse: nested |= set(nestedcode(co, recurse=True))
71
+ return list(nested)
72
+
73
+ def code(func):
74
+ """get the code object for the given function or method
75
+
76
+ NOTE: use dill.source.getsource(CODEOBJ) to get the source code
77
+ """
78
+ if ismethod(func): func = func.__func__
79
+ if isfunction(func): func = func.__code__
80
+ if istraceback(func): func = func.tb_frame
81
+ if isframe(func): func = func.f_code
82
+ if iscode(func): return func
83
+ return
84
+
85
+ #XXX: ugly: parse dis.dis for name after "<code object" in line and in globals?
86
+ def referrednested(func, recurse=True): #XXX: return dict of {__name__: obj} ?
87
+ """get functions defined inside of func (e.g. inner functions in a closure)
88
+
89
+ NOTE: results may differ if the function has been executed or not.
90
+ If len(nestedcode(func)) > len(referrednested(func)), try calling func().
91
+ If possible, python builds code objects, but delays building functions
92
+ until func() is called.
93
+ """
94
+ import gc
95
+ funcs = set()
96
+ # get the code objects, and try to track them down by reference
97
+ for co in nestedcode(func, recurse):
98
+ # look for function objects that refer to the code object
99
+ for obj in gc.get_referrers(co):
100
+ # get methods
101
+ _ = getattr(obj, '__func__', None) # ismethod
102
+ if getattr(_, '__code__', None) is co: funcs.add(obj)
103
+ # get functions
104
+ elif getattr(obj, '__code__', None) is co: funcs.add(obj)
105
+ # get frame objects
106
+ elif getattr(obj, 'f_code', None) is co: funcs.add(obj)
107
+ # get code objects
108
+ elif hasattr(obj, 'co_code') and obj is co: funcs.add(obj)
109
+ # frameobjs => func.__code__.co_varnames not in func.__code__.co_cellvars
110
+ # funcobjs => func.__code__.co_cellvars not in func.__code__.co_varnames
111
+ # frameobjs are not found, however funcobjs are...
112
+ # (see: test_mixins.quad ... and test_mixins.wtf)
113
+ # after execution, code objects get compiled, and then may be found by gc
114
+ return list(funcs)
115
+
116
+
117
+ def freevars(func):
118
+ """get objects defined in enclosing code that are referred to by func
119
+
120
+ returns a dict of {name:object}"""
121
+ if ismethod(func): func = func.__func__
122
+ if isfunction(func):
123
+ closures = func.__closure__ or ()
124
+ func = func.__code__.co_freevars # get freevars
125
+ else:
126
+ return {}
127
+
128
+ def get_cell_contents():
129
+ for name, c in zip(func, closures):
130
+ try:
131
+ cell_contents = c.cell_contents
132
+ except ValueError: # cell is empty
133
+ continue
134
+ yield name, c.cell_contents
135
+
136
+ return dict(get_cell_contents())
137
+
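+ # A minimal, hypothetical sketch of `freevars`; outer/inner are assumed examples.
+ # >>> from dill.detect import freevars
+ # >>> def outer(y):
+ # ...     def inner(x): return x + y
+ # ...     return inner
+ # >>> freevars(outer(2))
+ # {'y': 2}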
138
+ # thanks to Davies Liu for recursion of globals
139
+ def nestedglobals(func, recurse=True):
140
+ """get the names of any globals found within func"""
141
+ func = code(func)
142
+ if func is None: return list()
143
+ import sys
144
+ from .temp import capture
145
+ CAN_NULL = sys.hexversion >= 0x30b00a7 # NULL may be prepended >= 3.11a7
146
+ names = set()
147
+ with capture('stdout') as out:
148
+ dis.dis(func) #XXX: dis.dis(None) disassembles last traceback
149
+ for line in out.getvalue().splitlines():
150
+ if '_GLOBAL' in line:
151
+ name = line.split('(')[-1].split(')')[0]
152
+ if CAN_NULL:
153
+ names.add(name.replace('NULL + ', '').replace(' + NULL', ''))
154
+ else:
155
+ names.add(name)
156
+ for co in getattr(func, 'co_consts', tuple()):
157
+ if co and recurse and iscode(co):
158
+ names.update(nestedglobals(co, recurse=True))
159
+ return list(names)
160
+
161
+ def referredglobals(func, recurse=True, builtin=False):
162
+ """get the names of objects in the global scope referred to by func"""
163
+ return globalvars(func, recurse, builtin).keys()
164
+
165
+ def globalvars(func, recurse=True, builtin=False):
166
+ """get objects defined in global scope that are referred to by func
167
+
168
+ return a dict of {name:object}"""
169
+ if ismethod(func): func = func.__func__
170
+ if isfunction(func):
171
+ globs = vars(getmodule(sum)).copy() if builtin else {}
172
+ # get references from within closure
173
+ orig_func, func = func, set()
174
+ for obj in orig_func.__closure__ or {}:
175
+ try:
176
+ cell_contents = obj.cell_contents
177
+ except ValueError: # cell is empty
178
+ pass
179
+ else:
180
+ _vars = globalvars(cell_contents, recurse, builtin) or {}
181
+ func.update(_vars) #XXX: (above) be wary of infinite recursion?
182
+ globs.update(_vars)
183
+ # get globals
184
+ globs.update(orig_func.__globals__ or {})
185
+ # get names of references
186
+ if not recurse:
187
+ func.update(orig_func.__code__.co_names)
188
+ else:
189
+ func.update(nestedglobals(orig_func.__code__))
190
+ # find globals for all entries of func
191
+ for key in func.copy(): #XXX: unnecessary...?
192
+ nested_func = globs.get(key)
193
+ if nested_func is orig_func:
194
+ #func.remove(key) if key in func else None
195
+ continue #XXX: globalvars(func, False)?
196
+ func.update(globalvars(nested_func, True, builtin))
197
+ elif iscode(func):
198
+ globs = vars(getmodule(sum)).copy() if builtin else {}
199
+ #globs.update(globals())
200
+ if not recurse:
201
+ func = func.co_names # get names
202
+ else:
203
+ orig_func = func.co_name # to stop infinite recursion
204
+ func = set(nestedglobals(func))
205
+ # find globals for all entries of func
206
+ for key in func.copy(): #XXX: unnecessary...?
207
+ if key is orig_func:
208
+ #func.remove(key) if key in func else None
209
+ continue #XXX: globalvars(func, False)?
210
+ nested_func = globs.get(key)
211
+ func.update(globalvars(nested_func, True, builtin))
212
+ else:
213
+ return {}
214
+ #NOTE: if name not in __globals__, then we skip it...
215
+ return dict((name,globs[name]) for name in func if name in globs)
216
+
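+ # A minimal, hypothetical sketch of `globalvars`; CONST and f are assumed names
+ # defined at module level.
+ # >>> from dill.detect import globalvars
+ # >>> CONST = 40
+ # >>> def f(x): return x + CONST
+ # >>> globalvars(f)
+ # {'CONST': 40}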
217
+
218
+ def varnames(func):
219
+ """get names of variables defined by func
220
+
221
+ returns a tuple (local vars, local vars referenced by nested functions)"""
222
+ func = code(func)
223
+ if not iscode(func):
224
+ return () #XXX: better ((),())? or None?
225
+ return func.co_varnames, func.co_cellvars
226
+
227
+
228
+ def baditems(obj, exact=False, safe=False): #XXX: obj=globals() ?
229
+ """get items in object that fail to pickle"""
230
+ if not hasattr(obj,'__iter__'): # is not iterable
231
+ return [j for j in (badobjects(obj,0,exact,safe),) if j is not None]
232
+ obj = obj.values() if getattr(obj,'values',None) else obj
233
+ _obj = [] # can't use a set, as items may be unhashable
234
+ [_obj.append(badobjects(i,0,exact,safe)) for i in obj if i not in _obj]
235
+ return [j for j in _obj if j is not None]
236
+
237
+
238
+ def badobjects(obj, depth=0, exact=False, safe=False):
239
+ """get objects that fail to pickle"""
240
+ from dill import pickles
241
+ if not depth:
242
+ if pickles(obj,exact,safe): return None
243
+ return obj
244
+ return dict(((attr, badobjects(getattr(obj,attr),depth-1,exact,safe)) \
245
+ for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe)))
246
+
247
+ def badtypes(obj, depth=0, exact=False, safe=False):
248
+ """get types for objects that fail to pickle"""
249
+ from dill import pickles
250
+ if not depth:
251
+ if pickles(obj,exact,safe): return None
252
+ return type(obj)
253
+ return dict(((attr, badtypes(getattr(obj,attr),depth-1,exact,safe)) \
254
+ for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe)))
255
+
256
+ def errors(obj, depth=0, exact=False, safe=False):
257
+ """get errors for objects that fail to pickle"""
258
+ from dill import pickles, copy
259
+ if not depth:
260
+ try:
261
+ pik = copy(obj)
262
+ if exact:
263
+ assert pik == obj, \
264
+ "Unpickling produces %s instead of %s" % (pik,obj)
265
+ assert type(pik) == type(obj), \
266
+ "Unpickling produces %s instead of %s" % (type(pik),type(obj))
267
+ return None
268
+ except Exception:
269
+ import sys
270
+ return sys.exc_info()[1]
271
+ _dict = {}
272
+ for attr in dir(obj):
273
+ try:
274
+ _attr = getattr(obj,attr)
275
+ except Exception:
276
+ import sys
277
+ _dict[attr] = sys.exc_info()[1]
278
+ continue
279
+ if not pickles(_attr,exact,safe):
280
+ _dict[attr] = errors(_attr,depth-1,exact,safe)
281
+ return _dict
282
+
283
+
284
+ # EOF
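
A minimal usage sketch of the diagnostic helpers defined above; badobjects/badtypes/errors report why an object fails to pickle, and baditems scans a container. The generator, the mapping, and the outer() function are illustrative values, not taken from this file:

    from dill import detect

    gen = (i for i in range(3))              # generators cannot be pickled, even by dill
    detect.badobjects(gen)                   # -> the generator itself (it fails to pickle)
    detect.badtypes(gen)                     # -> <class 'generator'>
    detect.errors(gen)                       # -> the exception raised while pickling it
    detect.baditems({'ok': 42, 'bad': gen})  # -> [gen]; scans the mapping's values

    def outer(x):
        return lambda y: x + y
    detect.varnames(outer)                   # -> (local variable names, names referenced by nested functions)
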
llmeval-env/lib/python3.10/site-packages/dill/logger.py ADDED
@@ -0,0 +1,285 @@
1
+ #!/usr/bin/env python
2
+ # -*- coding: utf-8 -*-
3
+ #
4
+ # Author: Leonardo Gama (@leogama)
5
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ Logging utilities for dill.
10
+
11
+ The 'logger' object is dill's top-level logger.
12
+
13
+ The 'adapter' object wraps the logger and implements a 'trace()' method that
14
+ generates a detailed tree-style trace for the pickling call at log level INFO.
15
+
16
+ The 'trace()' function sets and resets dill's logger log level, enabling and
17
+ disabling the pickling trace.
18
+
19
+ The trace shows a tree structure depicting the depth of each object serialized
20
+ *with dill save functions*, but not the ones that use save functions from
21
+ 'pickle._Pickler.dispatch'. If the information is available, it also displays
22
+ the size in bytes that the object contributed to the pickle stream (including
23
+ its child objects). Sample trace output:
24
+
25
+ >>> import dill, dill.tests
26
+ >>> dill.detect.trace(True)
27
+ >>> dill.dump_session(main=dill.tests)
28
+ ┬ M1: <module 'dill.tests' from '.../dill/tests/__init__.py'>
29
+ ├┬ F2: <function _import_module at 0x7f0d2dce1b80>
30
+ │└ # F2 [32 B]
31
+ ├┬ D2: <dict object at 0x7f0d2e98a540>
32
+ │├┬ T4: <class '_frozen_importlib.ModuleSpec'>
33
+ ││└ # T4 [35 B]
34
+ │├┬ D2: <dict object at 0x7f0d2ef0e8c0>
35
+ ││├┬ T4: <class '_frozen_importlib_external.SourceFileLoader'>
36
+ │││└ # T4 [50 B]
37
+ ││├┬ D2: <dict object at 0x7f0d2e988a40>
38
+ │││└ # D2 [84 B]
39
+ ││└ # D2 [413 B]
40
+ │└ # D2 [763 B]
41
+ └ # M1 [813 B]
42
+ """
43
+
44
+ __all__ = ['adapter', 'logger', 'trace']
45
+
46
+ import codecs
47
+ import contextlib
48
+ import locale
49
+ import logging
50
+ import math
51
+ import os
52
+ from functools import partial
53
+ from typing import TextIO, Union
54
+
55
+ import dill
56
+
57
+ # Tree drawing characters: Unicode to ASCII map.
58
+ ASCII_MAP = str.maketrans({"│": "|", "├": "|", "┬": "+", "└": "`"})
59
+
60
+ ## Notes about the design choices ##
61
+
62
+ # Here is some documentation of the Standard Library's logging internals that
63
+ # can't be found completely in the official documentation. dill's logger is
64
+ # obtained by calling logging.getLogger('dill') and therefore is an instance of
65
+ # logging.getLoggerClass() at the call time. As this is controlled by the user,
66
+ # in order to add some functionality to it, it's necessary to use a LoggerAdapter
67
+ # to wrap it, overriding some of the adapter's methods and creating new ones.
68
+ #
69
+ # Basic calling sequence
70
+ # ======================
71
+ #
72
+ # Python's logging functionality can be conceptually divided into five steps:
73
+ # 0. Check logging level -> abort if call level is greater than logger level
74
+ # 1. Gather information -> construct a LogRecord from passed arguments and context
75
+ # 2. Filter (optional) -> discard message if the record matches a filter
76
+ # 3. Format -> format message with args, then format output string with message plus record
77
+ # 4. Handle -> write the formatted string to output as defined in the handler
78
+ #
79
+ # dill.logging.logger.log -> # or logger.info, etc.
80
+ # Logger.log -> \
81
+ # Logger._log -> }- accept 'extra' parameter for custom record entries
82
+ # Logger.makeRecord -> /
83
+ # LogRecord.__init__
84
+ # Logger.handle ->
85
+ # Logger.callHandlers ->
86
+ # Handler.handle ->
87
+ # Filterer.filter ->
88
+ # Filter.filter
89
+ # StreamHandler.emit ->
90
+ # Handler.format ->
91
+ # Formatter.format ->
92
+ # LogRecord.getMessage # does: record.message = msg % args
93
+ # Formatter.formatMessage ->
94
+ # PercentStyle.format # does: self._fmt % vars(record)
95
+ #
96
+ # NOTE: All methods from the second line on are from logging.__init__.py
97
+
98
+ class TraceAdapter(logging.LoggerAdapter):
99
+ """
100
+ Tracks object tree depth and calculates pickled object size.
101
+
102
+ A single instance of this wraps the module's logger, as the logging API
103
+ doesn't allow setting it directly with a custom Logger subclass. The added
104
+ 'trace()' method receives a pickle instance as the first argument and
105
+ creates extra values to be added in the LogRecord from it, then calls
106
+ 'info()'.
107
+
108
+ Usage of logger with 'trace()' method:
109
+
110
+ >>> from dill.logger import adapter as logger #NOTE: not dill.logger.logger
111
+ >>> ...
112
+ >>> def save_atype(pickler, obj):
113
+ >>> logger.trace(pickler, "Message with %s and %r etc. placeholders", 'text', obj)
114
+ >>> ...
115
+ """
116
+ def __init__(self, logger):
117
+ self.logger = logger
118
+ def addHandler(self, handler):
119
+ formatter = TraceFormatter("%(prefix)s%(message)s%(suffix)s", handler=handler)
120
+ handler.setFormatter(formatter)
121
+ self.logger.addHandler(handler)
122
+ def removeHandler(self, handler):
123
+ self.logger.removeHandler(handler)
124
+ def process(self, msg, kwargs):
125
+ # A no-op override, as we don't have self.extra.
126
+ return msg, kwargs
127
+ def trace_setup(self, pickler):
128
+ # Called by Pickler.dump().
129
+ if not dill._dill.is_dill(pickler, child=False):
130
+ return
131
+ if self.isEnabledFor(logging.INFO):
132
+ pickler._trace_depth = 1
133
+ pickler._size_stack = []
134
+ else:
135
+ pickler._trace_depth = None
136
+ def trace(self, pickler, msg, *args, **kwargs):
137
+ if not hasattr(pickler, '_trace_depth'):
138
+ logger.info(msg, *args, **kwargs)
139
+ return
140
+ if pickler._trace_depth is None:
141
+ return
142
+ extra = kwargs.get('extra', {})
143
+ pushed_obj = msg.startswith('#')
144
+ size = None
145
+ try:
146
+ # Streams are not required to be tellable.
147
+ size = pickler._file.tell()
148
+ frame = pickler.framer.current_frame
149
+ try:
150
+ size += frame.tell()
151
+ except AttributeError:
152
+ # PyPy may use a BytesBuilder as frame
153
+ size += len(frame)
154
+ except (AttributeError, TypeError):
155
+ pass
156
+ if size is not None:
157
+ if not pushed_obj:
158
+ pickler._size_stack.append(size)
159
+ else:
160
+ size -= pickler._size_stack.pop()
161
+ extra['size'] = size
162
+ if pushed_obj:
163
+ pickler._trace_depth -= 1
164
+ extra['depth'] = pickler._trace_depth
165
+ kwargs['extra'] = extra
166
+ self.info(msg, *args, **kwargs)
167
+ if not pushed_obj:
168
+ pickler._trace_depth += 1
169
+
170
+ class TraceFormatter(logging.Formatter):
171
+ """
172
+ Generates message prefix and suffix from record.
173
+
174
+ This Formatter adds prefix and suffix strings to the log message in trace
175
+ mode (and also provides empty-string defaults for normal logs).
176
+ """
177
+ def __init__(self, *args, handler=None, **kwargs):
178
+ super().__init__(*args, **kwargs)
179
+ try:
180
+ encoding = handler.stream.encoding
181
+ if encoding is None:
182
+ raise AttributeError
183
+ except AttributeError:
184
+ encoding = locale.getpreferredencoding()
185
+ try:
186
+ encoding = codecs.lookup(encoding).name
187
+ except LookupError:
188
+ self.is_utf8 = False
189
+ else:
190
+ self.is_utf8 = (encoding == codecs.lookup('utf-8').name)
191
+ def format(self, record):
192
+ fields = {'prefix': "", 'suffix': ""}
193
+ if getattr(record, 'depth', 0) > 0:
194
+ if record.msg.startswith("#"):
195
+ prefix = (record.depth - 1)*"│" + "└"
196
+ elif record.depth == 1:
197
+ prefix = "┬"
198
+ else:
199
+ prefix = (record.depth - 2)*"│" + "├┬"
200
+ if not self.is_utf8:
201
+ prefix = prefix.translate(ASCII_MAP) + "-"
202
+ fields['prefix'] = prefix + " "
203
+ if hasattr(record, 'size') and record.size is not None and record.size >= 1:
204
+ # Show object size in human-readable form.
205
+ power = int(math.log(record.size, 2)) // 10
206
+ size = record.size >> power*10
207
+ fields['suffix'] = " [%d %sB]" % (size, "KMGTP"[power-1] + "i" if power else "")
208
+ vars(record).update(fields)
209
+ return super().format(record)
210
+
211
+ logger = logging.getLogger('dill')
212
+ logger.propagate = False
213
+ adapter = TraceAdapter(logger)
214
+ stderr_handler = logging._StderrHandler()
215
+ adapter.addHandler(stderr_handler)
216
+
217
+ def trace(arg: Union[bool, TextIO, str, os.PathLike] = None, *, mode: str = 'a') -> None:
218
+ """print a trace through the stack when pickling; useful for debugging
219
+
220
+ With a single boolean argument, enable or disable the tracing.
221
+
222
+ Example usage:
223
+
224
+ >>> import dill
225
+ >>> dill.detect.trace(True)
226
+ >>> dill.dump_session()
227
+
228
+ Alternatively, ``trace()`` can be used as a context manager. With no
229
+ arguments, it just takes care of restoring the tracing state on exit.
230
+ Either a file handle, or a file name and (optionally) a file mode may be
231
+ specified to redirect the tracing output in the ``with`` block context. A
232
+ log function is yielded by the manager so the user can write extra
233
+ information to the file.
234
+
235
+ Example usage:
236
+
237
+ >>> from dill import detect
238
+ >>> D = {'a': 42, 'b': {'x': None}}
239
+ >>> with detect.trace():
240
+ >>> dumps(D)
241
+ ┬ D2: <dict object at 0x7f2721804800>
242
+ ├┬ D2: <dict object at 0x7f27217f5c40>
243
+ │└ # D2 [8 B]
244
+ └ # D2 [22 B]
245
+ >>> squared = lambda x: x**2
246
+ >>> with detect.trace('output.txt', mode='w') as log:
247
+ >>> log("> D = %r", D)
248
+ >>> dumps(D)
249
+ >>> log("> squared = %r", squared)
250
+ >>> dumps(squared)
251
+
252
+ Arguments:
253
+ arg: a boolean value, or an optional file-like or path-like object for the context manager
254
+ mode: mode string for ``open()`` if a file name is passed as the first argument
255
+ """
256
+ if not isinstance(arg, bool):
257
+ return TraceManager(file=arg, mode=mode)
258
+ logger.setLevel(logging.INFO if arg else logging.WARNING)
259
+
260
+ class TraceManager(contextlib.AbstractContextManager):
261
+ """context manager version of trace(); can redirect the trace to a file"""
262
+ def __init__(self, file, mode):
263
+ self.file = file
264
+ self.mode = mode
265
+ self.redirect = file is not None
266
+ self.file_is_stream = hasattr(file, 'write')
267
+ def __enter__(self):
268
+ if self.redirect:
269
+ stderr_handler.flush()
270
+ if self.file_is_stream:
271
+ self.handler = logging.StreamHandler(self.file)
272
+ else:
273
+ self.handler = logging.FileHandler(self.file, self.mode)
274
+ adapter.removeHandler(stderr_handler)
275
+ adapter.addHandler(self.handler)
276
+ self.old_level = adapter.getEffectiveLevel()
277
+ adapter.setLevel(logging.INFO)
278
+ return adapter.info
279
+ def __exit__(self, *exc_info):
280
+ adapter.setLevel(self.old_level)
281
+ if self.redirect:
282
+ adapter.removeHandler(self.handler)
283
+ adapter.addHandler(stderr_handler)
284
+ if not self.file_is_stream:
285
+ self.handler.close()
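
A short usage sketch of the tracing API defined above, following the docstrings in this file; the dictionary D and the file name 'trace_output.txt' are illustrative:

    import dill
    from dill import detect

    D = {'a': 42, 'b': {'x': None}}
    detect.trace(True)        # enable the tree-style pickling trace (printed to stderr)
    dill.dumps(D)
    detect.trace(False)       # disable it again

    with detect.trace('trace_output.txt', mode='w') as log:   # redirect the trace to a file
        log("> pickling D = %r", D)
        dill.dumps(D)
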
llmeval-env/lib/python3.10/site-packages/dill/objtypes.py ADDED
@@ -0,0 +1,24 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ all Python Standard Library object types (currently: CH 1-15 @ 2.7)
10
+ and some other common object types (i.e. numpy.ndarray)
11
+
12
+ to load more objects and types, use dill.load_types()
13
+ """
14
+
15
+ # non-local import of dill.objects
16
+ from dill import objects
17
+ for _type in objects.keys():
18
+ exec("%s = type(objects['%s'])" % (_type,_type))
19
+
20
+ del objects
21
+ try:
22
+ del _type
23
+ except NameError:
24
+ pass
llmeval-env/lib/python3.10/site-packages/dill/pointers.py ADDED
@@ -0,0 +1,122 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ __all__ = ['parent', 'reference', 'at', 'parents', 'children']
10
+
11
+ import gc
12
+ import sys
13
+
14
+ from ._dill import _proxy_helper as reference
15
+ from ._dill import _locate_object as at
16
+
17
+ def parent(obj, objtype, ignore=()):
18
+ """
19
+ >>> listiter = iter([4,5,6,7])
20
+ >>> obj = parent(listiter, list)
21
+ >>> obj == [4,5,6,7] # actually 'is', but we no longer hold a handle to the list
22
+ True
23
+
24
+ NOTE: objtype can be a single type (e.g. int or list) or a tuple of types.
25
+
26
+ WARNING: if obj is a sequence (e.g. list), may produce unexpected results.
27
+ Parent finds *one* parent (e.g. the last member of the sequence).
28
+ """
29
+ depth = 1 #XXX: always looking for the parent (only, right?)
30
+ chain = parents(obj, objtype, depth, ignore)
31
+ parent = chain.pop()
32
+ if parent is obj:
33
+ return None
34
+ return parent
35
+
36
+
37
+ def parents(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ?
38
+ """Find the chain of referents for obj. Chain will end with obj.
39
+
40
+ objtype: an object type or tuple of types to search for
41
+ depth: search depth (e.g. depth=2 is 'grandparents')
42
+ ignore: an object or tuple of objects to ignore in the search
43
+ """
44
+ edge_func = gc.get_referents # looking for refs, not back_refs
45
+ predicate = lambda x: isinstance(x, objtype) # looking for parent type
46
+ #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ?
47
+ ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore
48
+ ignore = (id(obj) for obj in ignore)
49
+ chain = find_chain(obj, predicate, edge_func, depth)[::-1]
50
+ #XXX: should pop off obj... ?
51
+ return chain
52
+
53
+
54
+ def children(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ?
55
+ """Find the chain of referrers for obj. Chain will start with obj.
56
+
57
+ objtype: an object type or tuple of types to search for
58
+ depth: search depth (e.g. depth=2 is 'grandchildren')
59
+ ignore: an object or tuple of objects to ignore in the search
60
+
61
+ NOTE: a common thing to ignore is all globals, 'ignore=(globals(),)'
62
+
63
+ NOTE: repeated calls may yield different results, as python stores
64
+ the last value in the special variable '_'; thus, it is often good
65
+ to execute something to replace '_' (e.g. >>> 1+1).
66
+ """
67
+ edge_func = gc.get_referrers # looking for back_refs, not refs
68
+ predicate = lambda x: isinstance(x, objtype) # looking for child type
69
+ #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ?
70
+ ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore
71
+ ignore = (id(obj) for obj in ignore)
72
+ chain = find_chain(obj, predicate, edge_func, depth, ignore)
73
+ #XXX: should pop off obj... ?
74
+ return chain
75
+
76
+
77
+ # more generic helper function (cut-n-paste from objgraph)
78
+ # Source at http://mg.pov.lt/objgraph/
79
+ # Copyright (c) 2008-2010 Marius Gedminas <[email protected]>
80
+ # Copyright (c) 2010 Stefano Rivera <[email protected]>
81
+ # Released under the MIT licence (see objgraph/objgraph.py)
82
+
83
+ def find_chain(obj, predicate, edge_func, max_depth=20, extra_ignore=()):
84
+ queue = [obj]
85
+ depth = {id(obj): 0}
86
+ parent = {id(obj): None}
87
+ ignore = set(extra_ignore)
88
+ ignore.add(id(extra_ignore))
89
+ ignore.add(id(queue))
90
+ ignore.add(id(depth))
91
+ ignore.add(id(parent))
92
+ ignore.add(id(ignore))
93
+ ignore.add(id(sys._getframe())) # this function
94
+ ignore.add(id(sys._getframe(1))) # find_chain/find_backref_chain, likely
95
+ gc.collect()
96
+ while queue:
97
+ target = queue.pop(0)
98
+ if predicate(target):
99
+ chain = [target]
100
+ while parent[id(target)] is not None:
101
+ target = parent[id(target)]
102
+ chain.append(target)
103
+ return chain
104
+ tdepth = depth[id(target)]
105
+ if tdepth < max_depth:
106
+ referrers = edge_func(target)
107
+ ignore.add(id(referrers))
108
+ for source in referrers:
109
+ if id(source) in ignore:
110
+ continue
111
+ if id(source) not in depth:
112
+ depth[id(source)] = tdepth + 1
113
+ parent[id(source)] = target
114
+ queue.append(source)
115
+ return [obj] # not found
116
+
117
+
118
+ # backward compatibility
119
+ refobject = at
120
+
121
+
122
+ # EOF
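
A minimal sketch of the reference-chasing helpers above, following the parent() docstring. The iterator is an illustrative object, and the reference()/at() round trip assumes the object is tracked by the garbage collector:

    from dill.pointers import parent, reference, at

    listiter = iter([4, 5, 6, 7])
    obj = parent(listiter, list)     # climb from the iterator to the list it was built from
    assert obj == [4, 5, 6, 7]

    addr = reference(obj)            # address of the object
    assert at(addr) is obj           # locate the object from its address (assumes gc-tracked)
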
llmeval-env/lib/python3.10/site-packages/dill/session.py ADDED
@@ -0,0 +1,613 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Author: Leonardo Gama (@leogama)
5
+ # Copyright (c) 2008-2015 California Institute of Technology.
6
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
7
+ # License: 3-clause BSD. The full license text is available at:
8
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
9
+ """
10
+ Pickle and restore the interpreter session.
11
+ """
12
+
13
+ __all__ = [
14
+ 'dump_module', 'load_module', 'load_module_asdict',
15
+ 'dump_session', 'load_session' # backward compatibility
16
+ ]
17
+
18
+ import re
19
+ import os
20
+ import sys
21
+ import warnings
22
+
23
+ from dill import _dill, Pickler, Unpickler
24
+ from ._dill import (
25
+ BuiltinMethodType, FunctionType, MethodType, ModuleType, TypeType,
26
+ _import_module, _is_builtin_module, _is_imported_module, _main_module,
27
+ _reverse_typemap, __builtin__,
28
+ )
29
+
30
+ # Type hints.
31
+ from typing import Optional, Union
32
+
33
+ import pathlib
34
+ import tempfile
35
+
36
+ TEMPDIR = pathlib.PurePath(tempfile.gettempdir())
37
+
38
+ def _module_map():
39
+ """get map of imported modules"""
40
+ from collections import defaultdict
41
+ from types import SimpleNamespace
42
+ modmap = SimpleNamespace(
43
+ by_name=defaultdict(list),
44
+ by_id=defaultdict(list),
45
+ top_level={},
46
+ )
47
+ for modname, module in sys.modules.items():
48
+ if modname in ('__main__', '__mp_main__') or not isinstance(module, ModuleType):
49
+ continue
50
+ if '.' not in modname:
51
+ modmap.top_level[id(module)] = modname
52
+ for objname, modobj in module.__dict__.items():
53
+ modmap.by_name[objname].append((modobj, modname))
54
+ modmap.by_id[id(modobj)].append((modobj, objname, modname))
55
+ return modmap
56
+
57
+ IMPORTED_AS_TYPES = (ModuleType, TypeType, FunctionType, MethodType, BuiltinMethodType)
58
+ if 'PyCapsuleType' in _reverse_typemap:
59
+ IMPORTED_AS_TYPES += (_reverse_typemap['PyCapsuleType'],)
60
+ IMPORTED_AS_MODULES = ('ctypes', 'typing', 'subprocess', 'threading',
61
+ r'concurrent\.futures(\.\w+)?', r'multiprocessing(\.\w+)?')
62
+ IMPORTED_AS_MODULES = tuple(re.compile(x) for x in IMPORTED_AS_MODULES)
63
+
64
+ def _lookup_module(modmap, name, obj, main_module):
65
+ """lookup name or id of obj if module is imported"""
66
+ for modobj, modname in modmap.by_name[name]:
67
+ if modobj is obj and sys.modules[modname] is not main_module:
68
+ return modname, name
69
+ __module__ = getattr(obj, '__module__', None)
70
+ if isinstance(obj, IMPORTED_AS_TYPES) or (__module__ is not None
71
+ and any(regex.fullmatch(__module__) for regex in IMPORTED_AS_MODULES)):
72
+ for modobj, objname, modname in modmap.by_id[id(obj)]:
73
+ if sys.modules[modname] is not main_module:
74
+ return modname, objname
75
+ return None, None
76
+
77
+ def _stash_modules(main_module):
78
+ modmap = _module_map()
79
+ newmod = ModuleType(main_module.__name__)
80
+
81
+ imported = []
82
+ imported_as = []
83
+ imported_top_level = [] # keep separated for backward compatibility
84
+ original = {}
85
+ for name, obj in main_module.__dict__.items():
86
+ if obj is main_module:
87
+ original[name] = newmod # self-reference
88
+ elif obj is main_module.__dict__:
89
+ original[name] = newmod.__dict__
90
+ # Avoid incorrectly matching a singleton value in another package (ex.: __doc__).
91
+ elif any(obj is singleton for singleton in (None, False, True)) \
92
+ or isinstance(obj, ModuleType) and _is_builtin_module(obj): # always saved by ref
93
+ original[name] = obj
94
+ else:
95
+ source_module, objname = _lookup_module(modmap, name, obj, main_module)
96
+ if source_module is not None:
97
+ if objname == name:
98
+ imported.append((source_module, name))
99
+ else:
100
+ imported_as.append((source_module, objname, name))
101
+ else:
102
+ try:
103
+ imported_top_level.append((modmap.top_level[id(obj)], name))
104
+ except KeyError:
105
+ original[name] = obj
106
+
107
+ if len(original) < len(main_module.__dict__):
108
+ newmod.__dict__.update(original)
109
+ newmod.__dill_imported = imported
110
+ newmod.__dill_imported_as = imported_as
111
+ newmod.__dill_imported_top_level = imported_top_level
112
+ if getattr(newmod, '__loader__', None) is None and _is_imported_module(main_module):
113
+ # Trick _is_imported_module() to force saving as an imported module.
114
+ newmod.__loader__ = True # will be discarded by save_module()
115
+ return newmod
116
+ else:
117
+ return main_module
118
+
119
+ def _restore_modules(unpickler, main_module):
120
+ try:
121
+ for modname, name in main_module.__dict__.pop('__dill_imported'):
122
+ main_module.__dict__[name] = unpickler.find_class(modname, name)
123
+ for modname, objname, name in main_module.__dict__.pop('__dill_imported_as'):
124
+ main_module.__dict__[name] = unpickler.find_class(modname, objname)
125
+ for modname, name in main_module.__dict__.pop('__dill_imported_top_level'):
126
+ main_module.__dict__[name] = __import__(modname)
127
+ except KeyError:
128
+ pass
129
+
130
+ #NOTE: 06/03/15 renamed main_module to main
131
+ def dump_module(
132
+ filename: Union[str, os.PathLike] = None,
133
+ module: Optional[Union[ModuleType, str]] = None,
134
+ refimported: bool = False,
135
+ **kwds
136
+ ) -> None:
137
+ """Pickle the current state of :py:mod:`__main__` or another module to a file.
138
+
139
+ Save the contents of :py:mod:`__main__` (e.g. from an interactive
140
+ interpreter session), an imported module, or a module-type object (e.g.
141
+ built with :py:class:`~types.ModuleType`), to a file. The pickled
142
+ module can then be restored with the function :py:func:`load_module`.
143
+
144
+ Args:
145
+ filename: a path-like object or a writable stream. If `None`
146
+ (the default), write to a named file in a temporary directory.
147
+ module: a module object or the name of an importable module. If `None`
148
+ (the default), :py:mod:`__main__` is saved.
149
+ refimported: if `True`, all objects identified as having been imported
150
+ into the module's namespace are saved by reference. *Note:* this is
151
+ similar to, but independent from, ``dill.settings['byref']``, as
152
+ ``refimported`` refers to virtually all imported objects, while
153
+ ``byref`` only affects select objects.
154
+ **kwds: extra keyword arguments passed to :py:class:`Pickler()`.
155
+
156
+ Raises:
157
+ :py:exc:`PicklingError`: if pickling fails.
158
+
159
+ Examples:
160
+
161
+ - Save current interpreter session state:
162
+
163
+ >>> import dill
164
+ >>> squared = lambda x: x*x
165
+ >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl
166
+
167
+ - Save the state of an imported/importable module:
168
+
169
+ >>> import dill
170
+ >>> import pox
171
+ >>> pox.plus_one = lambda x: x+1
172
+ >>> dill.dump_module('pox_session.pkl', module=pox)
173
+
174
+ - Save the state of a non-importable, module-type object:
175
+
176
+ >>> import dill
177
+ >>> from types import ModuleType
178
+ >>> foo = ModuleType('foo')
179
+ >>> foo.values = [1,2,3]
180
+ >>> import math
181
+ >>> foo.sin = math.sin
182
+ >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True)
183
+
184
+ - Restore the state of the saved modules:
185
+
186
+ >>> import dill
187
+ >>> dill.load_module()
188
+ >>> squared(2)
189
+ 4
190
+ >>> pox = dill.load_module('pox_session.pkl')
191
+ >>> pox.plus_one(1)
192
+ 2
193
+ >>> foo = dill.load_module('foo_session.pkl')
194
+ >>> [foo.sin(x) for x in foo.values]
195
+ [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]
196
+
197
+ - Use `refimported` to save imported objects by reference:
198
+
199
+ >>> import dill
200
+ >>> from html.entities import html5
201
+ >>> type(html5), len(html5)
202
+ (dict, 2231)
203
+ >>> import io
204
+ >>> buf = io.BytesIO()
205
+ >>> dill.dump_module(buf) # saves __main__, with html5 saved by value
206
+ >>> len(buf.getvalue()) # pickle size in bytes
207
+ 71665
208
+ >>> buf = io.BytesIO()
209
+ >>> dill.dump_module(buf, refimported=True) # html5 saved by reference
210
+ >>> len(buf.getvalue())
211
+ 438
212
+
213
+ *Changed in version 0.3.6:* Function ``dump_session()`` was renamed to
214
+ ``dump_module()``. Parameters ``main`` and ``byref`` were renamed to
215
+ ``module`` and ``refimported``, respectively.
216
+
217
+ Note:
218
+ Currently, ``dill.settings['byref']`` and ``dill.settings['recurse']``
219
+ don't apply to this function.
220
+ """
221
+ for old_par, par in [('main', 'module'), ('byref', 'refimported')]:
222
+ if old_par in kwds:
223
+ message = "The argument %r has been renamed %r" % (old_par, par)
224
+ if old_par == 'byref':
225
+ message += " to distinguish it from dill.settings['byref']"
226
+ warnings.warn(message + ".", PendingDeprecationWarning)
227
+ if locals()[par]: # the defaults are None and False
228
+ raise TypeError("both %r and %r arguments were used" % (par, old_par))
229
+ refimported = kwds.pop('byref', refimported)
230
+ module = kwds.pop('main', module)
231
+
232
+ from .settings import settings
233
+ protocol = settings['protocol']
234
+ main = module
235
+ if main is None:
236
+ main = _main_module
237
+ elif isinstance(main, str):
238
+ main = _import_module(main)
239
+ if not isinstance(main, ModuleType):
240
+ raise TypeError("%r is not a module" % main)
241
+ if hasattr(filename, 'write'):
242
+ file = filename
243
+ else:
244
+ if filename is None:
245
+ filename = str(TEMPDIR/'session.pkl')
246
+ file = open(filename, 'wb')
247
+ try:
248
+ pickler = Pickler(file, protocol, **kwds)
249
+ pickler._original_main = main
250
+ if refimported:
251
+ main = _stash_modules(main)
252
+ pickler._main = main #FIXME: dill.settings are disabled
253
+ pickler._byref = False # disable pickling by name reference
254
+ pickler._recurse = False # disable pickling recursion for globals
255
+ pickler._session = True # is best indicator of when pickling a session
256
+ pickler._first_pass = True
257
+ pickler._main_modified = main is not pickler._original_main
258
+ pickler.dump(main)
259
+ finally:
260
+ if file is not filename: # if newly opened file
261
+ file.close()
262
+ return
263
+
264
+ # Backward compatibility.
265
+ def dump_session(filename=None, main=None, byref=False, **kwds):
266
+ warnings.warn("dump_session() has been renamed dump_module()", PendingDeprecationWarning)
267
+ dump_module(filename, module=main, refimported=byref, **kwds)
268
+ dump_session.__doc__ = dump_module.__doc__
269
+
270
+ class _PeekableReader:
271
+ """lightweight stream wrapper that implements peek()"""
272
+ def __init__(self, stream):
273
+ self.stream = stream
274
+ def read(self, n):
275
+ return self.stream.read(n)
276
+ def readline(self):
277
+ return self.stream.readline()
278
+ def tell(self):
279
+ return self.stream.tell()
280
+ def close(self):
281
+ return self.stream.close()
282
+ def peek(self, n):
283
+ stream = self.stream
284
+ try:
285
+ if hasattr(stream, 'flush'): stream.flush()
286
+ position = stream.tell()
287
+ stream.seek(position) # assert seek() works before reading
288
+ chunk = stream.read(n)
289
+ stream.seek(position)
290
+ return chunk
291
+ except (AttributeError, OSError):
292
+ raise NotImplementedError("stream is not peekable: %r", stream) from None
293
+
294
+ def _make_peekable(stream):
295
+ """return stream as an object with a peek() method"""
296
+ import io
297
+ if hasattr(stream, 'peek'):
298
+ return stream
299
+ if not (hasattr(stream, 'tell') and hasattr(stream, 'seek')):
300
+ try:
301
+ return io.BufferedReader(stream)
302
+ except Exception:
303
+ pass
304
+ return _PeekableReader(stream)
305
+
306
+ def _identify_module(file, main=None):
307
+ """identify the name of the module stored in the given file-type object"""
308
+ from pickletools import genops
309
+ UNICODE = {'UNICODE', 'BINUNICODE', 'SHORT_BINUNICODE'}
310
+ found_import = False
311
+ try:
312
+ for opcode, arg, pos in genops(file.peek(256)):
313
+ if not found_import:
314
+ if opcode.name in ('GLOBAL', 'SHORT_BINUNICODE') and \
315
+ arg.endswith('_import_module'):
316
+ found_import = True
317
+ else:
318
+ if opcode.name in UNICODE:
319
+ return arg
320
+ else:
321
+ raise UnpicklingError("reached STOP without finding main module")
322
+ except (NotImplementedError, ValueError) as error:
323
+ # ValueError occurs when the end of the chunk is reached (without a STOP).
324
+ if isinstance(error, NotImplementedError) and main is not None:
325
+ # file is not peekable, but we have main.
326
+ return None
327
+ raise UnpicklingError("unable to identify main module") from error
328
+
329
+ def load_module(
330
+ filename: Union[str, os.PathLike] = None,
331
+ module: Optional[Union[ModuleType, str]] = None,
332
+ **kwds
333
+ ) -> Optional[ModuleType]:
334
+ """Update the selected module (default is :py:mod:`__main__`) with
335
+ the state saved at ``filename``.
336
+
337
+ Restore a module to the state saved with :py:func:`dump_module`. The
338
+ saved module can be :py:mod:`__main__` (e.g. an interpreter session),
339
+ an imported module, or a module-type object (e.g. created with
340
+ :py:class:`~types.ModuleType`).
341
+
342
+ When restoring the state of a non-importable module-type object, the
343
+ current instance of this module may be passed as the argument ``module``.
344
+ Otherwise, a new instance is created with :py:class:`~types.ModuleType`
345
+ and returned.
346
+
347
+ Args:
348
+ filename: a path-like object or a readable stream. If `None`
349
+ (the default), read from a named file in a temporary directory.
350
+ module: a module object or the name of an importable module;
351
+ the module name and kind (i.e. imported or non-imported) must
352
+ match the name and kind of the module stored at ``filename``.
353
+ **kwds: extra keyword arguments passed to :py:class:`Unpickler()`.
354
+
355
+ Raises:
356
+ :py:exc:`UnpicklingError`: if unpickling fails.
357
+ :py:exc:`ValueError`: if the argument ``module`` and the module saved
358
+ at ``filename`` are incompatible.
359
+
360
+ Returns:
361
+ A module object, if the saved module is not :py:mod:`__main__` or
362
+ a module instance wasn't provided with the argument ``module``.
363
+
364
+ Examples:
365
+
366
+ - Save the state of some modules:
367
+
368
+ >>> import dill
369
+ >>> squared = lambda x: x*x
370
+ >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl
371
+ >>>
372
+ >>> import pox # an imported module
373
+ >>> pox.plus_one = lambda x: x+1
374
+ >>> dill.dump_module('pox_session.pkl', module=pox)
375
+ >>>
376
+ >>> from types import ModuleType
377
+ >>> foo = ModuleType('foo') # a module-type object
378
+ >>> foo.values = [1,2,3]
379
+ >>> import math
380
+ >>> foo.sin = math.sin
381
+ >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True)
382
+
383
+ - Restore the state of the interpreter:
384
+
385
+ >>> import dill
386
+ >>> dill.load_module() # updates __main__ from /tmp/session.pkl
387
+ >>> squared(2)
388
+ 4
389
+
390
+ - Load the saved state of an importable module:
391
+
392
+ >>> import dill
393
+ >>> pox = dill.load_module('pox_session.pkl')
394
+ >>> pox.plus_one(1)
395
+ 2
396
+ >>> import sys
397
+ >>> pox in sys.modules.values()
398
+ True
399
+
400
+ - Load the saved state of a non-importable module-type object:
401
+
402
+ >>> import dill
403
+ >>> foo = dill.load_module('foo_session.pkl')
404
+ >>> [foo.sin(x) for x in foo.values]
405
+ [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]
406
+ >>> import math
407
+ >>> foo.sin is math.sin # foo.sin was saved by reference
408
+ True
409
+ >>> import sys
410
+ >>> foo in sys.modules.values()
411
+ False
412
+
413
+ - Update the state of a non-importable module-type object:
414
+
415
+ >>> import dill
416
+ >>> from types import ModuleType
417
+ >>> foo = ModuleType('foo')
418
+ >>> foo.values = ['a','b']
419
+ >>> foo.sin = lambda x: x*x
420
+ >>> dill.load_module('foo_session.pkl', module=foo)
421
+ >>> [foo.sin(x) for x in foo.values]
422
+ [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]
423
+
424
+ *Changed in version 0.3.6:* Function ``load_session()`` was renamed to
425
+ ``load_module()``. Parameter ``main`` was renamed to ``module``.
426
+
427
+ See also:
428
+ :py:func:`load_module_asdict` to load the contents of module saved
429
+ with :py:func:`dump_module` into a dictionary.
430
+ """
431
+ if 'main' in kwds:
432
+ warnings.warn(
433
+ "The argument 'main' has been renamed 'module'.",
434
+ PendingDeprecationWarning
435
+ )
436
+ if module is not None:
437
+ raise TypeError("both 'module' and 'main' arguments were used")
438
+ module = kwds.pop('main')
439
+ main = module
440
+ if hasattr(filename, 'read'):
441
+ file = filename
442
+ else:
443
+ if filename is None:
444
+ filename = str(TEMPDIR/'session.pkl')
445
+ file = open(filename, 'rb')
446
+ try:
447
+ file = _make_peekable(file)
448
+ #FIXME: dill.settings are disabled
449
+ unpickler = Unpickler(file, **kwds)
450
+ unpickler._session = True
451
+
452
+ # Resolve unpickler._main
453
+ pickle_main = _identify_module(file, main)
454
+ if main is None and pickle_main is not None:
455
+ main = pickle_main
456
+ if isinstance(main, str):
457
+ if main.startswith('__runtime__.'):
458
+ # Create runtime module to load the session into.
459
+ main = ModuleType(main.partition('.')[-1])
460
+ else:
461
+ main = _import_module(main)
462
+ if main is not None:
463
+ if not isinstance(main, ModuleType):
464
+ raise TypeError("%r is not a module" % main)
465
+ unpickler._main = main
466
+ else:
467
+ main = unpickler._main
468
+
469
+ # Check against the pickle's main.
470
+ is_main_imported = _is_imported_module(main)
471
+ if pickle_main is not None:
472
+ is_runtime_mod = pickle_main.startswith('__runtime__.')
473
+ if is_runtime_mod:
474
+ pickle_main = pickle_main.partition('.')[-1]
475
+ error_msg = "can't update{} module{} %r with the saved state of{} module{} %r"
476
+ if is_runtime_mod and is_main_imported:
477
+ raise ValueError(
478
+ error_msg.format(" imported", "", "", "-type object")
479
+ % (main.__name__, pickle_main)
480
+ )
481
+ if not is_runtime_mod and not is_main_imported:
482
+ raise ValueError(
483
+ error_msg.format("", "-type object", " imported", "")
484
+ % (pickle_main, main.__name__)
485
+ )
486
+ if main.__name__ != pickle_main:
487
+ raise ValueError(error_msg.format("", "", "", "") % (main.__name__, pickle_main))
488
+
489
+ # This is for find_class() to be able to locate it.
490
+ if not is_main_imported:
491
+ runtime_main = '__runtime__.%s' % main.__name__
492
+ sys.modules[runtime_main] = main
493
+
494
+ loaded = unpickler.load()
495
+ finally:
496
+ if not hasattr(filename, 'read'): # if newly opened file
497
+ file.close()
498
+ try:
499
+ del sys.modules[runtime_main]
500
+ except (KeyError, NameError):
501
+ pass
502
+ assert loaded is main
503
+ _restore_modules(unpickler, main)
504
+ if main is _main_module or main is module:
505
+ return None
506
+ else:
507
+ return main
508
+
509
+ # Backward compatibility.
510
+ def load_session(filename=None, main=None, **kwds):
511
+ warnings.warn("load_session() has been renamed load_module().", PendingDeprecationWarning)
512
+ load_module(filename, module=main, **kwds)
513
+ load_session.__doc__ = load_module.__doc__
514
+
515
+ def load_module_asdict(
516
+ filename: Union[str, os.PathLike] = None,
517
+ update: bool = False,
518
+ **kwds
519
+ ) -> dict:
520
+ """
521
+ Load the contents of a saved module into a dictionary.
522
+
523
+ ``load_module_asdict()`` is the near-equivalent of::
524
+
525
+ lambda filename: vars(dill.load_module(filename)).copy()
526
+
527
+ however, it does not alter the original module. Also, the path of
528
+ the loaded module is stored in the ``__session__`` attribute.
529
+
530
+ Args:
531
+ filename: a path-like object or a readable stream. If `None`
532
+ (the default), read from a named file in a temporary directory.
533
+ update: if `True`, initialize the dictionary with the current state
534
+ of the module prior to loading the state stored at filename.
535
+ **kwds: extra keyword arguments passed to :py:class:`Unpickler()`
536
+
537
+ Raises:
538
+ :py:exc:`UnpicklingError`: if unpickling fails
539
+
540
+ Returns:
541
+ A copy of the restored module's dictionary.
542
+
543
+ Note:
544
+ If ``update`` is True, the corresponding module may first be imported
545
+ into the current namespace before the saved state is loaded from
546
+ filename to the dictionary. Note that any module that is imported into
547
+ the current namespace as a side-effect of using ``update`` will not be
548
+ modified by loading the saved module in filename to a dictionary.
549
+
550
+ Example:
551
+ >>> import dill
552
+ >>> alist = [1, 2, 3]
553
+ >>> anum = 42
554
+ >>> dill.dump_module()
555
+ >>> anum = 0
556
+ >>> new_var = 'spam'
557
+ >>> main = dill.load_module_asdict()
558
+ >>> main['__name__'], main['__session__']
559
+ ('__main__', '/tmp/session.pkl')
560
+ >>> main is globals() # loaded objects don't reference globals
561
+ False
562
+ >>> main['alist'] == alist
563
+ True
564
+ >>> main['alist'] is alist # was saved by value
565
+ False
566
+ >>> main['anum'] == anum # changed after the session was saved
567
+ False
568
+ >>> new_var in main # would be True if the option 'update' was set
569
+ False
570
+ """
571
+ if 'module' in kwds:
572
+ raise TypeError("'module' is an invalid keyword argument for load_module_asdict()")
573
+ if hasattr(filename, 'read'):
574
+ file = filename
575
+ else:
576
+ if filename is None:
577
+ filename = str(TEMPDIR/'session.pkl')
578
+ file = open(filename, 'rb')
579
+ try:
580
+ file = _make_peekable(file)
581
+ main_name = _identify_module(file)
582
+ old_main = sys.modules.get(main_name)
583
+ main = ModuleType(main_name)
584
+ if update:
585
+ if old_main is None:
586
+ old_main = _import_module(main_name)
587
+ main.__dict__.update(old_main.__dict__)
588
+ else:
589
+ main.__builtins__ = __builtin__
590
+ sys.modules[main_name] = main
591
+ load_module(file, **kwds)
592
+ finally:
593
+ if not hasattr(filename, 'read'): # if newly opened file
594
+ file.close()
595
+ try:
596
+ if old_main is None:
597
+ del sys.modules[main_name]
598
+ else:
599
+ sys.modules[main_name] = old_main
600
+ except NameError: # failed before setting old_main
601
+ pass
602
+ main.__session__ = str(filename)
603
+ return main.__dict__
604
+
605
+
606
+ # Internal exports for backward compatibility with dill v0.3.5.1
607
+ # Can't be placed in dill._dill because of circular import problems.
608
+ for name in (
609
+ '_lookup_module', '_module_map', '_restore_modules', '_stash_modules',
610
+ 'dump_session', 'load_session' # backward compatibility functions
611
+ ):
612
+ setattr(_dill, name, globals()[name])
613
+ del name
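
A small round-trip sketch for dump_module()/load_module() using an in-memory stream, per the docstrings above; the module name 'foo' and its contents are illustrative:

    import io
    from types import ModuleType
    import dill

    foo = ModuleType('foo')
    foo.values = [1, 2, 3]

    buf = io.BytesIO()
    dill.dump_module(buf, module=foo)   # pickle the module-type object to a stream
    buf.seek(0)
    bar = dill.load_module(buf)         # a new module instance is created and returned
    assert bar.values == [1, 2, 3]
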
llmeval-env/lib/python3.10/site-packages/dill/settings.py ADDED
@@ -0,0 +1,25 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ global settings for Pickler
10
+ """
11
+
12
+ from pickle import DEFAULT_PROTOCOL
13
+
14
+ settings = {
15
+ #'main' : None,
16
+ 'protocol' : DEFAULT_PROTOCOL,
17
+ 'byref' : False,
18
+ #'strictio' : False,
19
+ 'fmode' : 0, #HANDLE_FMODE
20
+ 'recurse' : False,
21
+ 'ignore' : False,
22
+ }
23
+
24
+ del DEFAULT_PROTOCOL
25
+
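
A brief sketch of how these settings are typically toggled; the keys correspond to the dict defined above and act as defaults for the Pickler (the lambda is illustrative):

    import dill

    dill.settings['recurse'] = True          # recursively trace a function's globals
    payload = dill.dumps(lambda x: x + 1)    # uses the settings defaults
    dill.settings['recurse'] = False         # restore the default
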
llmeval-env/lib/python3.10/site-packages/dill/source.py ADDED
@@ -0,0 +1,1017 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ #
9
+ # inspired by inspect.py from Python-2.7.6
10
+ # inspect.py author: 'Ka-Ping Yee <[email protected]>'
11
+ # inspect.py merged into original dill.source by Mike McKerns 4/13/14
12
+ """
13
+ Extensions to python's 'inspect' module, which can be used
14
+ to retrieve information from live python objects. The methods
15
+ defined in this module are augmented to facilitate access to
16
+ source code of interactively defined functions and classes,
17
+ as well as provide access to source code for objects defined
18
+ in a file.
19
+ """
20
+
21
+ __all__ = ['findsource', 'getsourcelines', 'getsource', 'indent', 'outdent', \
22
+ '_wrap', 'dumpsource', 'getname', '_namespace', 'getimport', \
23
+ '_importable', 'importable','isdynamic', 'isfrommain']
24
+
25
+ import linecache
26
+ import re
27
+ from inspect import (getblock, getfile, getmodule, getsourcefile, indentsize,
28
+ isbuiltin, isclass, iscode, isframe, isfunction, ismethod,
29
+ ismodule, istraceback)
30
+ from tokenize import TokenError
31
+
32
+ from ._dill import IS_IPYTHON
33
+
34
+
35
+ def isfrommain(obj):
36
+ "check if object was built in __main__"
37
+ module = getmodule(obj)
38
+ if module and module.__name__ == '__main__':
39
+ return True
40
+ return False
41
+
42
+
43
+ def isdynamic(obj):
44
+ "check if object was built in the interpreter"
45
+ try: file = getfile(obj)
46
+ except TypeError: file = None
47
+ if file == '<stdin>' and isfrommain(obj):
48
+ return True
49
+ return False
50
+
51
+
52
+ def _matchlambda(func, line):
53
+ """check if lambda object 'func' matches raw line of code 'line'"""
54
+ from .detect import code as getcode
55
+ from .detect import freevars, globalvars, varnames
56
+ dummy = lambda : '__this_is_a_big_dummy_function__'
57
+ # process the line (removing leading whitespace, etc)
58
+ lhs,rhs = line.split('lambda ',1)[-1].split(":", 1) #FIXME: if !1 inputs
59
+ try: #FIXME: unsafe
60
+ _ = eval("lambda %s : %s" % (lhs,rhs), globals(),locals())
61
+ except Exception: _ = dummy
62
+ # get code objects, for comparison
63
+ _, code = getcode(_).co_code, getcode(func).co_code
64
+ # check if func is in closure
65
+ _f = [line.count(i) for i in freevars(func).keys()]
66
+ if not _f: # not in closure
67
+ # check if code matches
68
+ if _ == code: return True
69
+ return False
70
+ # weak check on freevars
71
+ if not all(_f): return False #XXX: VERY WEAK
72
+ # weak check on varnames and globalvars
73
+ _f = varnames(func)
74
+ _f = [line.count(i) for i in _f[0]+_f[1]]
75
+ if _f and not all(_f): return False #XXX: VERY WEAK
76
+ _f = [line.count(i) for i in globalvars(func).keys()]
77
+ if _f and not all(_f): return False #XXX: VERY WEAK
78
+ # check if func is a double lambda
79
+ if (line.count('lambda ') > 1) and (lhs in freevars(func).keys()):
80
+ _lhs,_rhs = rhs.split('lambda ',1)[-1].split(":",1) #FIXME: if !1 inputs
81
+ try: #FIXME: unsafe
82
+ _f = eval("lambda %s : %s" % (_lhs,_rhs), globals(),locals())
83
+ except Exception: _f = dummy
84
+ # get code objects, for comparison
85
+ _, code = getcode(_f).co_code, getcode(func).co_code
86
+ if len(_) != len(code): return False
87
+ #NOTE: should be same code same order, but except for 't' and '\x88'
88
+ _ = set((i,j) for (i,j) in zip(_,code) if i != j)
89
+ if len(_) != 1: return False #('t','\x88')
90
+ return True
91
+ # check indentsize
92
+ if not indentsize(line): return False #FIXME: is this a good check???
93
+ # check if code 'pattern' matches
94
+ #XXX: or pattern match against dis.dis(code)? (or use uncompyle2?)
95
+ _ = _.split(_[0]) # 't' #XXX: remove matching values if starts the same?
96
+ _f = code.split(code[0]) # '\x88'
97
+ #NOTE: should be same code different order, with different first element
98
+ _ = dict(re.match(r'([\W\D\S])(.*)', _[i]).groups() for i in range(1,len(_)))
99
+ _f = dict(re.match(r'([\W\D\S])(.*)', _f[i]).groups() for i in range(1,len(_f)))
100
+ if (_.keys() == _f.keys()) and (sorted(_.values()) == sorted(_f.values())):
101
+ return True
102
+ return False
103
+
104
+
105
+ def findsource(object):
106
+ """Return the entire source file and starting line number for an object.
107
+ For interactively-defined objects, the 'file' is the interpreter's history.
108
+
109
+ The argument may be a module, class, method, function, traceback, frame,
110
+ or code object. The source code is returned as a list of all the lines
111
+ in the file and the line number indexes a line in that list. An IOError
112
+ is raised if the source code cannot be retrieved, while a TypeError is
113
+ raised for objects where the source code is unavailable (e.g. builtins)."""
114
+
115
+ module = getmodule(object)
116
+ try: file = getfile(module)
117
+ except TypeError: file = None
118
+ is_module_main = (module and module.__name__ == '__main__' and not file)
119
+ if IS_IPYTHON and is_module_main:
120
+ #FIXME: quick fix for functions and classes in IPython interpreter
121
+ try:
122
+ file = getfile(object)
123
+ sourcefile = getsourcefile(object)
124
+ except TypeError:
125
+ if isclass(object):
126
+ for object_method in filter(isfunction, object.__dict__.values()):
127
+ # look for a method of the class
128
+ file_candidate = getfile(object_method)
129
+ if not file_candidate.startswith('<ipython-input-'):
130
+ continue
131
+ file = file_candidate
132
+ sourcefile = getsourcefile(object_method)
133
+ break
134
+ if file:
135
+ lines = linecache.getlines(file)
136
+ else:
137
+ # fallback to use history
138
+ history = '\n'.join(get_ipython().history_manager.input_hist_parsed)
139
+ lines = [line + '\n' for line in history.splitlines()]
140
+ # use readline when working in interpreter (i.e. __main__ and not file)
141
+ elif is_module_main:
142
+ try:
143
+ import readline
144
+ err = ''
145
+ except ImportError:
146
+ import sys
147
+ err = sys.exc_info()[1].args[0]
148
+ if sys.platform[:3] == 'win':
149
+ err += ", please install 'pyreadline'"
150
+ if err:
151
+ raise IOError(err)
152
+ lbuf = readline.get_current_history_length()
153
+ lines = [readline.get_history_item(i)+'\n' for i in range(1,lbuf)]
154
+ else:
155
+ try: # special handling for class instances
156
+ if not isclass(object) and isclass(type(object)): # __class__
157
+ file = getfile(module)
158
+ sourcefile = getsourcefile(module)
159
+ else: # builtins fail with a TypeError
160
+ file = getfile(object)
161
+ sourcefile = getsourcefile(object)
162
+ except (TypeError, AttributeError): # fail with better error
163
+ file = getfile(object)
164
+ sourcefile = getsourcefile(object)
165
+ if not sourcefile and file[:1] + file[-1:] != '<>':
166
+ raise IOError('source code not available')
167
+ file = sourcefile if sourcefile else file
168
+
169
+ module = getmodule(object, file)
170
+ if module:
171
+ lines = linecache.getlines(file, module.__dict__)
172
+ else:
173
+ lines = linecache.getlines(file)
174
+
175
+ if not lines:
176
+ raise IOError('could not extract source code')
177
+
178
+ #FIXME: all below may fail if exec used (i.e. exec('f = lambda x:x') )
179
+ if ismodule(object):
180
+ return lines, 0
181
+
182
+ #NOTE: beneficial if search goes from end to start of buffer history
183
+ name = pat1 = obj = ''
184
+ pat2 = r'^(\s*@)'
185
+ # pat1b = r'^(\s*%s\W*=)' % name #FIXME: finds 'f = decorate(f)', not exec
186
+ if ismethod(object):
187
+ name = object.__name__
188
+ if name == '<lambda>': pat1 = r'(.*(?<!\w)lambda(:|\s))'
189
+ else: pat1 = r'^(\s*def\s)'
190
+ object = object.__func__
191
+ if isfunction(object):
192
+ name = object.__name__
193
+ if name == '<lambda>':
194
+ pat1 = r'(.*(?<!\w)lambda(:|\s))'
195
+ obj = object #XXX: better a copy?
196
+ else: pat1 = r'^(\s*def\s)'
197
+ object = object.__code__
198
+ if istraceback(object):
199
+ object = object.tb_frame
200
+ if isframe(object):
201
+ object = object.f_code
202
+ if iscode(object):
203
+ if not hasattr(object, 'co_firstlineno'):
204
+ raise IOError('could not find function definition')
205
+ stdin = object.co_filename == '<stdin>'
206
+ if stdin:
207
+ lnum = len(lines) - 1 # can't get lnum easily, so leverage pat
208
+ if not pat1: pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)'
209
+ else:
210
+ lnum = object.co_firstlineno - 1
211
+ pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)'
212
+ pat1 = re.compile(pat1); pat2 = re.compile(pat2)
213
+ #XXX: candidate_lnum = [n for n in range(lnum) if pat1.match(lines[n])]
214
+ while lnum > 0: #XXX: won't find decorators in <stdin> ?
215
+ line = lines[lnum]
216
+ if pat1.match(line):
217
+ if not stdin: break # co_firstlineno does the job
218
+ if name == '<lambda>': # hackery needed to confirm a match
219
+ if _matchlambda(obj, line): break
220
+ else: # not a lambda, just look for the name
221
+ if name in line: # need to check for decorator...
222
+ hats = 0
223
+ for _lnum in range(lnum-1,-1,-1):
224
+ if pat2.match(lines[_lnum]): hats += 1
225
+ else: break
226
+ lnum = lnum - hats
227
+ break
228
+ lnum = lnum - 1
229
+ return lines, lnum
230
+
231
+ try: # turn instances into classes
232
+ if not isclass(object) and isclass(type(object)): # __class__
233
+ object = object.__class__ #XXX: sometimes type(class) is better?
234
+ #XXX: we don't find how the instance was built
235
+ except AttributeError: pass
236
+ if isclass(object):
237
+ name = object.__name__
238
+ pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
239
+ # make some effort to find the best matching class definition:
240
+ # use the one with the least indentation, which is the one
241
+ # that's most probably not inside a function definition.
242
+ candidates = []
243
+ for i in range(len(lines)-1,-1,-1):
244
+ match = pat.match(lines[i])
245
+ if match:
246
+ # if it's at toplevel, it's already the best one
247
+ if lines[i][0] == 'c':
248
+ return lines, i
249
+ # else add whitespace to candidate list
250
+ candidates.append((match.group(1), i))
251
+ if candidates:
252
+ # this will sort by whitespace, and by line number,
253
+ # less whitespace first #XXX: should sort high lnum before low
254
+ candidates.sort()
255
+ return lines, candidates[0][1]
256
+ else:
257
+ raise IOError('could not find class definition')
258
+ raise IOError('could not find code object')
259
+
260
+
261
+ def getblocks(object, lstrip=False, enclosing=False, locate=False):
262
+ """Return a list of source lines and starting line number for an object.
263
+ Interactively-defined objects refer to lines in the interpreter's history.
264
+
265
+ If enclosing=True, then also return any enclosing code.
266
+ If lstrip=True, ensure there is no indentation in the first line of code.
267
+ If locate=True, then also return the line number for the block of code.
268
+
269
+ DEPRECATED: use 'getsourcelines' instead
270
+ """
271
+ lines, lnum = findsource(object)
272
+
273
+ if ismodule(object):
274
+ if lstrip: lines = _outdent(lines)
275
+ return ([lines], [0]) if locate is True else [lines]
276
+
277
+ #XXX: 'enclosing' means: closures only? or classes and files?
278
+ indent = indentsize(lines[lnum])
279
+ block = getblock(lines[lnum:]) #XXX: catch any TokenError here?
280
+
281
+ if not enclosing or not indent:
282
+ if lstrip: block = _outdent(block)
283
+ return ([block], [lnum]) if locate is True else [block]
284
+
285
+ pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))'; pat1 = re.compile(pat1)
286
+ pat2 = r'^(\s*@)'; pat2 = re.compile(pat2)
287
+ #pat3 = r'^(\s*class\s)'; pat3 = re.compile(pat3) #XXX: enclosing class?
288
+ #FIXME: bound methods need enclosing class (and then instantiation)
289
+ # *or* somehow apply a partial using the instance
290
+
291
+ skip = 0
292
+ line = 0
293
+ blocks = []; _lnum = []
294
+ target = ''.join(block)
295
+ while line <= lnum: #XXX: repeat lnum? or until line < lnum?
296
+ # see if starts with ('def','lambda') and contains our target block
297
+ if pat1.match(lines[line]):
298
+ if not skip:
299
+ try: code = getblock(lines[line:])
300
+ except TokenError: code = [lines[line]]
301
+ if indentsize(lines[line]) > indent: #XXX: should be >= ?
302
+ line += len(code) - skip
303
+ elif target in ''.join(code):
304
+ blocks.append(code) # save code block as the potential winner
305
+ _lnum.append(line - skip) # save the line number for the match
306
+ line += len(code) - skip
307
+ else:
308
+ line += 1
309
+ skip = 0
310
+ # find skip: the number of consecutive decorators
311
+ elif pat2.match(lines[line]):
312
+ try: code = getblock(lines[line:])
313
+ except TokenError: code = [lines[line]]
314
+ skip = 1
315
+ for _line in code[1:]: # skip lines that are decorators
316
+ if not pat2.match(_line): break
317
+ skip += 1
318
+ line += skip
319
+ # no match: reset skip and go to the next line
320
+ else:
321
+ line +=1
322
+ skip = 0
323
+
324
+ if not blocks:
325
+ blocks = [block]
326
+ _lnum = [lnum]
327
+ if lstrip: blocks = [_outdent(block) for block in blocks]
328
+ # return last match
329
+ return (blocks, _lnum) if locate is True else blocks
330
+
331
+
332
+ def getsourcelines(object, lstrip=False, enclosing=False):
333
+ """Return a list of source lines and starting line number for an object.
334
+ Interactively-defined objects refer to lines in the interpreter's history.
335
+
336
+ The argument may be a module, class, method, function, traceback, frame,
337
+ or code object. The source code is returned as a list of the lines
338
+ corresponding to the object and the line number indicates where in the
339
+ original source file the first line of code was found. An IOError is
340
+ raised if the source code cannot be retrieved, while a TypeError is
341
+ raised for objects where the source code is unavailable (e.g. builtins).
342
+
343
+ If lstrip=True, ensure there is no indentation in the first line of code.
344
+ If enclosing=True, then also return any enclosing code."""
345
+ code, n = getblocks(object, lstrip=lstrip, enclosing=enclosing, locate=True)
346
+ return code[-1], n[-1]
347
+
348
+
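+ # Hedged sketch for getsourcelines; `squares` is an arbitrary example name,
+ # and the output assumes the lambda was defined on a single line (for
+ # interactive sessions the line comes from readline history).
+ # >>> from dill.source import getsourcelines
+ # >>> squares = lambda x: x**2
+ # >>> lines, lnum = getsourcelines(squares)
+ # >>> ''.join(lines)
+ # 'squares = lambda x: x**2\n'
+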
349
+ #NOTE: broke backward compatibility 4/16/14 (was lstrip=True, force=True)
350
+ def getsource(object, alias='', lstrip=False, enclosing=False, \
351
+ force=False, builtin=False):
352
+ """Return the text of the source code for an object. The source code for
353
+ interactively-defined objects is extracted from the interpreter's history.
354
+
355
+ The argument may be a module, class, method, function, traceback, frame,
356
+ or code object. The source code is returned as a single string. An
357
+ IOError is raised if the source code cannot be retrieved, while a
358
+ TypeError is raised for objects where the source code is unavailable
359
+ (e.g. builtins).
360
+
361
+ If alias is provided, then add a line of code that renames the object.
362
+ If lstrip=True, ensure there is no indentation in the first line of code.
363
+ If enclosing=True, then also return any enclosing code.
364
+ If force=True, catch (TypeError,IOError) and try to use import hooks.
365
+ If builtin=True, force an import for any builtins
366
+ """
367
+ # hascode denotes a callable
368
+ hascode = _hascode(object)
369
+ # is a class instance type (and not in builtins)
370
+ instance = _isinstance(object)
371
+
372
+ # get source lines; if fail, try to 'force' an import
373
+ try: # fails for builtins, and other assorted object types
374
+ lines, lnum = getsourcelines(object, enclosing=enclosing)
375
+ except (TypeError, IOError): # failed to get source, resort to import hooks
376
+ if not force: # don't try to get types that findsource can't get
377
+ raise
378
+ if not getmodule(object): # get things like 'None' and '1'
379
+ if not instance: return getimport(object, alias, builtin=builtin)
380
+ # special handling (numpy arrays, ...)
381
+ _import = getimport(object, builtin=builtin)
382
+ name = getname(object, force=True)
383
+ _alias = "%s = " % alias if alias else ""
384
+ if alias == name: _alias = ""
385
+ return _import+_alias+"%s\n" % name
386
+ else: #FIXME: could use a good bit of cleanup, since using getimport...
387
+ if not instance: return getimport(object, alias, builtin=builtin)
388
+ # now we are dealing with an instance...
389
+ name = object.__class__.__name__
390
+ module = object.__module__
391
+ if module in ['builtins','__builtin__']:
392
+ return getimport(object, alias, builtin=builtin)
393
+ else: #FIXME: leverage getimport? use 'from module import name'?
394
+ lines, lnum = ["%s = __import__('%s', fromlist=['%s']).%s\n" % (name,module,name,name)], 0
395
+ obj = eval(lines[0].lstrip(name + ' = '))
396
+ lines, lnum = getsourcelines(obj, enclosing=enclosing)
397
+
398
+ # strip leading indent (helps ensure it can be imported)
399
+ if lstrip or alias:
400
+ lines = _outdent(lines)
401
+
402
+ # instantiate, if there's a nice repr #XXX: BAD IDEA???
403
+ if instance: #and force: #XXX: move into findsource or getsourcelines ?
404
+ if '(' in repr(object): lines.append('%r\n' % object)
405
+ #else: #XXX: better to somehow to leverage __reduce__ ?
406
+ # reconstructor,args = object.__reduce__()
407
+ # _ = reconstructor(*args)
408
+ else: # fall back to serialization #XXX: bad idea?
409
+ #XXX: better not duplicate work? #XXX: better new/enclose=True?
410
+ lines = dumpsource(object, alias='', new=force, enclose=False)
411
+ lines, lnum = [line+'\n' for line in lines.split('\n')][:-1], 0
412
+ #else: object.__code__ # raise AttributeError
413
+
414
+ # add an alias to the source code
415
+ if alias:
416
+ if hascode:
417
+ skip = 0
418
+ for line in lines: # skip lines that are decorators
419
+ if not line.startswith('@'): break
420
+ skip += 1
421
+ #XXX: use regex from findsource / getsourcelines ?
422
+ if lines[skip].lstrip().startswith('def '): # we have a function
423
+ if alias != object.__name__:
424
+ lines.append('\n%s = %s\n' % (alias, object.__name__))
425
+ elif 'lambda ' in lines[skip]: # we have a lambda
426
+ if alias != lines[skip].split('=')[0].strip():
427
+ lines[skip] = '%s = %s' % (alias, lines[skip])
428
+ else: # ...try to use the object's name
429
+ if alias != object.__name__:
430
+ lines.append('\n%s = %s\n' % (alias, object.__name__))
431
+ else: # class or class instance
432
+ if instance:
433
+ if alias != lines[-1].split('=')[0].strip():
434
+ lines[-1] = ('%s = ' % alias) + lines[-1]
435
+ else:
436
+ name = getname(object, force=True) or object.__name__
437
+ if alias != name:
438
+ lines.append('\n%s = %s\n' % (alias, name))
439
+ return ''.join(lines)
440
+
441
+
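+ # Hedged sketch for getsource with an alias (example names are arbitrary);
+ # the alias is appended as a rename after the recovered body.
+ # >>> from dill.source import getsource
+ # >>> def squared(x):
+ # ...     return x*x
+ # >>> print(getsource(squared, alias='sq'))
+ # def squared(x):
+ #     return x*x
+ #
+ # sq = squared
+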
442
+ def _hascode(object):
443
+ '''True if object has an attribute that stores its __code__'''
444
+ return getattr(object,'__code__',None) or getattr(object,'func_code',None)
445
+
446
+ def _isinstance(object):
447
+ '''True if object is a class instance type (and is not a builtin)'''
448
+ if _hascode(object) or isclass(object) or ismodule(object):
449
+ return False
450
+ if istraceback(object) or isframe(object) or iscode(object):
451
+ return False
452
+ # special handling (numpy arrays, ...)
453
+ if not getmodule(object) and getmodule(type(object)).__name__ in ['numpy']:
454
+ return True
455
+ # # check if is instance of a builtin
456
+ # if not getmodule(object) and getmodule(type(object)).__name__ in ['__builtin__','builtins']:
457
+ # return False
458
+ _types = ('<class ',"<type 'instance'>")
459
+ if not repr(type(object)).startswith(_types): #FIXME: weak hack
460
+ return False
461
+ if not getmodule(object) or object.__module__ in ['builtins','__builtin__'] or getname(object, force=True) in ['array']:
462
+ return False
463
+ return True # by process of elimination... it's what we want
464
+
465
+
466
+ def _intypes(object):
467
+ '''check if object is in the 'types' module'''
468
+ import types
469
+ # allow user to pass in object or object.__name__
470
+ if type(object) is not type(''):
471
+ object = getname(object, force=True)
472
+ if object == 'ellipsis': object = 'EllipsisType'
473
+ return True if hasattr(types, object) else False
474
+
475
+
476
+ def _isstring(object): #XXX: isstringlike better?
477
+ '''check if object is a string-like type'''
478
+ return isinstance(object, (str, bytes))
479
+
480
+
481
+ def indent(code, spaces=4):
482
+ '''indent a block of code with whitespace (default is 4 spaces)'''
483
+ indent = indentsize(code)
484
+ if type(spaces) is int: spaces = ' '*spaces
485
+ # if '\t' is provided, will indent with a tab
486
+ nspaces = indentsize(spaces)
487
+ # blank lines (etc) need to be ignored
488
+ lines = code.split('\n')
489
+ ## stq = "'''"; dtq = '"""'
490
+ ## in_stq = in_dtq = False
491
+ for i in range(len(lines)):
492
+ #FIXME: works... but shouldn't indent 2nd+ lines of multiline doc
493
+ _indent = indentsize(lines[i])
494
+ if indent > _indent: continue
495
+ lines[i] = spaces+lines[i]
496
+ ## #FIXME: may fail when stq and dtq in same line (depends on ordering)
497
+ ## nstq, ndtq = lines[i].count(stq), lines[i].count(dtq)
498
+ ## if not in_dtq and not in_stq:
499
+ ## lines[i] = spaces+lines[i] # we indent
500
+ ## # entering a comment block
501
+ ## if nstq%2: in_stq = not in_stq
502
+ ## if ndtq%2: in_dtq = not in_dtq
503
+ ## # leaving a comment block
504
+ ## elif in_dtq and ndtq%2: in_dtq = not in_dtq
505
+ ## elif in_stq and nstq%2: in_stq = not in_stq
506
+ ## else: pass
507
+ if lines[-1].strip() == '': lines[-1] = ''
508
+ return '\n'.join(lines)
509
+
510
+
511
+ def _outdent(lines, spaces=None, all=True):
512
+ '''outdent lines of code, accounting for docs and line continuations'''
513
+ indent = indentsize(lines[0])
514
+ if spaces is None or spaces > indent or spaces < 0: spaces = indent
515
+ for i in range(len(lines) if all else 1):
516
+ #FIXME: works... but shouldn't outdent 2nd+ lines of multiline doc
517
+ _indent = indentsize(lines[i])
518
+ if spaces > _indent: _spaces = _indent
519
+ else: _spaces = spaces
520
+ lines[i] = lines[i][_spaces:]
521
+ return lines
522
+
523
+ def outdent(code, spaces=None, all=True):
524
+ '''outdent a block of code (default is to strip all leading whitespace)'''
525
+ indent = indentsize(code)
526
+ if spaces is None or spaces > indent or spaces < 0: spaces = indent
527
+ #XXX: will this delete '\n' in some cases?
528
+ if not all: return code[spaces:]
529
+ return '\n'.join(_outdent(code.split('\n'), spaces=spaces, all=all))
530
+
531
+
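+ # Hedged round-trip sketch for indent/outdent (assumes the default 4-space
+ # indent; `code` is an arbitrary example string).
+ # >>> from dill.source import indent, outdent
+ # >>> code = "def f():\n    return 1\n"
+ # >>> print(indent(code))
+ #     def f():
+ #         return 1
+ # >>> outdent(indent(code)) == code
+ # True
+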
532
+ #XXX: not sure what the point of _wrap is...
533
+ __globals__ = globals()
534
+ __locals__ = locals()
535
+ def _wrap(f):
536
+ """ encapsulate a function and it's __import__ """
537
+ def func(*args, **kwds):
538
+ try:
539
+ # _ = eval(getsource(f, force=True)) #XXX: safer but less robust
540
+ exec(getimportable(f, alias='_'), __globals__, __locals__)
541
+ except Exception:
542
+ raise ImportError('cannot import name ' + f.__name__)
543
+ return _(*args, **kwds)
544
+ func.__name__ = f.__name__
545
+ func.__doc__ = f.__doc__
546
+ return func
547
+
548
+
549
+ def _enclose(object, alias=''): #FIXME: needs alias to hold returned object
550
+ """create a function enclosure around the source of some object"""
551
+ #XXX: dummy and stub should append a random string
552
+ dummy = '__this_is_a_big_dummy_enclosing_function__'
553
+ stub = '__this_is_a_stub_variable__'
554
+ code = 'def %s():\n' % dummy
555
+ code += indent(getsource(object, alias=stub, lstrip=True, force=True))
556
+ code += indent('return %s\n' % stub)
557
+ if alias: code += '%s = ' % alias
558
+ code += '%s(); del %s\n' % (dummy, dummy)
559
+ #code += "globals().pop('%s',lambda :None)()\n" % dummy
560
+ return code
561
+
562
+
563
+ def dumpsource(object, alias='', new=False, enclose=True):
564
+ """'dump to source', where the code includes a pickled object.
565
+
566
+ If new=True and object is a class instance, then create a new
567
+ instance using the unpacked class source code. If enclose, then
568
+ create the object inside a function enclosure (thus minimizing
569
+ any global namespace pollution).
570
+ """
571
+ from dill import dumps
572
+ pik = repr(dumps(object))
573
+ code = 'import dill\n'
574
+ if enclose:
575
+ stub = '__this_is_a_stub_variable__' #XXX: *must* be same _enclose.stub
576
+ pre = '%s = ' % stub
577
+ new = False #FIXME: new=True doesn't work with enclose=True
578
+ else:
579
+ stub = alias
580
+ pre = '%s = ' % stub if alias else alias
581
+
582
+ # if a 'new' instance is not needed, then just dump and load
583
+ if not new or not _isinstance(object):
584
+ code += pre + 'dill.loads(%s)\n' % pik
585
+ else: #XXX: other cases where source code is needed???
586
+ code += getsource(object.__class__, alias='', lstrip=True, force=True)
587
+ mod = repr(object.__module__) # should have a module (no builtins here)
588
+ code += pre + 'dill.loads(%s.replace(b%s,bytes(__name__,"UTF-8")))\n' % (pik,mod)
589
+ #code += 'del %s' % object.__class__.__name__ #NOTE: kills any existing!
590
+
591
+ if enclose:
592
+ # generation of the 'enclosure'
593
+ dummy = '__this_is_a_big_dummy_object__'
594
+ dummy = _enclose(dummy, alias=alias)
595
+ # hack to replace the 'dummy' with the 'real' code
596
+ dummy = dummy.split('\n')
597
+ code = dummy[0]+'\n' + indent(code) + '\n'.join(dummy[-3:])
598
+
599
+ return code #XXX: better 'dumpsourcelines', returning list of lines?
600
+
601
+
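+ # Hedged sketch for dumpsource: the generated code unpickles an embedded
+ # payload when exec'd (`data` and `ns` are arbitrary example names).
+ # >>> from dill.source import dumpsource
+ # >>> src = dumpsource([1, 2, 3], alias='data', enclose=False)
+ # >>> ns = {}
+ # >>> exec(src, ns)
+ # >>> ns['data']
+ # [1, 2, 3]
+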
602
+ def getname(obj, force=False, fqn=False): #XXX: throw(?) to raise error on fail?
603
+ """get the name of the object. for lambdas, get the name of the pointer """
604
+ if fqn: return '.'.join(_namespace(obj))
605
+ module = getmodule(obj)
606
+ if not module: # things like "None" and "1"
607
+ if not force: return None
608
+ return repr(obj)
609
+ try:
610
+ #XXX: 'wrong' for decorators and curried functions ?
611
+ # if obj.func_closure: ...use logic from getimportable, etc ?
612
+ name = obj.__name__
613
+ if name == '<lambda>':
614
+ return getsource(obj).split('=',1)[0].strip()
615
+ # handle some special cases
616
+ if module.__name__ in ['builtins','__builtin__']:
617
+ if name == 'ellipsis': name = 'EllipsisType'
618
+ return name
619
+ except AttributeError: #XXX: better to just throw AttributeError ?
620
+ if not force: return None
621
+ name = repr(obj)
622
+ if name.startswith('<'): # or name.split('('):
623
+ return None
624
+ return name
625
+
626
+
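+ # Sketch for getname; fqn=True routes through _namespace for a dotted path.
+ # >>> from dill.source import getname
+ # >>> from functools import partial
+ # >>> getname(partial)
+ # 'partial'
+ # >>> getname(partial, fqn=True)
+ # 'functools.partial'
+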
627
+ def _namespace(obj):
628
+ """_namespace(obj); return namespace hierarchy (as a list of names)
629
+ for the given object. For an instance, find the class hierarchy.
630
+
631
+ For example:
632
+
633
+ >>> from functools import partial
634
+ >>> p = partial(int, base=2)
635
+ >>> _namespace(p)
636
+ [\'functools\', \'partial\']
637
+ """
638
+ # mostly for functions and modules and such
639
+ #FIXME: 'wrong' for decorators and curried functions
640
+ try: #XXX: needs some work and testing on different types
641
+ module = qual = str(getmodule(obj)).split()[1].strip('>').strip('"').strip("'")
642
+ qual = qual.split('.')
643
+ if ismodule(obj):
644
+ return qual
645
+ # get name of a lambda, function, etc
646
+ name = getname(obj) or obj.__name__ # failing, raise AttributeError
647
+ # check special cases (NoneType, ...)
648
+ if module in ['builtins','__builtin__']: # BuiltinFunctionType
649
+ if _intypes(name): return ['types'] + [name]
650
+ return qual + [name] #XXX: can be wrong for some aliased objects
651
+ except Exception: pass
652
+ # special case: numpy.inf and numpy.nan (we don't want them as floats)
653
+ if str(obj) in ['inf','nan','Inf','NaN']: # is more, but are they needed?
654
+ return ['numpy'] + [str(obj)]
655
+ # mostly for classes and class instances and such
656
+ module = getattr(obj.__class__, '__module__', None)
657
+ qual = str(obj.__class__)
658
+ try: qual = qual[qual.index("'")+1:-2]
659
+ except ValueError: pass # str(obj.__class__) made the 'try' unnecessary
660
+ qual = qual.split(".")
661
+ if module in ['builtins','__builtin__']:
662
+ # check special cases (NoneType, Ellipsis, ...)
663
+ if qual[-1] == 'ellipsis': qual[-1] = 'EllipsisType'
664
+ if _intypes(qual[-1]): module = 'types' #XXX: BuiltinFunctionType
665
+ qual = [module] + qual
666
+ return qual
667
+
668
+
669
+ #NOTE: 05/25/14 broke backward compatibility: added 'alias' as 3rd argument
670
+ def _getimport(head, tail, alias='', verify=True, builtin=False):
671
+ """helper to build a likely import string from head and tail of namespace.
672
+ ('head','tail') are used in the following context: "from head import tail"
673
+
674
+ If verify=True, then test the import string before returning it.
675
+ If builtin=True, then force an import for builtins where possible.
676
+ If alias is provided, then rename the object on import.
677
+ """
678
+ # special handling for a few common types
679
+ if tail in ['Ellipsis', 'NotImplemented'] and head in ['types']:
680
+ head = len.__module__
681
+ elif tail in ['None'] and head in ['types']:
682
+ _alias = '%s = ' % alias if alias else ''
683
+ if alias == tail: _alias = ''
684
+ return _alias+'%s\n' % tail
685
+ # we don't need to import from builtins, so return ''
686
+ # elif tail in ['NoneType','int','float','long','complex']: return '' #XXX: ?
687
+ if head in ['builtins','__builtin__']:
688
+ # special cases (NoneType, Ellipsis, ...) #XXX: BuiltinFunctionType
689
+ if tail == 'ellipsis': tail = 'EllipsisType'
690
+ if _intypes(tail): head = 'types'
691
+ elif not builtin:
692
+ _alias = '%s = ' % alias if alias else ''
693
+ if alias == tail: _alias = ''
694
+ return _alias+'%s\n' % tail
695
+ else: pass # handle builtins below
696
+ # get likely import string
697
+ if not head: _str = "import %s" % tail
698
+ else: _str = "from %s import %s" % (head, tail)
699
+ _alias = " as %s\n" % alias if alias else "\n"
700
+ if alias == tail: _alias = "\n"
701
+ _str += _alias
702
+ # FIXME: fails on most decorators, currying, and such...
703
+ # (could look for magic __wrapped__ or __func__ attr)
704
+ # (could fix in 'namespace' to check obj for closure)
705
+ if verify and not head.startswith('dill.'):# weird behavior for dill
706
+ #print(_str)
707
+ try: exec(_str) #XXX: check if == obj? (name collision)
708
+ except ImportError: #XXX: better top-down or bottom-up recursion?
709
+ _head = head.rsplit(".",1)[0] #(or get all, then compare == obj?)
710
+ if not _head: raise
711
+ if _head != head:
712
+ _str = _getimport(_head, tail, alias, verify)
713
+ return _str
714
+
715
+
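+ # Hedged sketch for _getimport: it builds "from head import tail [as alias]"
+ # (or a bare "import tail" when head is empty) and, with verify=True, exec's
+ # the statement as a sanity check.
+ # >>> from dill.source import _getimport
+ # >>> _getimport('collections', 'OrderedDict', alias='OD')
+ # 'from collections import OrderedDict as OD\n'
+ # >>> _getimport('', 'collections')
+ # 'import collections\n'
+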
716
+ #XXX: rename builtin to force? vice versa? verify to force? (as in getsource)
717
+ #NOTE: 05/25/14 broke backward compatibility: added 'alias' as 2nd argument
718
+ def getimport(obj, alias='', verify=True, builtin=False, enclosing=False):
719
+ """get the likely import string for the given object
720
+
721
+ obj is the object to inspect
722
+ If verify=True, then test the import string before returning it.
723
+ If builtin=True, then force an import for builtins where possible.
724
+ If enclosing=True, get the import for the outermost enclosing callable.
725
+ If alias is provided, then rename the object on import.
726
+ """
727
+ if enclosing:
728
+ from .detect import outermost
729
+ _obj = outermost(obj)
730
+ obj = _obj if _obj else obj
731
+ # get the namespace
732
+ qual = _namespace(obj)
733
+ head = '.'.join(qual[:-1])
734
+ tail = qual[-1]
735
+ # for named things... with a nice repr #XXX: move into _namespace?
736
+ try: # look for '<...>' and be mindful it might be in lists, dicts, etc...
737
+ name = repr(obj).split('<',1)[1].split('>',1)[1]
738
+ name = None # we have a 'object'-style repr
739
+ except Exception: # it's probably something 'importable'
740
+ if head in ['builtins','__builtin__']:
741
+ name = repr(obj) #XXX: catch [1,2], (1,2), set([1,2])... others?
742
+ else:
743
+ name = repr(obj).split('(')[0]
744
+ #if not repr(obj).startswith('<'): name = repr(obj).split('(')[0]
745
+ #else: name = None
746
+ if name: # try using name instead of tail
747
+ try: return _getimport(head, name, alias, verify, builtin)
748
+ except ImportError: pass
749
+ except SyntaxError:
750
+ if head in ['builtins','__builtin__']:
751
+ _alias = '%s = ' % alias if alias else ''
752
+ if alias == name: _alias = ''
753
+ return _alias+'%s\n' % name
754
+ else: pass
755
+ try:
756
+ #if type(obj) is type(abs): _builtin = builtin # BuiltinFunctionType
757
+ #else: _builtin = False
758
+ return _getimport(head, tail, alias, verify, builtin)
759
+ except ImportError:
760
+ raise # could do some checking against obj
761
+ except SyntaxError:
762
+ if head in ['builtins','__builtin__']:
763
+ _alias = '%s = ' % alias if alias else ''
764
+ if alias == tail: _alias = ''
765
+ return _alias+'%s\n' % tail
766
+ raise # could do some checking against obj
767
+
768
+
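+ # Hedged sketch for getimport: builtins come back as a bare name unless
+ # builtin=True forces an explicit import statement.
+ # >>> from dill.source import getimport
+ # >>> from collections import OrderedDict
+ # >>> getimport(OrderedDict)
+ # 'from collections import OrderedDict\n'
+ # >>> getimport(sum)
+ # 'sum\n'
+ # >>> getimport(sum, builtin=True)
+ # 'from builtins import sum\n'
+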
769
+ def _importable(obj, alias='', source=None, enclosing=False, force=True, \
770
+ builtin=True, lstrip=True):
771
+ """get an import string (or the source code) for the given object
772
+
773
+ This function will attempt to discover the name of the object, or the repr
774
+ of the object, or the source code for the object. To attempt to force
775
+ discovery of the source code, use source=True, to attempt to force the
776
+ use of an import, use source=False; otherwise an import will be sought
777
+ for objects not defined in __main__. The intent is to build a string
778
+ that can be imported from a python file. obj is the object to inspect.
779
+ If alias is provided, then rename the object with the given alias.
780
+
781
+ If source=True, use these options:
782
+ If enclosing=True, then also return any enclosing code.
783
+ If force=True, catch (TypeError,IOError) and try to use import hooks.
784
+ If lstrip=True, ensure there is no indentation in the first line of code.
785
+
786
+ If source=False, use these options:
787
+ If enclosing=True, get the import for the outermost enclosing callable.
788
+ If force=True, then don't test the import string before returning it.
789
+ If builtin=True, then force an import for builtins where possible.
790
+ """
791
+ if source is None:
792
+ source = True if isfrommain(obj) else False
793
+ if source: # first try to get the source
794
+ try:
795
+ return getsource(obj, alias, enclosing=enclosing, \
796
+ force=force, lstrip=lstrip, builtin=builtin)
797
+ except Exception: pass
798
+ try:
799
+ if not _isinstance(obj):
800
+ return getimport(obj, alias, enclosing=enclosing, \
801
+ verify=(not force), builtin=builtin)
802
+ # first 'get the import', then 'get the instance'
803
+ _import = getimport(obj, enclosing=enclosing, \
804
+ verify=(not force), builtin=builtin)
805
+ name = getname(obj, force=True)
806
+ if not name:
807
+ raise AttributeError("object has no atribute '__name__'")
808
+ _alias = "%s = " % alias if alias else ""
809
+ if alias == name: _alias = ""
810
+ return _import+_alias+"%s\n" % name
811
+
812
+ except Exception: pass
813
+ if not source: # try getsource, only if it hasn't been tried yet
814
+ try:
815
+ return getsource(obj, alias, enclosing=enclosing, \
816
+ force=force, lstrip=lstrip, builtin=builtin)
817
+ except Exception: pass
818
+ # get the name (of functions, lambdas, and classes)
819
+ # or hope that obj can be built from the __repr__
820
+ #XXX: what to do about class instances and such?
821
+ obj = getname(obj, force=force)
822
+ # we either have __repr__ or __name__ (or None)
823
+ if not obj or obj.startswith('<'):
824
+ raise AttributeError("object has no atribute '__name__'")
825
+ _alias = '%s = ' % alias if alias else ''
826
+ if alias == obj: _alias = ''
827
+ return _alias+'%s\n' % obj
828
+ #XXX: possible failsafe... (for example, for instances when source=False)
829
+ # "import dill; result = dill.loads(<pickled_object>); # repr(<object>)"
830
+
831
+ def _closuredimport(func, alias='', builtin=False):
832
+ """get import for closured objects; return a dict of 'name' and 'import'"""
833
+ import re
834
+ from .detect import freevars, outermost
835
+ free_vars = freevars(func)
836
+ func_vars = {}
837
+ # split into 'funcs' and 'non-funcs'
838
+ for name,obj in list(free_vars.items()):
839
+ if not isfunction(obj): continue
840
+ # get import for 'funcs'
841
+ fobj = free_vars.pop(name)
842
+ src = getsource(fobj)
843
+ if src.lstrip().startswith('@'): # we have a decorator
844
+ src = getimport(fobj, alias=alias, builtin=builtin)
845
+ else: # we have to "hack" a bit... and maybe be lucky
846
+ encl = outermost(func)
847
+ # pattern: 'func = enclosing(fobj'
848
+ pat = r'.*[\w\s]=\s*'+getname(encl)+r'\('+getname(fobj)
849
+ mod = getname(getmodule(encl))
850
+ #HACK: get file containing 'outer' function; is func there?
851
+ lines,_ = findsource(encl)
852
+ candidate = [line for line in lines if getname(encl) in line and \
853
+ re.match(pat, line)]
854
+ if not candidate:
855
+ mod = getname(getmodule(fobj))
856
+ #HACK: get file containing 'inner' function; is func there?
857
+ lines,_ = findsource(fobj)
858
+ candidate = [line for line in lines \
859
+ if getname(fobj) in line and re.match(pat, line)]
860
+ if not len(candidate): raise TypeError('import could not be found')
861
+ candidate = candidate[-1]
862
+ name = candidate.split('=',1)[0].split()[-1].strip()
863
+ src = _getimport(mod, name, alias=alias, builtin=builtin)
864
+ func_vars[name] = src
865
+ if not func_vars:
866
+ name = outermost(func)
867
+ mod = getname(getmodule(name))
868
+ if not mod or name is func: # then it can be handled by getimport
869
+ name = getname(func, force=True) #XXX: better key?
870
+ src = getimport(func, alias=alias, builtin=builtin)
871
+ else:
872
+ lines,_ = findsource(name)
873
+ # pattern: 'func = enclosing('
874
+ candidate = [line for line in lines if getname(name) in line and \
875
+ re.match(r'.*[\w\s]=\s*'+getname(name)+r'\(', line)]
876
+ if not len(candidate): raise TypeError('import could not be found')
877
+ candidate = candidate[-1]
878
+ name = candidate.split('=',1)[0].split()[-1].strip()
879
+ src = _getimport(mod, name, alias=alias, builtin=builtin)
880
+ func_vars[name] = src
881
+ return func_vars
882
+
883
+ #XXX: should be able to use __qualname__
884
+ def _closuredsource(func, alias=''):
885
+ """get source code for closured objects; return a dict of 'name'
886
+ and 'code blocks'"""
887
+ #FIXME: this entire function is a messy messy HACK
888
+ # - pollutes global namespace
889
+ # - fails if name of freevars are reused
890
+ # - can unnecessarily duplicate function code
891
+ from .detect import freevars
892
+ free_vars = freevars(func)
893
+ func_vars = {}
894
+ # split into 'funcs' and 'non-funcs'
895
+ for name,obj in list(free_vars.items()):
896
+ if not isfunction(obj):
897
+ # get source for 'non-funcs'
898
+ free_vars[name] = getsource(obj, force=True, alias=name)
899
+ continue
900
+ # get source for 'funcs'
901
+ fobj = free_vars.pop(name)
902
+ src = getsource(fobj, alias) # DO NOT include dependencies
903
+ # if source doesn't start with '@', use name as the alias
904
+ if not src.lstrip().startswith('@'): #FIXME: 'enclose' in dummy;
905
+ src = importable(fobj,alias=name)# wrong ref 'name'
906
+ org = getsource(func, alias, enclosing=False, lstrip=True)
907
+ src = (src, org) # undecorated first, then target
908
+ else: #NOTE: reproduces the code!
909
+ org = getsource(func, enclosing=True, lstrip=False)
910
+ src = importable(fobj, alias, source=True) # include dependencies
911
+ src = (org, src) # target first, then decorated
912
+ func_vars[name] = src
913
+ src = ''.join(free_vars.values())
914
+ if not func_vars: #FIXME: 'enclose' in dummy; wrong ref 'name'
915
+ org = getsource(func, alias, force=True, enclosing=False, lstrip=True)
916
+ src = (src, org) # variables first, then target
917
+ else:
918
+ src = (src, None) # just variables (better '' instead of None?)
919
+ func_vars[None] = src
920
+ # FIXME: remove duplicates (however, order is important...)
921
+ return func_vars
922
+
923
+ def importable(obj, alias='', source=None, builtin=True):
924
+ """get an importable string (i.e. source code or the import string)
925
+ for the given object, including any required objects from the enclosing
926
+ and global scope
927
+
928
+ This function will attempt to discover the name of the object, or the repr
929
+ of the object, or the source code for the object. To attempt to force
930
+ discovery of the source code, use source=True, to attempt to force the
931
+ use of an import, use source=False; otherwise an import will be sought
932
+ for objects not defined in __main__. The intent is to build a string
933
+ that can be imported from a python file.
934
+
935
+ obj is the object to inspect. If alias is provided, then rename the
936
+ object with the given alias. If builtin=True, then force an import for
937
+ builtins where possible.
938
+ """
939
+ #NOTE: we always 'force', and 'lstrip' as necessary
940
+ #NOTE: for 'enclosing', use importable(outermost(obj))
941
+ if source is None:
942
+ source = True if isfrommain(obj) else False
943
+ elif builtin and isbuiltin(obj):
944
+ source = False
945
+ tried_source = tried_import = False
946
+ while True:
947
+ if not source: # we want an import
948
+ try:
949
+ if _isinstance(obj): # for instances, punt to _importable
950
+ return _importable(obj, alias, source=False, builtin=builtin)
951
+ src = _closuredimport(obj, alias=alias, builtin=builtin)
952
+ if len(src) == 0:
953
+ raise NotImplementedError('not implemented')
954
+ if len(src) > 1:
955
+ raise NotImplementedError('not implemented')
956
+ return list(src.values())[0]
957
+ except Exception:
958
+ if tried_source: raise
959
+ tried_import = True
960
+ # we want the source
961
+ try:
962
+ src = _closuredsource(obj, alias=alias)
963
+ if len(src) == 0:
964
+ raise NotImplementedError('not implemented')
965
+ # groan... an inline code stitcher
966
+ def _code_stitcher(block):
967
+ "stitch together the strings in tuple 'block'"
968
+ if block[0] and block[-1]: block = '\n'.join(block)
969
+ elif block[0]: block = block[0]
970
+ elif block[-1]: block = block[-1]
971
+ else: block = ''
972
+ return block
973
+ # get free_vars first
974
+ _src = _code_stitcher(src.pop(None))
975
+ _src = [_src] if _src else []
976
+ # get func_vars
977
+ for xxx in src.values():
978
+ xxx = _code_stitcher(xxx)
979
+ if xxx: _src.append(xxx)
980
+ # make a single source string
981
+ if not len(_src):
982
+ src = ''
983
+ elif len(_src) == 1:
984
+ src = _src[0]
985
+ else:
986
+ src = '\n'.join(_src)
987
+ # get source code of objects referred to by obj in global scope
988
+ from .detect import globalvars
989
+ obj = globalvars(obj) #XXX: don't worry about alias? recurse? etc?
990
+ obj = list(getsource(_obj,name,force=True) for (name,_obj) in obj.items() if not isbuiltin(_obj))
991
+ obj = '\n'.join(obj) if obj else ''
992
+ # combine all referred-to source (global then enclosing)
993
+ if not obj: return src
994
+ if not src: return obj
995
+ return obj + src
996
+ except Exception:
997
+ if tried_import: raise
998
+ tried_source = True
999
+ source = not source
1000
+ # should never get here
1001
+ return
1002
+
1003
+
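+ # Hedged sketch for importable: objects defined in __main__ yield their
+ # source, while objects from installed modules yield an import line
+ # (`add` is an arbitrary example function; closures and decorators take
+ # the more involved paths above).
+ # >>> from dill.source import importable
+ # >>> def add(x, y):
+ # ...     return x + y
+ # >>> importable(add)
+ # 'def add(x, y):\n    return x + y\n'
+ # >>> from datetime import date
+ # >>> importable(date)
+ # 'from datetime import date\n'
+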
1004
+ # backward compatibility
1005
+ def getimportable(obj, alias='', byname=True, explicit=False):
1006
+ return importable(obj,alias,source=(not byname),builtin=explicit)
1007
+ #return outdent(_importable(obj,alias,source=(not byname),builtin=explicit))
1008
+ def likely_import(obj, passive=False, explicit=False):
1009
+ return getimport(obj, verify=(not passive), builtin=explicit)
1010
+ def _likely_import(first, last, passive=False, explicit=True):
1011
+ return _getimport(first, last, verify=(not passive), builtin=explicit)
1012
+ _get_name = getname
1013
+ getblocks_from_history = getblocks
1014
+
1015
+
1016
+
1017
+ # EOF
llmeval-env/lib/python3.10/site-packages/dill/tests/__init__.py ADDED
@@ -0,0 +1,22 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2018-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+ """
8
+ to run this test suite, first build and install `dill`.
9
+
10
+ $ python -m pip install ../..
11
+
12
+
13
+ then run the tests with:
14
+
15
+ $ python -m dill.tests
16
+
17
+
18
+ or, if `nose` is installed:
19
+
20
+ $ nosetests
21
+
22
+ """
llmeval-env/lib/python3.10/site-packages/dill/tests/__main__.py ADDED
@@ -0,0 +1,35 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2018-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+
8
+ import glob
9
+ import os
10
+ import sys
11
+ import subprocess as sp
12
+ python = sys.executable
13
+ try:
14
+ import pox
15
+ python = pox.which_python(version=True) or python
16
+ except ImportError:
17
+ pass
18
+ shell = sys.platform[:3] == 'win'
19
+
20
+ suite = os.path.dirname(__file__) or os.path.curdir
21
+ tests = glob.glob(suite + os.path.sep + 'test_*.py')
22
+
23
+
24
+ if __name__ == '__main__':
25
+
26
+ failed = 0
27
+ for test in tests:
28
+ p = sp.Popen([python, test], shell=shell).wait()
29
+ if p:
30
+ print('F', end='', flush=True)
31
+ failed = 1
32
+ else:
33
+ print('.', end='', flush=True)
34
+ print('')
35
+ exit(failed)
llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc ADDED
Binary file (1.04 kB). View file
 
llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_fglobals.cpython-310.pyc ADDED
Binary file (1.89 kB). View file
 
llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc ADDED
Binary file (977 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_moduledict.cpython-310.pyc ADDED
Binary file (1.73 kB). View file
 
llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_session.cpython-310.pyc ADDED
Binary file (8.19 kB). View file
 
llmeval-env/lib/python3.10/site-packages/dill/tests/__pycache__/test_source.cpython-310.pyc ADDED
Binary file (5.32 kB). View file
 
llmeval-env/lib/python3.10/site-packages/dill/tests/test_abc.py ADDED
@@ -0,0 +1,169 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2023-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+ """
8
+ test dill's ability to pickle abstract base class objects
9
+ """
10
+ import dill
11
+ import abc
12
+ from abc import ABC
13
+ import warnings
14
+
15
+ from types import FunctionType
16
+
17
+ dill.settings['recurse'] = True
18
+
19
+ class OneTwoThree(ABC):
20
+ @abc.abstractmethod
21
+ def foo(self):
22
+ """A method"""
23
+ pass
24
+
25
+ @property
26
+ @abc.abstractmethod
27
+ def bar(self):
28
+ """Property getter"""
29
+ pass
30
+
31
+ @bar.setter
32
+ @abc.abstractmethod
33
+ def bar(self, value):
34
+ """Property setter"""
35
+ pass
36
+
37
+ @classmethod
38
+ @abc.abstractmethod
39
+ def cfoo(cls):
40
+ """Class method"""
41
+ pass
42
+
43
+ @staticmethod
44
+ @abc.abstractmethod
45
+ def sfoo():
46
+ """Static method"""
47
+ pass
48
+
49
+ class EasyAsAbc(OneTwoThree):
50
+ def __init__(self):
51
+ self._bar = None
52
+
53
+ def foo(self):
54
+ return "Instance Method FOO"
55
+
56
+ @property
57
+ def bar(self):
58
+ return self._bar
59
+
60
+ @bar.setter
61
+ def bar(self, value):
62
+ self._bar = value
63
+
64
+ @classmethod
65
+ def cfoo(cls):
66
+ return "Class Method CFOO"
67
+
68
+ @staticmethod
69
+ def sfoo():
70
+ return "Static Method SFOO"
71
+
72
+ def test_abc_non_local():
73
+ assert dill.copy(OneTwoThree) is not OneTwoThree
74
+ assert dill.copy(EasyAsAbc) is not EasyAsAbc
75
+
76
+ with warnings.catch_warnings():
77
+ warnings.simplefilter("ignore", dill.PicklingWarning)
78
+ assert dill.copy(OneTwoThree, byref=True) is OneTwoThree
79
+ assert dill.copy(EasyAsAbc, byref=True) is EasyAsAbc
80
+
81
+ instance = EasyAsAbc()
82
+ # Set a property that StockPickle can't preserve
83
+ instance.bar = lambda x: x**2
84
+ depickled = dill.copy(instance)
85
+ assert type(depickled) is type(instance) #NOTE: issue #612, test_abc_local
86
+ #NOTE: dill.copy of local (or non-local) classes should (not) be the same?
87
+ assert type(depickled.bar) is FunctionType
88
+ assert depickled.bar(3) == 9
89
+ assert depickled.sfoo() == "Static Method SFOO"
90
+ assert depickled.cfoo() == "Class Method CFOO"
91
+ assert depickled.foo() == "Instance Method FOO"
92
+
93
+ def test_abc_local():
94
+ """
95
+ Test using locally scoped ABC class
96
+ """
97
+ class LocalABC(ABC):
98
+ @abc.abstractmethod
99
+ def foo(self):
100
+ pass
101
+
102
+ def baz(self):
103
+ return repr(self)
104
+
105
+ labc = dill.copy(LocalABC)
106
+ assert labc is not LocalABC
107
+ assert type(labc) is type(LocalABC)
108
+ #NOTE: dill.copy of local (or non-local) classes should (not) be the same?
109
+ # <class '__main__.LocalABC'>
110
+ # <class '__main__.test_abc_local.<locals>.LocalABC'>
111
+
112
+ class Real(labc):
113
+ def foo(self):
114
+ return "True!"
115
+
116
+ def baz(self):
117
+ return "My " + super(Real, self).baz()
118
+
119
+ real = Real()
120
+ assert real.foo() == "True!"
121
+
122
+ try:
123
+ labc()
124
+ except TypeError as e:
125
+ # Expected error
126
+ pass
127
+ else:
128
+ print('Failed to raise type error')
129
+ assert False
130
+
131
+ labc2, pik = dill.copy((labc, Real()))
132
+ assert 'Real' == type(pik).__name__
133
+ assert '.Real' in type(pik).__qualname__
134
+ assert type(pik) is not Real
135
+ assert labc2 is not LocalABC
136
+ assert labc2 is not labc
137
+ assert isinstance(pik, labc2)
138
+ assert not isinstance(pik, labc)
139
+ assert not isinstance(pik, LocalABC)
140
+ assert pik.baz() == "My " + repr(pik)
141
+
142
+ def test_meta_local_no_cache():
143
+ """
144
+ Test calling metaclass and cache registration
145
+ """
146
+ LocalMetaABC = abc.ABCMeta('LocalMetaABC', (), {})
147
+
148
+ class ClassyClass:
149
+ pass
150
+
151
+ class KlassyClass:
152
+ pass
153
+
154
+ LocalMetaABC.register(ClassyClass)
155
+
156
+ assert not issubclass(KlassyClass, LocalMetaABC)
157
+ assert issubclass(ClassyClass, LocalMetaABC)
158
+
159
+ res = dill.dumps((LocalMetaABC, ClassyClass, KlassyClass))
160
+
161
+ lmabc, cc, kc = dill.loads(res)
162
+ assert type(lmabc) == type(LocalMetaABC)
163
+ assert not issubclass(kc, lmabc)
164
+ assert issubclass(cc, lmabc)
165
+
166
+ if __name__ == '__main__':
167
+ test_abc_non_local()
168
+ test_abc_local()
169
+ test_meta_local_no_cache()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_dataclasses.py ADDED
@@ -0,0 +1,35 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Author: Anirudh Vegesana ([email protected])
5
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ test pickling a dataclass
10
+ """
11
+
12
+ import dill
13
+ import dataclasses
14
+
15
+ def test_dataclasses():
16
+ # Issue #500
17
+ @dataclasses.dataclass
18
+ class A:
19
+ x: int
20
+ y: str
21
+
22
+ @dataclasses.dataclass
23
+ class B:
24
+ a: A
25
+
26
+ a = A(1, "test")
27
+ before = B(a)
28
+ save = dill.dumps(before)
29
+ after = dill.loads(save)
30
+ assert before != after # classes don't match
31
+ assert before == B(A(**dataclasses.asdict(after.a)))
32
+ assert dataclasses.asdict(before) == dataclasses.asdict(after)
33
+
34
+ if __name__ == '__main__':
35
+ test_dataclasses()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_dictviews.py ADDED
@@ -0,0 +1,39 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Author: Anirudh Vegesana ([email protected])
5
+ # Copyright (c) 2021-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+ from dill._dill import OLD310, MAPPING_PROXY_TRICK, DictProxyType
11
+
12
+ def test_dictproxy():
13
+ assert dill.copy(DictProxyType({'a': 2}))
14
+
15
+ def test_dictviews():
16
+ x = {'a': 1}
17
+ assert dill.copy(x.keys())
18
+ assert dill.copy(x.values())
19
+ assert dill.copy(x.items())
20
+
21
+ def test_dictproxy_trick():
22
+ if not OLD310 and MAPPING_PROXY_TRICK:
23
+ x = {'a': 1}
24
+ all_views = (x.values(), x.items(), x.keys(), x)
25
+ seperate_views = dill.copy(all_views)
26
+ new_x = seperate_views[-1]
27
+ new_x['b'] = 2
28
+ new_x['c'] = 1
29
+ assert len(new_x) == 3 and len(x) == 1
30
+ assert len(seperate_views[0]) == 3 and len(all_views[0]) == 1
31
+ assert len(seperate_views[1]) == 3 and len(all_views[1]) == 1
32
+ assert len(seperate_views[2]) == 3 and len(all_views[2]) == 1
33
+ assert dict(all_views[1]) == x
34
+ assert dict(seperate_views[1]) == new_x
35
+
36
+ if __name__ == '__main__':
37
+ test_dictproxy()
38
+ test_dictviews()
39
+ test_dictproxy_trick()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_diff.py ADDED
@@ -0,0 +1,107 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ from dill import __diff as diff
10
+
11
+ import sys
12
+ IS_PYPY = not hasattr(sys, 'getrefcount')
13
+
14
+ class A:
15
+ pass
16
+
17
+ def test_diff():
18
+ a = A()
19
+ b = A()
20
+ c = A()
21
+ a.a = b
22
+ b.a = c
23
+ diff.memorise(a)
24
+ assert not diff.has_changed(a)
25
+ c.a = 1
26
+ assert diff.has_changed(a)
27
+ diff.memorise(c, force=True)
28
+ assert not diff.has_changed(a)
29
+ c.a = 2
30
+ assert diff.has_changed(a)
31
+ changed = diff.whats_changed(a)
32
+ assert list(changed[0].keys()) == ["a"]
33
+ assert not changed[1]
34
+
35
+ a2 = []
36
+ b2 = [a2]
37
+ c2 = [b2]
38
+ diff.memorise(c2)
39
+ assert not diff.has_changed(c2)
40
+ a2.append(1)
41
+ assert diff.has_changed(c2)
42
+ changed = diff.whats_changed(c2)
43
+ assert changed[0] == {}
44
+ assert changed[1]
45
+
46
+ a3 = {}
47
+ b3 = {1: a3}
48
+ c3 = {1: b3}
49
+ diff.memorise(c3)
50
+ assert not diff.has_changed(c3)
51
+ a3[1] = 1
52
+ assert diff.has_changed(c3)
53
+ changed = diff.whats_changed(c3)
54
+ assert changed[0] == {}
55
+ assert changed[1]
56
+
57
+ if not IS_PYPY:
58
+ import abc
59
+ # make sure the "_abc_invalidation_counter" doesn't make the test fail
60
+ diff.memorise(abc.ABCMeta, force=True)
61
+ assert not diff.has_changed(abc)
62
+ abc.ABCMeta.zzz = 1
63
+ assert diff.has_changed(abc)
64
+ changed = diff.whats_changed(abc)
65
+ assert list(changed[0].keys()) == ["ABCMeta"]
66
+ assert not changed[1]
67
+
68
+ '''
69
+ import Queue
70
+ diff.memorise(Queue, force=True)
71
+ assert not diff.has_changed(Queue)
72
+ Queue.Queue.zzz = 1
73
+ assert diff.has_changed(Queue)
74
+ changed = diff.whats_changed(Queue)
75
+ assert list(changed[0].keys()) == ["Queue"]
76
+ assert not changed[1]
77
+
78
+ import math
79
+ diff.memorise(math, force=True)
80
+ assert not diff.has_changed(math)
81
+ math.zzz = 1
82
+ assert diff.has_changed(math)
83
+ changed = diff.whats_changed(math)
84
+ assert list(changed[0].keys()) == ["zzz"]
85
+ assert not changed[1]
86
+ '''
87
+
88
+ a = A()
89
+ b = A()
90
+ c = A()
91
+ a.a = b
92
+ b.a = c
93
+ diff.memorise(a)
94
+ assert not diff.has_changed(a)
95
+ c.a = 1
96
+ assert diff.has_changed(a)
97
+ diff.memorise(c, force=True)
98
+ assert not diff.has_changed(a)
99
+ del c.a
100
+ assert diff.has_changed(a)
101
+ changed = diff.whats_changed(a)
102
+ assert list(changed[0].keys()) == ["a"]
103
+ assert not changed[1]
104
+
105
+
106
+ if __name__ == '__main__':
107
+ test_diff()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_extendpickle.py ADDED
@@ -0,0 +1,53 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill as pickle
10
+ from io import BytesIO as StringIO
11
+
12
+
13
+ def my_fn(x):
14
+ return x * 17
15
+
16
+
17
+ def test_extend():
18
+ obj = lambda : my_fn(34)
19
+ assert obj() == 578
20
+
21
+ obj_io = StringIO()
22
+ pickler = pickle.Pickler(obj_io)
23
+ pickler.dump(obj)
24
+
25
+ obj_str = obj_io.getvalue()
26
+
27
+ obj2_io = StringIO(obj_str)
28
+ unpickler = pickle.Unpickler(obj2_io)
29
+ obj2 = unpickler.load()
30
+
31
+ assert obj2() == 578
32
+
33
+
34
+ def test_isdill():
35
+ obj_io = StringIO()
36
+ pickler = pickle.Pickler(obj_io)
37
+ assert pickle._dill.is_dill(pickler) is True
38
+
39
+ pickler = pickle._dill.StockPickler(obj_io)
40
+ assert pickle._dill.is_dill(pickler) is False
41
+
42
+ try:
43
+ import multiprocess as mp
44
+ pickler = mp.reduction.ForkingPickler(obj_io)
45
+ assert pickle._dill.is_dill(pickler, child=True) is True
46
+ assert pickle._dill.is_dill(pickler, child=False) is False
47
+ except Exception:
48
+ pass
49
+
50
+
51
+ if __name__ == '__main__':
52
+ test_extend()
53
+ test_isdill()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_functions.py ADDED
@@ -0,0 +1,141 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2019-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+
8
+ import functools
9
+ import dill
10
+ import sys
11
+ dill.settings['recurse'] = True
12
+
13
+
14
+ def function_a(a):
15
+ return a
16
+
17
+
18
+ def function_b(b, b1):
19
+ return b + b1
20
+
21
+
22
+ def function_c(c, c1=1):
23
+ return c + c1
24
+
25
+
26
+ def function_d(d, d1, d2=1):
27
+ """doc string"""
28
+ return d + d1 + d2
29
+
30
+ function_d.__module__ = 'a module'
31
+
32
+
33
+ exec('''
34
+ def function_e(e, *e1, e2=1, e3=2):
35
+ return e + sum(e1) + e2 + e3''')
36
+
37
+ globalvar = 0
38
+
39
+ @functools.lru_cache(None)
40
+ def function_with_cache(x):
41
+ global globalvar
42
+ globalvar += x
43
+ return globalvar
44
+
45
+
46
+ def function_with_unassigned_variable():
47
+ if False:
48
+ value = None
49
+ return (lambda: value)
50
+
51
+
52
+ def test_issue_510():
53
+ # A very bizarre use of functions and methods that pickle doesn't get
54
+ # correctly for odd reasons.
55
+ class Foo:
56
+ def __init__(self):
57
+ def f2(self):
58
+ return self
59
+ self.f2 = f2.__get__(self)
60
+
61
+ import dill, pickletools
62
+ f = Foo()
63
+ f1 = dill.copy(f)
64
+ assert f1.f2() is f1
65
+
66
+
67
+ def test_functions():
68
+ dumped_func_a = dill.dumps(function_a)
69
+ assert dill.loads(dumped_func_a)(0) == 0
70
+
71
+ dumped_func_b = dill.dumps(function_b)
72
+ assert dill.loads(dumped_func_b)(1,2) == 3
73
+
74
+ dumped_func_c = dill.dumps(function_c)
75
+ assert dill.loads(dumped_func_c)(1) == 2
76
+ assert dill.loads(dumped_func_c)(1, 2) == 3
77
+
78
+ dumped_func_d = dill.dumps(function_d)
79
+ assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__
80
+ assert dill.loads(dumped_func_d).__module__ == function_d.__module__
81
+ assert dill.loads(dumped_func_d)(1, 2) == 4
82
+ assert dill.loads(dumped_func_d)(1, 2, 3) == 6
83
+ assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6
84
+
85
+ function_with_cache(1)
86
+ globalvar = 0
87
+ dumped_func_cache = dill.dumps(function_with_cache)
88
+ assert function_with_cache(2) == 3
89
+ assert function_with_cache(1) == 1
90
+ assert function_with_cache(3) == 6
91
+ assert function_with_cache(2) == 3
92
+
93
+ empty_cell = function_with_unassigned_variable()
94
+ cell_copy = dill.loads(dill.dumps(empty_cell))
95
+ assert 'empty' in str(cell_copy.__closure__[0])
96
+ try:
97
+ cell_copy()
98
+ except Exception:
99
+ # this is good
100
+ pass
101
+ else:
102
+ raise AssertionError('cell_copy() did not read an empty cell')
103
+
104
+ exec('''
105
+ dumped_func_e = dill.dumps(function_e)
106
+ assert dill.loads(dumped_func_e)(1, 2) == 6
107
+ assert dill.loads(dumped_func_e)(1, 2, 3) == 9
108
+ assert dill.loads(dumped_func_e)(1, 2, e2=3) == 8
109
+ assert dill.loads(dumped_func_e)(1, 2, e2=3, e3=4) == 10
110
+ assert dill.loads(dumped_func_e)(1, 2, 3, e2=4) == 12
111
+ assert dill.loads(dumped_func_e)(1, 2, 3, e2=4, e3=5) == 15''')
112
+
113
+ def test_code_object():
114
+ import warnings
115
+ from dill._dill import ALL_CODE_PARAMS, CODE_PARAMS, CODE_VERSION, _create_code
116
+ code = function_c.__code__
117
+ warnings.filterwarnings('ignore', category=DeprecationWarning) # issue 597
118
+ LNOTAB = getattr(code, 'co_lnotab', b'')
119
+ if warnings.filters: del warnings.filters[0]
120
+ fields = {f: getattr(code, 'co_'+f) for f in CODE_PARAMS}
121
+ fields.setdefault('posonlyargcount', 0) # python >= 3.8
122
+ fields.setdefault('lnotab', LNOTAB) # python <= 3.9
123
+ fields.setdefault('linetable', b'') # python >= 3.10
124
+ fields.setdefault('qualname', fields['name']) # python >= 3.11
125
+ fields.setdefault('exceptiontable', b'') # python >= 3.11
126
+ fields.setdefault('endlinetable', None) # python == 3.11a
127
+ fields.setdefault('columntable', None) # python == 3.11a
128
+
129
+ for version, _, params in ALL_CODE_PARAMS:
130
+ args = tuple(fields[p] for p in params.split())
131
+ try:
132
+ _create_code(*args)
133
+ if version >= (3,10):
134
+ _create_code(fields['lnotab'], *args)
135
+ except Exception as error:
136
+ raise Exception("failed to construct code object with format version {}".format(version)) from error
137
+
138
+ if __name__ == '__main__':
139
+ test_functions()
140
+ test_issue_510()
141
+ test_code_object()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_functors.py ADDED
@@ -0,0 +1,39 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import functools
10
+ import dill
11
+ dill.settings['recurse'] = True
12
+
13
+
14
+ def f(a, b, c): # without keywords
15
+ pass
16
+
17
+
18
+ def g(a, b, c=2): # with keywords
19
+ pass
20
+
21
+
22
+ def h(a=1, b=2, c=3): # without args
23
+ pass
24
+
25
+
26
+ def test_functools():
27
+ fp = functools.partial(f, 1, 2)
28
+ gp = functools.partial(g, 1, c=2)
29
+ hp = functools.partial(h, 1, c=2)
30
+ bp = functools.partial(int, base=2)
31
+
32
+ assert dill.pickles(fp, safe=True)
33
+ assert dill.pickles(gp, safe=True)
34
+ assert dill.pickles(hp, safe=True)
35
+ assert dill.pickles(bp, safe=True)
36
+
37
+
38
+ if __name__ == '__main__':
39
+ test_functools()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_logger.py ADDED
@@ -0,0 +1,70 @@
1
+ #!/usr/bin/env python
2
+
3
+ # Author: Leonardo Gama (@leogama)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+
8
+ import logging
9
+ import re
10
+ import tempfile
11
+
12
+ import dill
13
+ from dill import detect
14
+ from dill.logger import stderr_handler, adapter as logger
15
+
16
+ try:
17
+ from StringIO import StringIO
18
+ except ImportError:
19
+ from io import StringIO
20
+
21
+ test_obj = {'a': (1, 2), 'b': object(), 'f': lambda x: x**2, 'big': list(range(10))}
22
+
23
+ def test_logging(should_trace):
24
+ buffer = StringIO()
25
+ handler = logging.StreamHandler(buffer)
26
+ logger.addHandler(handler)
27
+ try:
28
+ dill.dumps(test_obj)
29
+ if should_trace:
30
+ regex = re.compile(r'(\S*┬ \w.*[^)]' # begin pickling object
31
+ r'|│*└ # \w.* \[\d+ (\wi)?B])' # object written (with size)
32
+ )
33
+ for line in buffer.getvalue().splitlines():
34
+ assert regex.fullmatch(line)
35
+ return buffer.getvalue()
36
+ else:
37
+ assert buffer.getvalue() == ""
38
+ finally:
39
+ logger.removeHandler(handler)
40
+ buffer.close()
41
+
42
+ def test_trace_to_file(stream_trace):
43
+ file = tempfile.NamedTemporaryFile(mode='r')
44
+ with detect.trace(file.name, mode='w'):
45
+ dill.dumps(test_obj)
46
+ file_trace = file.read()
47
+ file.close()
48
+ # Apparently, objects can change location in memory...
49
+ reghex = re.compile(r'0x[0-9A-Za-z]+')
50
+ file_trace, stream_trace = reghex.sub('0x', file_trace), reghex.sub('0x', stream_trace)
51
+ # PyPy prints dictionary contents with repr(dict)...
52
+ regdict = re.compile(r'(dict\.__repr__ of ).*')
53
+ file_trace, stream_trace = regdict.sub(r'\1{}>', file_trace), regdict.sub(r'\1{}>', stream_trace)
54
+ assert file_trace == stream_trace
55
+
56
+ if __name__ == '__main__':
57
+ logger.removeHandler(stderr_handler)
58
+ test_logging(should_trace=False)
59
+ detect.trace(True)
60
+ test_logging(should_trace=True)
61
+ detect.trace(False)
62
+ test_logging(should_trace=False)
63
+
64
+ loglevel = logging.ERROR
65
+ logger.setLevel(loglevel)
66
+ with detect.trace():
67
+ stream_trace = test_logging(should_trace=True)
68
+ test_logging(should_trace=False)
69
+ assert logger.getEffectiveLevel() == loglevel
70
+ test_trace_to_file(stream_trace)
llmeval-env/lib/python3.10/site-packages/dill/tests/test_mixins.py ADDED
@@ -0,0 +1,121 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+ dill.settings['recurse'] = True
11
+
12
+
13
+ def wtf(x,y,z):
14
+ def zzz():
15
+ return x
16
+ def yyy():
17
+ return y
18
+ def xxx():
19
+ return z
20
+ return zzz,yyy
21
+
22
+
23
+ def quad(a=1, b=1, c=0):
24
+ inverted = [False]
25
+ def invert():
26
+ inverted[0] = not inverted[0]
27
+ def dec(f):
28
+ def func(*args, **kwds):
29
+ x = f(*args, **kwds)
30
+ if inverted[0]: x = -x
31
+ return a*x**2 + b*x + c
32
+ func.__wrapped__ = f
33
+ func.invert = invert
34
+ func.inverted = inverted
35
+ return func
36
+ return dec
37
+
38
+
39
+ @quad(a=0,b=2)
40
+ def double_add(*args):
41
+ return sum(args)
42
+
43
+
44
+ fx = sum([1,2,3])
45
+
46
+
47
+ ### to make it interesting...
48
+ def quad_factory(a=1,b=1,c=0):
49
+ def dec(f):
50
+ def func(*args,**kwds):
51
+ fx = f(*args,**kwds)
52
+ return a*fx**2 + b*fx + c
53
+ return func
54
+ return dec
55
+
56
+
57
+ @quad_factory(a=0,b=4,c=0)
58
+ def quadish(x):
59
+ return x+1
60
+
61
+
62
+ quadratic = quad_factory()
63
+
64
+
65
+ def doubler(f):
66
+ def inner(*args, **kwds):
67
+ fx = f(*args, **kwds)
68
+ return 2*fx
69
+ return inner
70
+
71
+
72
+ @doubler
73
+ def quadruple(x):
74
+ return 2*x
75
+
76
+
77
+ def test_mixins():
78
+ # test mixins
79
+ assert double_add(1,2,3) == 2*fx
80
+ double_add.invert()
81
+ assert double_add(1,2,3) == -2*fx
82
+
83
+ _d = dill.copy(double_add)
84
+ assert _d(1,2,3) == -2*fx
85
+ #_d.invert() #FIXME: fails seemingly randomly
86
+ #assert _d(1,2,3) == 2*fx
87
+
88
+ assert _d.__wrapped__(1,2,3) == fx
89
+
90
+ # XXX: issue or feature? in python3.4, inverted is linked through copy
91
+ if not double_add.inverted[0]:
92
+ double_add.invert()
93
+
94
+ # test some stuff from source and pointers
95
+ ds = dill.source
96
+ dd = dill.detect
97
+ assert ds.getsource(dd.freevars(quadish)['f']) == '@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n'
98
+ assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n return 2*x\n'
99
+ assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__
100
+ assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__
101
+ assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__
102
+ assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__
103
+ assert ds.importable(quadruple, source=True) == 'def doubler(f):\n def inner(*args, **kwds):\n fx = f(*args, **kwds)\n return 2*fx\n return inner\n\n@doubler\ndef quadruple(x):\n return 2*x\n'
104
+ #***** #FIXME: this needs work
105
+ result = ds.importable(quadish, source=True)
106
+ a,b,c,_,result = result.split('\n',4)
107
+ assert result == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n\n@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n'
108
+ assert set([a,b,c]) == set(['a = 0', 'c = 0', 'b = 4'])
109
+ result = ds.importable(quadratic, source=True)
110
+ a,b,c,result = result.split('\n',3)
111
+ assert result == '\ndef dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n'
112
+ assert set([a,b,c]) == set(['a = 1', 'c = 0', 'b = 1'])
113
+ result = ds.importable(double_add, source=True)
114
+ a,b,c,d,_,result = result.split('\n',5)
115
+ assert result == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n\n@quad(a=0,b=2)\ndef double_add(*args):\n return sum(args)\n'
116
+ assert set([a,b,c,d]) == set(['a = 0', 'c = 0', 'b = 2', 'inverted = [True]'])
117
+ #*****
118
+
119
+
120
+ if __name__ == '__main__':
121
+ test_mixins()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_module.py ADDED
@@ -0,0 +1,84 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import sys
10
+ import dill
11
+ import test_mixins as module
12
+ from importlib import reload
13
+ dill.settings['recurse'] = True
14
+
15
+ cached = (module.__cached__ if hasattr(module, "__cached__")
16
+ else module.__file__.split(".", 1)[0] + ".pyc")
17
+
18
+ module.a = 1234
19
+
20
+ pik_mod = dill.dumps(module)
21
+
22
+ module.a = 0
23
+
24
+ # remove module
25
+ del sys.modules[module.__name__]
26
+ del module
27
+
28
+ module = dill.loads(pik_mod)
29
+ def test_attributes():
30
+ #assert hasattr(module, "a") and module.a == 1234 #FIXME: -m dill.tests
31
+ assert module.double_add(1, 2, 3) == 2 * module.fx
32
+
33
+ # Restart, and test use_diff
34
+
35
+ reload(module)
36
+
37
+ try:
38
+ dill.use_diff()
39
+
40
+ module.a = 1234
41
+
42
+ pik_mod = dill.dumps(module)
43
+
44
+ module.a = 0
45
+
46
+ # remove module
47
+ del sys.modules[module.__name__]
48
+ del module
49
+
50
+ module = dill.loads(pik_mod)
51
+ def test_diff_attributes():
52
+ assert hasattr(module, "a") and module.a == 1234
53
+ assert module.double_add(1, 2, 3) == 2 * module.fx
54
+
55
+ except AttributeError:
56
+ def test_diff_attributes():
57
+ pass
58
+
59
+ # clean up
60
+ import os
61
+ if os.path.exists(cached):
62
+ os.remove(cached)
63
+ pycache = os.path.join(os.path.dirname(module.__file__), "__pycache__")
64
+ if os.path.exists(pycache) and not os.listdir(pycache):
65
+ os.removedirs(pycache)
66
+
67
+
68
+ # test when module is None
69
+ import math
70
+
71
+ def get_lambda(str, **kwarg):
72
+ return eval(str, kwarg, None)
73
+
74
+ obj = get_lambda('lambda x: math.exp(x)', math=math)
75
+
76
+ def test_module_is_none():
77
+ assert obj.__module__ is None
78
+ assert dill.copy(obj)(3) == obj(3)
79
+
80
+
81
+ if __name__ == '__main__':
82
+ test_attributes()
83
+ test_diff_attributes()
84
+ test_module_is_none()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_moduledict.py ADDED
@@ -0,0 +1,54 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+ dill.settings['recurse'] = True
11
+
12
+ def f(func):
13
+ def w(*args):
14
+ return f(*args)
15
+ return w
16
+
17
+ @f
18
+ def f2(): pass
19
+
20
+ # check when __main__ and on import
21
+ def test_decorated():
22
+ assert dill.pickles(f2)
23
+
24
+
25
+ import doctest
26
+ import logging
27
+ logging.basicConfig(level=logging.DEBUG)
28
+
29
+ class SomeUnreferencedUnpicklableClass(object):
30
+ def __reduce__(self):
31
+ raise Exception
32
+
33
+ unpicklable = SomeUnreferencedUnpicklableClass()
34
+
35
+ # This works fine outside of Doctest:
36
+ def test_normal():
37
+ serialized = dill.dumps(lambda x: x)
38
+
39
+ # should not try to pickle unpicklable object in __globals__
40
+ def tests():
41
+ """
42
+ >>> serialized = dill.dumps(lambda x: x)
43
+ """
44
+ return
45
+
46
+ #print("\n\nRunning Doctest:")
47
+ def test_doctest():
48
+ doctest.testmod()
49
+
50
+
51
+ if __name__ == '__main__':
52
+ test_decorated()
53
+ test_normal()
54
+ test_doctest()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_nested.py ADDED
@@ -0,0 +1,135 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ test dill's ability to handle nested functions
10
+ """
11
+
12
+ import os
13
+ import math
14
+
15
+ import dill as pickle
16
+ pickle.settings['recurse'] = True
17
+
18
+
19
+ # the nested function: pickle should fail here, but dill is ok.
20
+ def adder(augend):
21
+ zero = [0]
22
+
23
+ def inner(addend):
24
+ return addend + augend + zero[0]
25
+ return inner
26
+
27
+
28
+ # rewrite the nested function using a class: standard pickle should work here.
29
+ class cadder(object):
30
+ def __init__(self, augend):
31
+ self.augend = augend
32
+ self.zero = [0]
33
+
34
+ def __call__(self, addend):
35
+ return addend + self.augend + self.zero[0]
36
+
37
+
38
+ # rewrite again, but as an old-style class
39
+ class c2adder:
40
+ def __init__(self, augend):
41
+ self.augend = augend
42
+ self.zero = [0]
43
+
44
+ def __call__(self, addend):
45
+ return addend + self.augend + self.zero[0]
46
+
47
+
48
+ # some basic class stuff
49
+ class basic(object):
50
+ pass
51
+
52
+
53
+ class basic2:
54
+ pass
55
+
56
+
57
+ x = 5
58
+ y = 1
59
+
60
+
61
+ def test_basic():
62
+ a = [0, 1, 2]
63
+ pa = pickle.dumps(a)
64
+ pmath = pickle.dumps(math) #XXX: FAILS in pickle
65
+ pmap = pickle.dumps(map)
66
+ # ...
67
+ la = pickle.loads(pa)
68
+ lmath = pickle.loads(pmath)
69
+ lmap = pickle.loads(pmap)
70
+ assert list(map(math.sin, a)) == list(lmap(lmath.sin, la))
71
+
72
+
73
+ def test_basic_class():
74
+ pbasic2 = pickle.dumps(basic2)
75
+ _pbasic2 = pickle.loads(pbasic2)()
76
+ pbasic = pickle.dumps(basic)
77
+ _pbasic = pickle.loads(pbasic)()
78
+
79
+
80
+ def test_c2adder():
81
+ pc2adder = pickle.dumps(c2adder)
82
+ pc2add5 = pickle.loads(pc2adder)(x)
83
+ assert pc2add5(y) == x+y
84
+
85
+
86
+ def test_pickled_cadder():
87
+ pcadder = pickle.dumps(cadder)
88
+ pcadd5 = pickle.loads(pcadder)(x)
89
+ assert pcadd5(y) == x+y
90
+
91
+
92
+ def test_raw_adder_and_inner():
93
+ add5 = adder(x)
94
+ assert add5(y) == x+y
95
+
96
+
97
+ def test_pickled_adder():
98
+ padder = pickle.dumps(adder)
99
+ padd5 = pickle.loads(padder)(x)
100
+ assert padd5(y) == x+y
101
+
102
+
103
+ def test_pickled_inner():
104
+ add5 = adder(x)
105
+ pinner = pickle.dumps(add5) #XXX: FAILS in pickle
106
+ p5add = pickle.loads(pinner)
107
+ assert p5add(y) == x+y
108
+
109
+
110
+ def test_moduledict_where_not_main():
111
+ try:
112
+ from . import test_moduledict
113
+ except ImportError:
114
+ import test_moduledict
115
+ name = 'test_moduledict.py'
116
+ if os.path.exists(name) and os.path.exists(name+'c'):
117
+ os.remove(name+'c')
118
+
119
+ if os.path.exists(name) and hasattr(test_moduledict, "__cached__") \
120
+ and os.path.exists(test_moduledict.__cached__):
121
+ os.remove(getattr(test_moduledict, "__cached__"))
122
+
123
+ if os.path.exists("__pycache__") and not os.listdir("__pycache__"):
124
+ os.removedirs("__pycache__")
125
+
126
+
127
+ if __name__ == '__main__':
128
+ test_basic()
129
+ test_basic_class()
130
+ test_c2adder()
131
+ test_pickled_cadder()
132
+ test_raw_adder_and_inner()
133
+ test_pickled_adder()
134
+ test_pickled_inner()
135
+ test_moduledict_where_not_main()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_objects.py ADDED
@@ -0,0 +1,63 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ demonstrate dill's ability to pickle different python types
10
+ test pickling of all Python Standard Library objects (currently: CH 1-14 @ 2.7)
11
+ """
12
+
13
+ import dill as pickle
14
+ pickle.settings['recurse'] = True
15
+ #pickle.detect.trace(True)
16
+ #import pickle
17
+
18
+ # get all objects for testing
19
+ from dill import load_types, objects, extend
20
+ load_types(pickleable=True,unpickleable=False)
21
+
22
+ # uncomment the next two lines to test cloudpickle
23
+ #extend(False)
24
+ #import cloudpickle as pickle
25
+
26
+ # helper objects
27
+ class _class:
28
+ def _method(self):
29
+ pass
30
+
31
+ # objects that *fail* if imported
32
+ special = {}
33
+ special['LambdaType'] = _lambda = lambda x: lambda y: x
34
+ special['MethodType'] = _method = _class()._method
35
+ special['UnboundMethodType'] = _class._method
36
+ objects.update(special)
37
+
38
+ def pickles(name, exact=False, verbose=True):
39
+ """quick check if object pickles with dill"""
40
+ obj = objects[name]
41
+ try:
42
+ pik = pickle.loads(pickle.dumps(obj))
43
+ if exact:
44
+ try:
45
+ assert pik == obj
46
+ except AssertionError:
47
+ assert type(obj) == type(pik)
48
+ if verbose: print ("weak: %s %s" % (name, type(obj)))
49
+ else:
50
+ assert type(obj) == type(pik)
51
+ except Exception:
52
+ if verbose: print ("fails: %s %s" % (name, type(obj)))
53
+
54
+
55
+ def test_objects(verbose=True):
56
+ for member in objects.keys():
57
+ #pickles(member, exact=True, verbose=verbose)
58
+ pickles(member, exact=False, verbose=verbose)
59
+
60
+ if __name__ == '__main__':
61
+ import warnings
62
+ warnings.simplefilter('ignore')
63
+ test_objects(verbose=False)
llmeval-env/lib/python3.10/site-packages/dill/tests/test_properties.py ADDED
@@ -0,0 +1,62 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import sys
10
+
11
+ import dill
12
+ dill.settings['recurse'] = True
13
+
14
+
15
+ class Foo(object):
16
+ def __init__(self):
17
+ self._data = 1
18
+
19
+ def _get_data(self):
20
+ return self._data
21
+
22
+ def _set_data(self, x):
23
+ self._data = x
24
+
25
+ data = property(_get_data, _set_data)
26
+
27
+
28
+ def test_data_not_none():
29
+ FooS = dill.copy(Foo)
30
+ assert FooS.data.fget is not None
31
+ assert FooS.data.fset is not None
32
+ assert FooS.data.fdel is None
33
+
34
+
35
+ def test_data_unchanged():
36
+ FooS = dill.copy(Foo)
37
+ try:
38
+ res = FooS().data
39
+ except Exception:
40
+ e = sys.exc_info()[1]
41
+ raise AssertionError(str(e))
42
+ else:
43
+ assert res == 1
44
+
45
+
46
+ def test_data_changed():
47
+ FooS = dill.copy(Foo)
48
+ try:
49
+ f = FooS()
50
+ f.data = 1024
51
+ res = f.data
52
+ except Exception:
53
+ e = sys.exc_info()[1]
54
+ raise AssertionError(str(e))
55
+ else:
56
+ assert res == 1024
57
+
58
+
59
+ if __name__ == '__main__':
60
+ test_data_not_none()
61
+ test_data_unchanged()
62
+ test_data_changed()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_registered.py ADDED
@@ -0,0 +1,64 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+ """
8
+ test pickling registered objects
9
+ """
10
+
11
+ import dill
12
+ from dill._objects import failures, registered, succeeds
13
+ import warnings
14
+ warnings.filterwarnings('ignore')
15
+
16
+ def check(d, ok=True):
17
+ res = []
18
+ for k,v in d.items():
19
+ try:
20
+ z = dill.copy(v)
21
+ if ok: res.append(k)
22
+ except:
23
+ if not ok: res.append(k)
24
+ return res
25
+
26
+ fails = check(failures)
27
+ try:
28
+ assert not bool(fails)
29
+ except AssertionError as e:
30
+ print("FAILS: %s" % fails)
31
+ raise e from None
32
+
33
+ register = check(registered, ok=False)
34
+ try:
35
+ assert not bool(register)
36
+ except AssertionError as e:
37
+ print("REGISTER: %s" % register)
38
+ raise e from None
39
+
40
+ success = check(succeeds, ok=False)
41
+ try:
42
+ assert not bool(success)
43
+ except AssertionError as e:
44
+ print("SUCCESS: %s" % success)
45
+ raise e from None
46
+
47
+ import builtins
48
+ import types
49
+ q = dill._dill._reverse_typemap
50
+ p = {k:v for k,v in q.items() if k not in vars(builtins) and k not in vars(types)}
51
+
52
+ diff = set(p.keys()).difference(registered.keys())
53
+ try:
54
+ assert not bool(diff)
55
+ except AssertionError as e:
56
+ print("DIFF: %s" % diff)
57
+ raise e from None
58
+
59
+ miss = set(registered.keys()).difference(p.keys())
60
+ try:
61
+ assert not bool(miss)
62
+ except AssertionError as e:
63
+ print("MISS: %s" % miss)
64
+ raise e from None
llmeval-env/lib/python3.10/site-packages/dill/tests/test_restricted.py ADDED
@@ -0,0 +1,27 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Kirill Makhonin (@kirillmakhonin)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ import dill
10
+
11
+ class RestrictedType:
12
+ def __bool__(*args, **kwargs):
13
+ raise Exception('Restricted function')
14
+
15
+ __eq__ = __lt__ = __le__ = __ne__ = __gt__ = __ge__ = __hash__ = __bool__
16
+
17
+ glob_obj = RestrictedType()
18
+
19
+ def restricted_func():
20
+ a = glob_obj
21
+
22
+ def test_function_with_restricted_object():
23
+ deserialized = dill.loads(dill.dumps(restricted_func, recurse=True))
24
+
25
+
26
+ if __name__ == '__main__':
27
+ test_function_with_restricted_object()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_selected.py ADDED
@@ -0,0 +1,125 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+ """
9
+ testing some selected object types
10
+ """
11
+
12
+ import dill
13
+ dill.settings['recurse'] = True
14
+
15
+ verbose = False
16
+
17
+ def test_dict_contents():
18
+ c = type.__dict__
19
+ for i,j in c.items():
20
+ #try:
21
+ ok = dill.pickles(j)
22
+ #except Exception:
23
+ # print ("FAIL: %s with %s" % (i, dill.detect.errors(j)))
24
+ if verbose: print ("%s: %s, %s" % (ok, type(j), j))
25
+ assert ok
26
+ if verbose: print ("")
27
+
28
+ def _g(x): yield x;
29
+
30
+ def _f():
31
+ try: raise
32
+ except Exception:
33
+ from sys import exc_info
34
+ e, er, tb = exc_info()
35
+ return er, tb
36
+
37
+ class _d(object):
38
+ def _method(self):
39
+ pass
40
+
41
+ from dill import objects
42
+ from dill import load_types
43
+ load_types(pickleable=True,unpickleable=False)
44
+ _newclass = objects['ClassObjectType']
45
+ # some clean-up #FIXME: should happen internal to dill
46
+ objects['TemporaryFileType'].close()
47
+ objects['TextWrapperType'].close()
48
+ objects['BufferedRandomType'].close()
49
+ objects['BufferedReaderType'].close()
50
+ objects['BufferedWriterType'].close()
51
+ objects['FileType'].close()
52
+ del objects
53
+
54
+ # getset_descriptor for new-style classes (fails on '_method', if not __main__)
55
+ def test_class_descriptors():
56
+ d = _d.__dict__
57
+ for i in d.values():
58
+ ok = dill.pickles(i)
59
+ if verbose: print ("%s: %s, %s" % (ok, type(i), i))
60
+ assert ok
61
+ if verbose: print ("")
62
+ od = _newclass.__dict__
63
+ for i in od.values():
64
+ ok = dill.pickles(i)
65
+ if verbose: print ("%s: %s, %s" % (ok, type(i), i))
66
+ assert ok
67
+ if verbose: print ("")
68
+
69
+ # (__main__) class instance for new-style classes
70
+ def test_class():
71
+ o = _d()
72
+ oo = _newclass()
73
+ ok = dill.pickles(o)
74
+ if verbose: print ("%s: %s, %s" % (ok, type(o), o))
75
+ assert ok
76
+ ok = dill.pickles(oo)
77
+ if verbose: print ("%s: %s, %s" % (ok, type(oo), oo))
78
+ assert ok
79
+ if verbose: print ("")
80
+
81
+ # frames, generators, and tracebacks (all depend on frame)
82
+ def test_frame_related():
83
+ g = _g(1)
84
+ f = g.gi_frame
85
+ e,t = _f()
86
+ _is = lambda ok: ok
87
+ ok = dill.pickles(f)
88
+ if verbose: print ("%s: %s, %s" % (ok, type(f), f))
89
+ assert not ok
90
+ ok = dill.pickles(g)
91
+ if verbose: print ("%s: %s, %s" % (ok, type(g), g))
92
+ assert _is(not ok) #XXX: dill fails
93
+ ok = dill.pickles(t)
94
+ if verbose: print ("%s: %s, %s" % (ok, type(t), t))
95
+ assert not ok #XXX: dill fails
96
+ ok = dill.pickles(e)
97
+ if verbose: print ("%s: %s, %s" % (ok, type(e), e))
98
+ assert ok
99
+ if verbose: print ("")
100
+
101
+ def test_typing():
102
+ import typing
103
+ x = typing.Any
104
+ assert x == dill.copy(x)
105
+ x = typing.Dict[int, str]
106
+ assert x == dill.copy(x)
107
+ x = typing.List[int]
108
+ assert x == dill.copy(x)
109
+ x = typing.Tuple[int, str]
110
+ assert x == dill.copy(x)
111
+ x = typing.Tuple[int]
112
+ assert x == dill.copy(x)
113
+ x = typing.Tuple[()]
114
+ assert x == dill.copy(x)
115
+ x = typing.Tuple[()].copy_with(())
116
+ assert x == dill.copy(x)
117
+ return
118
+
119
+
120
+ if __name__ == '__main__':
121
+ test_frame_related()
122
+ test_dict_contents()
123
+ test_class()
124
+ test_class_descriptors()
125
+ test_typing()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_session.py ADDED
@@ -0,0 +1,280 @@
1
+ #!/usr/bin/env python
2
+
3
+ # Author: Leonardo Gama (@leogama)
4
+ # Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
5
+ # License: 3-clause BSD. The full license text is available at:
6
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
7
+
8
+ import atexit
9
+ import os
10
+ import sys
11
+ import __main__
12
+ from contextlib import suppress
13
+ from io import BytesIO
14
+
15
+ import dill
16
+
17
+ session_file = os.path.join(os.path.dirname(__file__), 'session-refimported-%s.pkl')
18
+
19
+ ###################
20
+ # Child process #
21
+ ###################
22
+
23
+ def _error_line(error, obj, refimported):
24
+ import traceback
25
+ line = traceback.format_exc().splitlines()[-2].replace('[obj]', '['+repr(obj)+']')
26
+ return "while testing (with refimported=%s): %s" % (refimported, line.lstrip())
27
+
28
+ if __name__ == '__main__' and len(sys.argv) >= 3 and sys.argv[1] == '--child':
29
+ # Test session loading in a fresh interpreter session.
30
+ refimported = (sys.argv[2] == 'True')
31
+ dill.load_module(session_file % refimported, module='__main__')
32
+
33
+ def test_modules(refimported):
34
+ # FIXME: In this test setting with CPython 3.7, 'calendar' is not included
35
+ # in sys.modules, independent of the value of refimported. Tried to
36
+ # run garbage collection just before loading the session with no luck. It
37
+ # fails even when preceding them with 'import calendar'. Needed to run
38
+ # these kinds of tests in a subprocess. Failing test sample:
39
+ # assert globals()['day_name'] is sys.modules['calendar'].__dict__['day_name']
40
+ try:
41
+ for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
42
+ assert globals()[obj].__name__ in sys.modules
43
+ assert 'calendar' in sys.modules and 'cmath' in sys.modules
44
+ import calendar, cmath
45
+
46
+ for obj in ('Calendar', 'isleap'):
47
+ assert globals()[obj] is sys.modules['calendar'].__dict__[obj]
48
+ assert __main__.day_name.__module__ == 'calendar'
49
+ if refimported:
50
+ assert __main__.day_name is calendar.day_name
51
+
52
+ assert __main__.complex_log is cmath.log
53
+
54
+ except AssertionError as error:
55
+ error.args = (_error_line(error, obj, refimported),)
56
+ raise
57
+
58
+ test_modules(refimported)
59
+ sys.exit()
60
+
61
+ ####################
62
+ # Parent process #
63
+ ####################
64
+
65
+ # Create various kinds of objects to test different internal logics.
66
+
67
+ ## Modules.
68
+ import json # top-level module
69
+ import urllib as url # top-level module under alias
70
+ from xml import sax # submodule
71
+ import xml.dom.minidom as dom # submodule under alias
72
+ import test_dictviews as local_mod # non-builtin top-level module
73
+
74
+ ## Imported objects.
75
+ from calendar import Calendar, isleap, day_name # class, function, other object
76
+ from cmath import log as complex_log # imported with alias
77
+
78
+ ## Local objects.
79
+ x = 17
80
+ empty = None
81
+ names = ['Alice', 'Bob', 'Carol']
82
+ def squared(x): return x**2
83
+ cubed = lambda x: x**3
84
+ class Person:
85
+ def __init__(self, name, age):
86
+ self.name = name
87
+ self.age = age
88
+ person = Person(names[0], x)
89
+ class CalendarSubclass(Calendar):
90
+ def weekdays(self):
91
+ return [day_name[i] for i in self.iterweekdays()]
92
+ cal = CalendarSubclass()
93
+ selfref = __main__
94
+
95
+ # Setup global namespace for session saving tests.
96
+ class TestNamespace:
97
+ test_globals = globals().copy()
98
+ def __init__(self, **extra):
99
+ self.extra = extra
100
+ def __enter__(self):
101
+ self.backup = globals().copy()
102
+ globals().clear()
103
+ globals().update(self.test_globals)
104
+ globals().update(self.extra)
105
+ return self
106
+ def __exit__(self, *exc_info):
107
+ globals().clear()
108
+ globals().update(self.backup)
109
+
110
+ def _clean_up_cache(module):
111
+ cached = module.__file__.split('.', 1)[0] + '.pyc'
112
+ cached = module.__cached__ if hasattr(module, '__cached__') else cached
113
+ pycache = os.path.join(os.path.dirname(module.__file__), '__pycache__')
114
+ for remove, file in [(os.remove, cached), (os.removedirs, pycache)]:
115
+ with suppress(OSError):
116
+ remove(file)
117
+
118
+ atexit.register(_clean_up_cache, local_mod)
119
+
120
+ def _test_objects(main, globals_copy, refimported):
121
+ try:
122
+ main_dict = __main__.__dict__
123
+ global Person, person, Calendar, CalendarSubclass, cal, selfref
124
+
125
+ for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
126
+ assert globals()[obj].__name__ == globals_copy[obj].__name__
127
+
128
+ for obj in ('x', 'empty', 'names'):
129
+ assert main_dict[obj] == globals_copy[obj]
130
+
131
+ for obj in ['squared', 'cubed']:
132
+ assert main_dict[obj].__globals__ is main_dict
133
+ assert main_dict[obj](3) == globals_copy[obj](3)
134
+
135
+ assert Person.__module__ == __main__.__name__
136
+ assert isinstance(person, Person)
137
+ assert person.age == globals_copy['person'].age
138
+
139
+ assert issubclass(CalendarSubclass, Calendar)
140
+ assert isinstance(cal, CalendarSubclass)
141
+ assert cal.weekdays() == globals_copy['cal'].weekdays()
142
+
143
+ assert selfref is __main__
144
+
145
+ except AssertionError as error:
146
+ error.args = (_error_line(error, obj, refimported),)
147
+ raise
148
+
149
+ def test_session_main(refimported):
150
+ """test dump/load_module() for __main__, both in this process and in a subprocess"""
151
+ extra_objects = {}
152
+ if refimported:
153
+ # Test unpickleable imported object in main.
154
+ from sys import flags
155
+ extra_objects['flags'] = flags
156
+
157
+ with TestNamespace(**extra_objects) as ns:
158
+ try:
159
+ # Test session loading in a new session.
160
+ dill.dump_module(session_file % refimported, refimported=refimported)
161
+ from dill.tests.__main__ import python, shell, sp
162
+ error = sp.call([python, __file__, '--child', str(refimported)], shell=shell)
163
+ if error: sys.exit(error)
164
+ finally:
165
+ with suppress(OSError):
166
+ os.remove(session_file % refimported)
167
+
168
+ # Test session loading in the same session.
169
+ session_buffer = BytesIO()
170
+ dill.dump_module(session_buffer, refimported=refimported)
171
+ session_buffer.seek(0)
172
+ dill.load_module(session_buffer, module='__main__')
173
+ ns.backup['_test_objects'](__main__, ns.backup, refimported)
174
+
175
+ def test_session_other():
176
+ """test dump/load_module() for a module other than __main__"""
177
+ import test_classdef as module
178
+ atexit.register(_clean_up_cache, module)
179
+ module.selfref = module
180
+ dict_objects = [obj for obj in module.__dict__.keys() if not obj.startswith('__')]
181
+
182
+ session_buffer = BytesIO()
183
+ dill.dump_module(session_buffer, module)
184
+
185
+ for obj in dict_objects:
186
+ del module.__dict__[obj]
187
+
188
+ session_buffer.seek(0)
189
+ dill.load_module(session_buffer, module)
190
+
191
+ assert all(obj in module.__dict__ for obj in dict_objects)
192
+ assert module.selfref is module
193
+
194
+ def test_runtime_module():
195
+ from types import ModuleType
196
+ modname = '__runtime__'
197
+ runtime = ModuleType(modname)
198
+ runtime.x = 42
199
+
200
+ mod = dill.session._stash_modules(runtime)
201
+ if mod is not runtime:
202
+ print("There are objects to save by reference that shouldn't be:",
203
+ mod.__dill_imported, mod.__dill_imported_as, mod.__dill_imported_top_level,
204
+ file=sys.stderr)
205
+
206
+ # This is also for code coverage, tests the use case of dump_module(refimported=True)
207
+ # without imported objects in the namespace. It's a contrived example because
208
+ # even dill can't be in it. This should work after fixing #462.
209
+ session_buffer = BytesIO()
210
+ dill.dump_module(session_buffer, module=runtime, refimported=True)
211
+ session_dump = session_buffer.getvalue()
212
+
213
+ # Pass a new runtime created module with the same name.
214
+ runtime = ModuleType(modname) # empty
215
+ return_val = dill.load_module(BytesIO(session_dump), module=runtime)
216
+ assert return_val is None
217
+ assert runtime.__name__ == modname
218
+ assert runtime.x == 42
219
+ assert runtime not in sys.modules.values()
220
+
221
+ # Pass nothing as main. load_module() must create it.
222
+ session_buffer.seek(0)
223
+ runtime = dill.load_module(BytesIO(session_dump))
224
+ assert runtime.__name__ == modname
225
+ assert runtime.x == 42
226
+ assert runtime not in sys.modules.values()
227
+
228
+ def test_refimported_imported_as():
229
+ import collections
230
+ import concurrent.futures
231
+ import types
232
+ import typing
233
+ mod = sys.modules['__test__'] = types.ModuleType('__test__')
234
+ dill.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
235
+ mod.Dict = collections.UserDict # select by type
236
+ mod.AsyncCM = typing.AsyncContextManager # select by __module__
237
+ mod.thread_exec = dill.executor # select by __module__ with regex
238
+
239
+ session_buffer = BytesIO()
240
+ dill.dump_module(session_buffer, mod, refimported=True)
241
+ session_buffer.seek(0)
242
+ mod = dill.load(session_buffer)
243
+ del sys.modules['__test__']
244
+
245
+ assert set(mod.__dill_imported_as) == {
246
+ ('collections', 'UserDict', 'Dict'),
247
+ ('typing', 'AsyncContextManager', 'AsyncCM'),
248
+ ('dill', 'executor', 'thread_exec'),
249
+ }
250
+
251
+ def test_load_module_asdict():
252
+ with TestNamespace():
253
+ session_buffer = BytesIO()
254
+ dill.dump_module(session_buffer)
255
+
256
+ global empty, names, x, y
257
+ x = y = 0 # change x and create y
258
+ del empty
259
+ globals_state = globals().copy()
260
+
261
+ session_buffer.seek(0)
262
+ main_vars = dill.load_module_asdict(session_buffer)
263
+
264
+ assert main_vars is not globals()
265
+ assert globals() == globals_state
266
+
267
+ assert main_vars['__name__'] == '__main__'
268
+ assert main_vars['names'] == names
269
+ assert main_vars['names'] is not names
270
+ assert main_vars['x'] != x
271
+ assert 'y' not in main_vars
272
+ assert 'empty' in main_vars
273
+
274
+ if __name__ == '__main__':
275
+ test_session_main(refimported=False)
276
+ test_session_main(refimported=True)
277
+ test_session_other()
278
+ test_runtime_module()
279
+ test_refimported_imported_as()
280
+ test_load_module_asdict()
llmeval-env/lib/python3.10/site-packages/dill/tests/test_source.py ADDED
@@ -0,0 +1,154 @@
1
+ #!/usr/bin/env python
2
+ #
3
+ # Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4
+ # Copyright (c) 2008-2016 California Institute of Technology.
5
+ # Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6
+ # License: 3-clause BSD. The full license text is available at:
7
+ # - https://github.com/uqfoundation/dill/blob/master/LICENSE
8
+
9
+ from dill.source import getsource, getname, _wrap, likely_import
10
+ from dill.source import getimportable
11
+ from dill._dill import IS_PYPY
12
+
13
+ import sys
14
+ PY310b = 0x30a00b1
15
+
16
+ f = lambda x: x**2
17
+ def g(x): return f(x) - x
18
+
19
+ def h(x):
20
+ def g(x): return x
21
+ return g(x) - x
22
+
23
+ class Foo(object):
24
+ def bar(self, x):
25
+ return x*x+x
26
+ _foo = Foo()
27
+
28
+ def add(x,y):
29
+ return x+y
30
+
31
+ # yes, same as 'f', but things are tricky when it comes to pointers
32
+ squared = lambda x:x**2
33
+
34
+ class Bar:
35
+ pass
36
+ _bar = Bar()
37
+
38
+ # inspect.getsourcelines # dill.source.getblocks
39
+ def test_getsource():
40
+ assert getsource(f) == 'f = lambda x: x**2\n'
41
+ assert getsource(g) == 'def g(x): return f(x) - x\n'
42
+ assert getsource(h) == 'def h(x):\n def g(x): return x\n return g(x) - x\n'
43
+ assert getname(f) == 'f'
44
+ assert getname(g) == 'g'
45
+ assert getname(h) == 'h'
46
+ assert _wrap(f)(4) == 16
47
+ assert _wrap(g)(4) == 12
48
+ assert _wrap(h)(4) == 0
49
+
50
+ assert getname(Foo) == 'Foo'
51
+ assert getname(Bar) == 'Bar'
52
+ assert getsource(Bar) == 'class Bar:\n pass\n'
53
+ assert getsource(Foo) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n'
54
+ #XXX: add getsource for _foo, _bar
55
+
56
+ # test itself
57
+ def test_itself():
58
+ assert likely_import(likely_import)=='from dill.source import likely_import\n'
59
+
60
+ # builtin functions and objects
61
+ def test_builtin():
62
+ assert likely_import(pow) == 'pow\n'
63
+ assert likely_import(100) == '100\n'
64
+ assert likely_import(True) == 'True\n'
65
+ assert likely_import(pow, explicit=True) == 'from builtins import pow\n'
66
+ assert likely_import(100, explicit=True) == '100\n'
67
+ assert likely_import(True, explicit=True) == 'True\n'
68
+ # this is kinda BS... you can't import a None
69
+ assert likely_import(None) == 'None\n'
70
+ assert likely_import(None, explicit=True) == 'None\n'
71
+
72
+
73
+ # other imported functions
74
+ def test_imported():
75
+ from math import sin
76
+ assert likely_import(sin) == 'from math import sin\n'
77
+
78
+ # interactively defined functions
79
+ def test_dynamic():
80
+ assert likely_import(add) == 'from %s import add\n' % __name__
81
+ # interactive lambdas
82
+ assert likely_import(squared) == 'from %s import squared\n' % __name__
83
+
84
+ # classes and class instances
85
+ def test_classes():
86
+ from io import BytesIO as StringIO
87
+ y = "from _io import BytesIO\n"
88
+ x = y if (IS_PYPY or sys.hexversion >= PY310b) else "from io import BytesIO\n"
89
+ s = StringIO()
90
+
91
+ assert likely_import(StringIO) == x
92
+ assert likely_import(s) == y
93
+ # interactively defined classes and class instances
94
+ assert likely_import(Foo) == 'from %s import Foo\n' % __name__
95
+ assert likely_import(_foo) == 'from %s import Foo\n' % __name__
96
+
97
+
98
+ # test getimportable
99
+ def test_importable():
100
+ assert getimportable(add) == 'from %s import add\n' % __name__
101
+ assert getimportable(squared) == 'from %s import squared\n' % __name__
102
+ assert getimportable(Foo) == 'from %s import Foo\n' % __name__
103
+ assert getimportable(Foo.bar) == 'from %s import bar\n' % __name__
104
+ assert getimportable(_foo.bar) == 'from %s import bar\n' % __name__
105
+ assert getimportable(None) == 'None\n'
106
+ assert getimportable(100) == '100\n'
107
+
108
+ assert getimportable(add, byname=False) == 'def add(x,y):\n return x+y\n'
109
+ assert getimportable(squared, byname=False) == 'squared = lambda x:x**2\n'
110
+ assert getimportable(None, byname=False) == 'None\n'
111
+ assert getimportable(Bar, byname=False) == 'class Bar:\n pass\n'
112
+ assert getimportable(Foo, byname=False) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n'
113
+ assert getimportable(Foo.bar, byname=False) == 'def bar(self, x):\n return x*x+x\n'
114
+ assert getimportable(Foo.bar, byname=True) == 'from %s import bar\n' % __name__
115
+ assert getimportable(Foo.bar, alias='memo', byname=True) == 'from %s import bar as memo\n' % __name__
116
+ assert getimportable(Foo, alias='memo', byname=True) == 'from %s import Foo as memo\n' % __name__
117
+ assert getimportable(squared, alias='memo', byname=True) == 'from %s import squared as memo\n' % __name__
118
+ assert getimportable(squared, alias='memo', byname=False) == 'memo = squared = lambda x:x**2\n'
119
+ assert getimportable(add, alias='memo', byname=False) == 'def add(x,y):\n return x+y\n\nmemo = add\n'
120
+ assert getimportable(None, alias='memo', byname=False) == 'memo = None\n'
121
+ assert getimportable(100, alias='memo', byname=False) == 'memo = 100\n'
122
+ assert getimportable(add, explicit=True) == 'from %s import add\n' % __name__
123
+ assert getimportable(squared, explicit=True) == 'from %s import squared\n' % __name__
124
+ assert getimportable(Foo, explicit=True) == 'from %s import Foo\n' % __name__
125
+ assert getimportable(Foo.bar, explicit=True) == 'from %s import bar\n' % __name__
126
+ assert getimportable(_foo.bar, explicit=True) == 'from %s import bar\n' % __name__
127
+ assert getimportable(None, explicit=True) == 'None\n'
128
+ assert getimportable(100, explicit=True) == '100\n'
129
+
130
+
131
+ def test_numpy():
132
+ try:
133
+ from numpy import array
134
+ x = array([1,2,3])
135
+ assert getimportable(x) == 'from numpy import array\narray([1, 2, 3])\n'
136
+ assert getimportable(array) == 'from %s import array\n' % array.__module__
137
+ assert getimportable(x, byname=False) == 'from numpy import array\narray([1, 2, 3])\n'
138
+ assert getimportable(array, byname=False) == 'from %s import array\n' % array.__module__
139
+ except ImportError: pass
140
+
141
+ #NOTE: if before likely_import(pow), will cause pow to throw AssertionError
142
+ def test_foo():
143
+ assert getimportable(_foo, byname=False).startswith("import dill\nclass Foo(object):\n def bar(self, x):\n return x*x+x\ndill.loads(")
144
+
145
+ if __name__ == '__main__':
146
+ test_getsource()
147
+ test_itself()
148
+ test_builtin()
149
+ test_imported()
150
+ test_dynamic()
151
+ test_classes()
152
+ test_importable()
153
+ test_numpy()
154
+ test_foo()
llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/WHEEL ADDED
@@ -0,0 +1,8 @@
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.42.0)
3
+ Root-Is-Purelib: false
4
+ Tag: cp310-cp310-manylinux_2_5_x86_64
5
+ Tag: cp310-cp310-manylinux1_x86_64
6
+ Tag: cp310-cp310-manylinux_2_17_x86_64
7
+ Tag: cp310-cp310-manylinux2014_x86_64
8
+
llmeval-env/lib/python3.10/site-packages/frozenlist-1.4.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
1
+ frozenlist
llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (172 kB).
llmeval-env/lib/python3.10/site-packages/multiprocess/tests/__pycache__/mp_fork_bomb.cpython-310.pyc ADDED
Binary file (538 Bytes).