Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes; see the raw diff for the complete change set.
- env-llmeval/lib/python3.10/site-packages/dill/__diff.py +234 -0
- env-llmeval/lib/python3.10/site-packages/dill/__info__.py +291 -0
- env-llmeval/lib/python3.10/site-packages/dill/__init__.py +119 -0
- env-llmeval/lib/python3.10/site-packages/dill/_dill.py +2198 -0
- env-llmeval/lib/python3.10/site-packages/dill/_objects.py +537 -0
- env-llmeval/lib/python3.10/site-packages/dill/_shims.py +193 -0
- env-llmeval/lib/python3.10/site-packages/dill/detect.py +284 -0
- env-llmeval/lib/python3.10/site-packages/dill/logger.py +285 -0
- env-llmeval/lib/python3.10/site-packages/dill/objtypes.py +24 -0
- env-llmeval/lib/python3.10/site-packages/dill/pointers.py +122 -0
- env-llmeval/lib/python3.10/site-packages/dill/session.py +613 -0
- env-llmeval/lib/python3.10/site-packages/dill/settings.py +25 -0
- env-llmeval/lib/python3.10/site-packages/dill/source.py +1017 -0
- env-llmeval/lib/python3.10/site-packages/dill/temp.py +252 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__main__.py +35 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_abc.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_extendpickle.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_module.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_moduledict.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_selected.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_abc.py +169 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_check.py +62 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_classdef.py +340 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_detect.py +159 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_dictviews.py +39 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_extendpickle.py +53 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_fglobals.py +55 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_functions.py +141 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_functors.py +39 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_logger.py +70 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_mixins.py +121 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_moduledict.py +54 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_nested.py +135 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_pycapsule.py +45 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_registered.py +64 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_restricted.py +27 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_selected.py +125 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_session.py +280 -0
- env-llmeval/lib/python3.10/site-packages/dill/tests/test_source.py +154 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_api.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_soft.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_unix.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_windows.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/INSTALLER +1 -0
- env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/License.txt +1568 -0
- env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/METADATA +35 -0
- env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/RECORD +20 -0
env-llmeval/lib/python3.10/site-packages/dill/__diff.py
ADDED
@@ -0,0 +1,234 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2016 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/dill/blob/master/LICENSE

"""
Module to show if an object has changed since it was memorised
"""

import builtins
import os
import sys
import types
try:
    import numpy
    HAS_NUMPY = True
except ImportError:
    HAS_NUMPY = False

# pypy doesn't use reference counting
getrefcount = getattr(sys, 'getrefcount', lambda x:0)

# memo of objects indexed by id to a tuple (attributes, sequence items)
# attributes is a dict indexed by attribute name to attribute id
# sequence items is either a list of ids, of a dictionary of keys to ids
memo = {}
id_to_obj = {}
# types that cannot have changing attributes
builtins_types = set((str, list, dict, set, frozenset, int))
dont_memo = set(id(i) for i in (memo, sys.modules, sys.path_importer_cache,
                                os.environ, id_to_obj))


def get_attrs(obj):
    """
    Gets all the attributes of an object though its __dict__ or return None
    """
    if type(obj) in builtins_types \
       or type(obj) is type and obj in builtins_types:
        return
    return getattr(obj, '__dict__', None)


def get_seq(obj, cache={str: False, frozenset: False, list: True, set: True,
                        dict: True, tuple: True, type: False,
                        types.ModuleType: False, types.FunctionType: False,
                        types.BuiltinFunctionType: False}):
    """
    Gets all the items in a sequence or return None
    """
    try:
        o_type = obj.__class__
    except AttributeError:
        o_type = type(obj)
    hsattr = hasattr
    if o_type in cache:
        if cache[o_type]:
            if hsattr(obj, "copy"):
                return obj.copy()
            return obj
    elif HAS_NUMPY and o_type in (numpy.ndarray, numpy.ma.core.MaskedConstant):
        if obj.shape and obj.size:
            return obj
        else:
            return []
    elif hsattr(obj, "__contains__") and hsattr(obj, "__iter__") \
       and hsattr(obj, "__len__") and hsattr(o_type, "__contains__") \
       and hsattr(o_type, "__iter__") and hsattr(o_type, "__len__"):
        cache[o_type] = True
        if hsattr(obj, "copy"):
            return obj.copy()
        return obj
    else:
        cache[o_type] = False
        return None


def memorise(obj, force=False):
    """
    Adds an object to the memo, and recursively adds all the objects
    attributes, and if it is a container, its items. Use force=True to update
    an object already in the memo. Updating is not recursively done.
    """
    obj_id = id(obj)
    if obj_id in memo and not force or obj_id in dont_memo:
        return
    id_ = id
    g = get_attrs(obj)
    if g is None:
        attrs_id = None
    else:
        attrs_id = dict((key,id_(value)) for key, value in g.items())

    s = get_seq(obj)
    if s is None:
        seq_id = None
    elif hasattr(s, "items"):
        seq_id = dict((id_(key),id_(value)) for key, value in s.items())
    elif not hasattr(s, "__len__"): #XXX: avoid TypeError from unexpected case
        seq_id = None
    else:
        seq_id = [id_(i) for i in s]

    memo[obj_id] = attrs_id, seq_id
    id_to_obj[obj_id] = obj
    mem = memorise
    if g is not None:
        [mem(value) for key, value in g.items()]

    if s is not None:
        if hasattr(s, "items"):
            [(mem(key), mem(item))
             for key, item in s.items()]
        else:
            if hasattr(s, '__len__'):
                [mem(item) for item in s]
            else: mem(s)


def release_gone():
    itop, mp, src = id_to_obj.pop, memo.pop, getrefcount
    [(itop(id_), mp(id_)) for id_, obj in list(id_to_obj.items())
     if src(obj) < 4] #XXX: correct for pypy?


def whats_changed(obj, seen=None, simple=False, first=True):
    """
    Check an object against the memo. Returns a list in the form
    (attribute changes, container changed). Attribute changes is a dict of
    attribute name to attribute value. container changed is a boolean.
    If simple is true, just returns a boolean. None for either item means
    that it has not been checked yet
    """
    # Special cases
    if first:
        # ignore the _ variable, which only appears in interactive sessions
        if "_" in builtins.__dict__:
            del builtins._
        if seen is None:
            seen = {}

    obj_id = id(obj)

    if obj_id in seen:
        if simple:
            return any(seen[obj_id])
        return seen[obj_id]

    # Safety checks
    if obj_id in dont_memo:
        seen[obj_id] = [{}, False]
        if simple:
            return False
        return seen[obj_id]
    elif obj_id not in memo:
        if simple:
            return True
        else:
            raise RuntimeError("Object not memorised " + str(obj))

    seen[obj_id] = ({}, False)

    chngd = whats_changed
    id_ = id

    # compare attributes
    attrs = get_attrs(obj)
    if attrs is None:
        changed = {}
    else:
        obj_attrs = memo[obj_id][0]
        obj_get = obj_attrs.get
        changed = dict((key,None) for key in obj_attrs if key not in attrs)
        for key, o in attrs.items():
            if id_(o) != obj_get(key, None) or chngd(o, seen, True, False):
                changed[key] = o

    # compare sequence
    items = get_seq(obj)
    seq_diff = False
    if (items is not None) and (hasattr(items, '__len__')):
        obj_seq = memo[obj_id][1]
        if (len(items) != len(obj_seq)):
            seq_diff = True
        elif hasattr(obj, "items"): # dict type obj
            obj_get = obj_seq.get
            for key, item in items.items():
                if id_(item) != obj_get(id_(key)) \
                   or chngd(key, seen, True, False) \
                   or chngd(item, seen, True, False):
                    seq_diff = True
                    break
        else:
            for i, j in zip(items, obj_seq): # list type obj
                if id_(i) != j or chngd(i, seen, True, False):
                    seq_diff = True
                    break
    seen[obj_id] = changed, seq_diff
    if simple:
        return changed or seq_diff
    return changed, seq_diff


def has_changed(*args, **kwds):
    kwds['simple'] = True # ignore simple if passed in
    return whats_changed(*args, **kwds)

__import__ = __import__


def _imp(*args, **kwds):
    """
    Replaces the default __import__, to allow a module to be memorised
    before the user can change it
    """
    before = set(sys.modules.keys())
    mod = __import__(*args, **kwds)
    after = set(sys.modules.keys()).difference(before)
    for m in after:
        memorise(sys.modules[m])
    return mod

builtins.__import__ = _imp
if hasattr(builtins, "_"):
    del builtins._

# memorise all already imported modules. This implies that this must be
# imported first for any changes to be recorded
for mod in list(sys.modules.values()):
    memorise(mod)
release_gone()
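The file above exposes memorise(), has_changed(), and whats_changed() for detecting in-place mutation of objects after they were recorded. The following is a minimal sketch, not part of the diff, assuming the installed package makes this submodule importable as dill.__diff (as dill's own test suite does); the cfg dict and its keys are illustrative only.

# sketch only: exercising the change-tracking API from dill.__diff
from dill import __diff as diff

cfg = {"lr": 0.1, "epochs": 3}
diff.memorise(cfg)                 # record ids of the dict's keys and values
print(diff.has_changed(cfg))       # False: nothing mutated since memorise()

cfg["lr"] = 0.01                   # mutate the container in place
print(diff.has_changed(cfg))       # True: an item id no longer matches the memo
changed, container_changed = diff.whats_changed(cfg)
print(changed, container_changed)  # attribute changes are empty for a plain dict;
                                   # the container-changed flag is True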
env-llmeval/lib/python3.10/site-packages/dill/__info__.py
ADDED
@@ -0,0 +1,291 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
'''
-----------------------------
dill: serialize all of Python
-----------------------------

About Dill
==========

``dill`` extends Python's ``pickle`` module for serializing and de-serializing
Python objects to the majority of the built-in Python types. Serialization
is the process of converting an object to a byte stream, and the inverse
of which is converting a byte stream back to a Python object hierarchy.

``dill`` provides the user the same interface as the ``pickle`` module, and
also includes some additional features. In addition to pickling Python
objects, ``dill`` provides the ability to save the state of an interpreter
session in a single command. Hence, it would be feasible to save an
interpreter session, close the interpreter, ship the pickled file to
another computer, open a new interpreter, unpickle the session and
thus continue from the 'saved' state of the original interpreter
session.

``dill`` can be used to store Python objects to a file, but the primary
usage is to send Python objects across the network as a byte stream.
``dill`` is quite flexible, and allows arbitrary user defined classes
and functions to be serialized. Thus ``dill`` is not intended to be
secure against erroneously or maliciously constructed data. It is
left to the user to decide whether the data they unpickle is from
a trustworthy source.

``dill`` is part of ``pathos``, a Python framework for heterogeneous computing.
``dill`` is in active development, so any user feedback, bug reports, comments,
or suggestions are highly appreciated. A list of issues is located at
https://github.com/uqfoundation/dill/issues, with a legacy list maintained at
https://uqfoundation.github.io/project/pathos/query.


Major Features
==============

``dill`` can pickle the following standard types:

    - none, type, bool, int, float, complex, bytes, str,
    - tuple, list, dict, file, buffer, builtin,
    - Python classes, namedtuples, dataclasses, metaclasses,
    - instances of classes,
    - set, frozenset, array, functions, exceptions

``dill`` can also pickle more 'exotic' standard types:

    - functions with yields, nested functions, lambdas,
    - cell, method, unboundmethod, module, code, methodwrapper,
    - methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor,
    - dictproxy, slice, notimplemented, ellipsis, quit

``dill`` cannot yet pickle these standard types:

    - frame, generator, traceback

``dill`` also provides the capability to:

    - save and load Python interpreter sessions
    - save and extract the source code from functions and classes
    - interactively diagnose pickling errors


Current Release
===============

The latest released version of ``dill`` is available from:

    https://pypi.org/project/dill

``dill`` is distributed under a 3-clause BSD license.


Development Version
===================

You can get the latest development version with all the shiny new features at:

    https://github.com/uqfoundation

If you have a new contribution, please submit a pull request.


Installation
============

``dill`` can be installed with ``pip``::

    $ pip install dill

To optionally include the ``objgraph`` diagnostic tool in the install::

    $ pip install dill[graph]

To optionally include the ``gprof2dot`` diagnostic tool in the install::

    $ pip install dill[profile]

For windows users, to optionally install session history tools::

    $ pip install dill[readline]


Requirements
============

``dill`` requires:

    - ``python`` (or ``pypy``), **>=3.8**
    - ``setuptools``, **>=42**

Optional requirements:

    - ``objgraph``, **>=1.7.2**
    - ``gprof2dot``, **>=2022.7.29**
    - ``pyreadline``, **>=1.7.1** (on windows)


Basic Usage
===========

``dill`` is a drop-in replacement for ``pickle``. Existing code can be
updated to allow complete pickling using::

    >>> import dill as pickle

or::

    >>> from dill import dumps, loads

``dumps`` converts the object to a unique byte string, and ``loads`` performs
the inverse operation::

    >>> squared = lambda x: x**2
    >>> loads(dumps(squared))(3)
    9

There are a number of options to control serialization which are provided
as keyword arguments to several ``dill`` functions:

* with *protocol*, the pickle protocol level can be set. This uses the
  same value as the ``pickle`` module, *DEFAULT_PROTOCOL*.
* with *byref=True*, ``dill`` to behave a lot more like pickle with
  certain objects (like modules) pickled by reference as opposed to
  attempting to pickle the object itself.
* with *recurse=True*, objects referred to in the global dictionary are
  recursively traced and pickled, instead of the default behavior of
  attempting to store the entire global dictionary.
* with *fmode*, the contents of the file can be pickled along with the file
  handle, which is useful if the object is being sent over the wire to a
  remote system which does not have the original file on disk. Options are
  *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content
  and *FILE_FMODE* for content and handle.
* with *ignore=False*, objects reconstructed with types defined in the
  top-level script environment use the existing type in the environment
  rather than a possibly different reconstructed type.

The default serialization can also be set globally in *dill.settings*.
Thus, we can modify how ``dill`` handles references to the global dictionary
locally or globally::

    >>> import dill.settings
    >>> dumps(absolute) == dumps(absolute, recurse=True)
    False
    >>> dill.settings['recurse'] = True
    >>> dumps(absolute) == dumps(absolute, recurse=True)
    True

``dill`` also includes source code inspection, as an alternate to pickling::

    >>> import dill.source
    >>> print(dill.source.getsource(squared))
    squared = lambda x:x**2

To aid in debugging pickling issues, use *dill.detect* which provides
tools like pickle tracing::

    >>> import dill.detect
    >>> with dill.detect.trace():
    >>>     dumps(squared)
    ┬ F1: <function <lambda> at 0x7fe074f8c280>
    ├┬ F2: <function _create_function at 0x7fe074c49c10>
    │└ # F2 [34 B]
    ├┬ Co: <code object <lambda> at 0x7fe07501eb30, file "<stdin>", line 1>
    │├┬ F2: <function _create_code at 0x7fe074c49ca0>
    ││└ # F2 [19 B]
    │└ # Co [87 B]
    ├┬ D1: <dict object at 0x7fe0750d4680>
    │└ # D1 [22 B]
    ├┬ D2: <dict object at 0x7fe074c5a1c0>
    │└ # D2 [2 B]
    ├┬ D2: <dict object at 0x7fe074f903c0>
    │├┬ D2: <dict object at 0x7fe074f8ebc0>
    ││└ # D2 [2 B]
    │└ # D2 [23 B]
    └ # F1 [180 B]

With trace, we see how ``dill`` stored the lambda (``F1``) by first storing
``_create_function``, the underlying code object (``Co``) and ``_create_code``
(which is used to handle code objects), then we handle the reference to
the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that
save the lambda object's state. A ``#`` marks when the object is actually stored.


More Information
================

Probably the best way to get started is to look at the documentation at
http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that
demonstrate how ``dill`` can serialize different Python objects. You can
run the test suite with ``python -m dill.tests``. The contents of any
pickle file can be examined with ``undill``. As ``dill`` conforms to
the ``pickle`` interface, the examples and documentation found at
http://docs.python.org/library/pickle.html also apply to ``dill``
if one will ``import dill as pickle``. The source code is also generally
well documented, so further questions may be resolved by inspecting the
code itself. Please feel free to submit a ticket on github, or ask a
question on stackoverflow (**@Mike McKerns**).
If you would like to share how you use ``dill`` in your work, please send
an email (to **mmckerns at uqfoundation dot org**).


Citation
========

If you use ``dill`` to do research that leads to publication, we ask that you
acknowledge use of ``dill`` by citing the following in your publication::

    M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis,
    "Building a framework for predictive science", Proceedings of
    the 10th Python in Science Conference, 2011;
    http://arxiv.org/pdf/1202.1056

    Michael McKerns and Michael Aivazis,
    "pathos: a framework for heterogeneous computing", 2010- ;
    https://uqfoundation.github.io/project/pathos

Please see https://uqfoundation.github.io/project/pathos or
http://arxiv.org/pdf/1202.1056 for further information.

'''

__version__ = '0.3.8'
__author__ = 'Mike McKerns'

__license__ = '''
Copyright (c) 2004-2016 California Institute of Technology.
Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
All rights reserved.

This software is available subject to the conditions and terms laid
out below. By downloading and using this software you are agreeing
to the following conditions.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

    - Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.

    - Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.

    - Neither the names of the copyright holders nor the names of any of
      the contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

'''
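The docstring above already carries doctest-style snippets; as a compact runnable companion (a sketch only, assuming dill is installed and the arbitrary path /tmp/squared.pkl is writable), the dump/load round trip and the recurse option it describes look like this:

import dill

squared = lambda x: x**2

# in-memory round trip, tracing globals referenced by the object
payload = dill.dumps(squared, recurse=True)
assert dill.loads(payload)(3) == 9

# file round trip with dump()/load()
with open('/tmp/squared.pkl', 'wb') as f:
    dill.dump(squared, f)
with open('/tmp/squared.pkl', 'rb') as f:
    assert dill.load(f)(4) == 16

# the same option set globally instead of per call
dill.settings['recurse'] = True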
env-llmeval/lib/python3.10/site-packages/dill/__init__.py
ADDED
@@ -0,0 +1,119 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2016 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/dill/blob/master/LICENSE

# author, version, license, and long description
try: # the package is installed
    from .__info__ import __version__, __author__, __doc__, __license__
except: # pragma: no cover
    import os
    import sys
    parent = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
    sys.path.append(parent)
    # get distribution meta info
    from version import (__version__, __author__,
                         get_license_text, get_readme_as_rst)
    __license__ = get_license_text(os.path.join(parent, 'LICENSE'))
    __license__ = "\n%s" % __license__
    __doc__ = get_readme_as_rst(os.path.join(parent, 'README.md'))
    del os, sys, parent, get_license_text, get_readme_as_rst


from ._dill import (
    dump, dumps, load, loads, copy,
    Pickler, Unpickler, register, pickle, pickles, check,
    DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, HANDLE_FMODE, CONTENTS_FMODE, FILE_FMODE,
    PickleError, PickleWarning, PicklingError, PicklingWarning, UnpicklingError,
    UnpicklingWarning,
)
from .session import (
    dump_module, load_module, load_module_asdict,
    dump_session, load_session # backward compatibility
)
from . import detect, logger, session, source, temp

# get global settings
from .settings import settings

# make sure "trace" is turned off
logger.trace(False)

objects = {}
# local import of dill._objects
#from . import _objects
#objects.update(_objects.succeeds)
#del _objects

# local import of dill.objtypes
from . import objtypes as types

def load_types(pickleable=True, unpickleable=True):
    """load pickleable and/or unpickleable types to ``dill.types``

    ``dill.types`` is meant to mimic the ``types`` module, providing a
    registry of object types. By default, the module is empty (for import
    speed purposes). Use the ``load_types`` function to load selected object
    types to the ``dill.types`` module.

    Args:
        pickleable (bool, default=True): if True, load pickleable types.
        unpickleable (bool, default=True): if True, load unpickleable types.

    Returns:
        None
    """
    from importlib import reload
    # local import of dill.objects
    from . import _objects
    if pickleable:
        objects.update(_objects.succeeds)
    else:
        [objects.pop(obj,None) for obj in _objects.succeeds]
    if unpickleable:
        objects.update(_objects.failures)
    else:
        [objects.pop(obj,None) for obj in _objects.failures]
    objects.update(_objects.registered)
    del _objects
    # reset contents of types to 'empty'
    [types.__dict__.pop(obj) for obj in list(types.__dict__.keys()) \
       if obj.find('Type') != -1]
    # add corresponding types from objects to types
    reload(types)

def extend(use_dill=True):
    '''add (or remove) dill types to/from the pickle registry

    by default, ``dill`` populates its types to ``pickle.Pickler.dispatch``.
    Thus, all ``dill`` types are available upon calling ``'import pickle'``.
    To drop all ``dill`` types from the ``pickle`` dispatch, *use_dill=False*.

    Args:
        use_dill (bool, default=True): if True, extend the dispatch table.

    Returns:
        None
    '''
    from ._dill import _revert_extension, _extend
    if use_dill: _extend()
    else: _revert_extension()
    return

extend()


def license():
    """print license"""
    print (__license__)
    return

def citation():
    """print citation"""
    print (__doc__[-491:-118])
    return

# end of file
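For reference, a short sketch of the two helpers defined in this __init__.py, extend() and load_types(); it is not part of the diff and assumes only an installed dill (the printed count will vary by version):

import dill

# extend() is already called on import; calling it again is harmless and
# keeps dill's serializers registered with the stock pickle dispatch table.
dill.extend(True)

# dill.types starts empty for import speed; load_types() fills the registry
# and the dill.objects dict on demand.
dill.load_types(pickleable=True, unpickleable=False)
print(len(dill.objects))   # > 0 once the pickleable objects are loaded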
env-llmeval/lib/python3.10/site-packages/dill/_dill.py
ADDED
@@ -0,0 +1,2198 @@
# -*- coding: utf-8 -*-
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2015 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
"""
dill: a utility for serialization of python objects

The primary functions in `dill` are :func:`dump` and
:func:`dumps` for serialization ("pickling") to a
file or to a string, respectively, and :func:`load`
and :func:`loads` for deserialization ("unpickling"),
similarly, from a file or from a string. Other notable
functions are :func:`~dill.dump_module` and
:func:`~dill.load_module`, which are used to save and
restore module objects, including an intepreter session.

Based on code written by Oren Tirosh and Armin Ronacher.
Extended to a (near) full set of the builtin types (in types module),
and coded to the pickle interface, by <[email protected]>.
Initial port to python3 by Jonathan Dobson, continued by mmckerns.
Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns.
Tested against CH16+ Std. Lib. ... TBD.
"""

from __future__ import annotations

__all__ = [
    'dump','dumps','load','loads','copy',
    'Pickler','Unpickler','register','pickle','pickles','check',
    'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE',
    'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError',
    'UnpicklingWarning',
]

__module__ = 'dill'

import warnings
from .logger import adapter as logger
from .logger import trace as _trace
log = logger # backward compatibility (see issue #582)

import os
import sys
diff = None
_use_diff = False
OLD38 = (sys.hexversion < 0x3080000)
OLD39 = (sys.hexversion < 0x3090000)
OLD310 = (sys.hexversion < 0x30a0000)
OLD312a7 = (sys.hexversion < 0x30c00a7)
#XXX: get types from .objtypes ?
import builtins as __builtin__
from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler
from pickle import GLOBAL, POP
from _thread import LockType
from _thread import RLock as RLockType
#from io import IOBase
from types import CodeType, FunctionType, MethodType, GeneratorType, \
    TracebackType, FrameType, ModuleType, BuiltinMethodType
BufferType = memoryview #XXX: unregistered
ClassType = type # no 'old-style' classes
EllipsisType = type(Ellipsis)
#FileType = IOBase
NotImplementedType = type(NotImplemented)
SliceType = slice
TypeType = type # 'new-style' classes #XXX: unregistered
XRangeType = range
from types import MappingProxyType as DictProxyType, new_class
from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError
import __main__ as _main_module
import marshal
import gc
# import zlib
import abc
import dataclasses
from weakref import ReferenceType, ProxyType, CallableProxyType
from collections import OrderedDict
from enum import Enum, EnumMeta
from functools import partial
from operator import itemgetter, attrgetter
GENERATOR_FAIL = False
import importlib.machinery
EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES)
try:
    import ctypes
    HAS_CTYPES = True
    # if using `pypy`, pythonapi is not found
    IS_PYPY = not hasattr(ctypes, 'pythonapi')
except ImportError:
    HAS_CTYPES = False
    IS_PYPY = False
NumpyUfuncType = None
NumpyDType = None
NumpyArrayType = None
try:
    if not importlib.machinery.PathFinder().find_spec('numpy'):
        raise ImportError("No module named 'numpy'")
    NumpyUfuncType = True
    NumpyDType = True
    NumpyArrayType = True
except ImportError:
    pass
def __hook__():
    global NumpyArrayType, NumpyDType, NumpyUfuncType
    from numpy import ufunc as NumpyUfuncType
    from numpy import ndarray as NumpyArrayType
    from numpy import dtype as NumpyDType
    return True
if NumpyArrayType: # then has numpy
    def ndarraysubclassinstance(obj_type):
        if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy array (or subclass) instance
        __hook__() # import numpy (so the following works!!!)
        # verify that __reduce__ has not been overridden
        if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
                or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
            return False
        return True
    def numpyufunc(obj_type):
        return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
    def numpydtype(obj_type):
        if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy dtype
        __hook__() # import numpy (so the following works!!!)
        return obj_type is type(NumpyDType) # handles subclasses
else:
    def ndarraysubclassinstance(obj): return False
    def numpyufunc(obj): return False
    def numpydtype(obj): return False

from types import GetSetDescriptorType, ClassMethodDescriptorType, \
    WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \
    MethodWrapperType #XXX: unused

# make sure to add these 'hand-built' types to _typemap
CellType = type((lambda x: lambda y: x)(0).__closure__[0])
PartialType = type(partial(int, base=2))
SuperType = type(super(Exception, TypeError()))
ItemGetterType = type(itemgetter(0))
AttrGetterType = type(attrgetter('__repr__'))

try:
    from functools import _lru_cache_wrapper as LRUCacheType
except ImportError:
    LRUCacheType = None

if not isinstance(LRUCacheType, type):
    LRUCacheType = None

def get_file_type(*args, **kwargs):
    open = kwargs.pop("open", __builtin__.open)
    f = open(os.devnull, *args, **kwargs)
    t = type(f)
    f.close()
    return t

IS_PYODIDE = sys.platform == 'emscripten'

FileType = get_file_type('rb', buffering=0)
TextWrapperType = get_file_type('r', buffering=-1)
BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1)
BufferedReaderType = get_file_type('rb', buffering=-1)
BufferedWriterType = get_file_type('wb', buffering=-1)
try:
    from _pyio import open as _open
    PyTextWrapperType = get_file_type('r', buffering=-1, open=_open)
    PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
    PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
    PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
except ImportError:
    PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None
from io import BytesIO as StringIO
InputType = OutputType = None
from socket import socket as SocketType
#FIXME: additionally calls ForkingPickler.register several times
from multiprocessing.reduction import _reduce_socket as reduce_socket
try: #pragma: no cover
    IS_IPYTHON = __IPYTHON__ # is True
    ExitType = None # IPython.core.autocall.ExitAutocall
    IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython')
except NameError:
    IS_IPYTHON = False
    try: ExitType = type(exit) # apparently 'exit' can be removed
    except NameError: ExitType = None
    IPYTHON_SINGLETONS = ()

import inspect
import typing


### Shims for different versions of Python and dill
class Sentinel(object):
    """
    Create a unique sentinel object that is pickled as a constant.
    """
    def __init__(self, name, module_name=None):
        self.name = name
        if module_name is None:
            # Use the calling frame's module
            self.__module__ = inspect.currentframe().f_back.f_globals['__name__']
        else:
            self.__module__ = module_name # pragma: no cover
    def __repr__(self):
        return self.__module__ + '.' + self.name # pragma: no cover
    def __copy__(self):
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        return self.name
    def __reduce_ex__(self, protocol):
        return self.name

from . import _shims
from ._shims import Reduce, Getattr

### File modes
#: Pickles the file handle, preserving mode. The position of the unpickled
#: object is as for a new file handle.
HANDLE_FMODE = 0
#: Pickles the file contents, creating a new file if on load the file does
#: not exist. The position = min(pickled position, EOF) and mode is chosen
#: as such that "best" preserves behavior of the original file.
CONTENTS_FMODE = 1
#: Pickles the entire file (handle and contents), preserving mode and position.
FILE_FMODE = 2

### Shorthands (modified from python2.5/lib/pickle.py)
def copy(obj, *args, **kwds):
    """
    Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).

    See :func:`dumps` and :func:`loads` for keyword arguments.
    """
    ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
    return loads(dumps(obj, *args, **kwds), ignore=ignore)

def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a file.

    See :func:`dumps` for keyword arguments.
    """
    from .settings import settings
    protocol = settings['protocol'] if protocol is None else int(protocol)
    _kwds = kwds.copy()
    _kwds.update(dict(byref=byref, fmode=fmode, recurse=recurse))
    Pickler(file, protocol, **_kwds).dump(obj)
    return

def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a string.

    *protocol* is the pickler protocol, as defined for Python *pickle*.

    If *byref=True*, then dill behaves a lot more like pickle as certain
    objects (like modules) are pickled by reference as opposed to attempting
    to pickle the object itself.

    If *recurse=True*, then objects referred to in the global dictionary
    are recursively traced and pickled, instead of the default behavior
    of attempting to store the entire global dictionary. This is needed for
    functions defined via *exec()*.

    *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
    or :const:`FILE_FMODE`) indicates how file handles will be pickled.
    For example, when pickling a data file handle for transfer to a remote
    compute service, *FILE_FMODE* will include the file contents in the
    pickle and cursor position so that a remote method can operate
    transparently on an object with an open file handle.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    file = StringIO()
    dump(obj, file, protocol, byref, fmode, recurse, **kwds)#, strictio)
    return file.getvalue()

def load(file, ignore=None, **kwds):
    """
    Unpickle an object from a file.

    See :func:`loads` for keyword arguments.
    """
    return Unpickler(file, ignore=ignore, **kwds).load()

def loads(str, ignore=None, **kwds):
    """
    Unpickle an object from a string.

    If *ignore=False* then objects whose class is defined in the module
    *__main__* are updated to reference the existing class in *__main__*,
    otherwise they are left to refer to the reconstructed type, which may
    be different.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    file = StringIO(str)
    return load(file, ignore, **kwds)

# def dumpzs(obj, protocol=None):
#     """pickle an object to a compressed string"""
#     return zlib.compress(dumps(obj, protocol))

# def loadzs(str):
#     """unpickle an object from a compressed string"""
#     return loads(zlib.decompress(str))

### End: Shorthands ###

class MetaCatchingDict(dict):
    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def __missing__(self, key):
        if issubclass(key, type):
            return save_type
        else:
            raise KeyError()

class PickleWarning(Warning, PickleError):
    pass

class PicklingWarning(PickleWarning, PicklingError):
    pass

class UnpicklingWarning(PickleWarning, UnpicklingError):
    pass

### Extend the Picklers
class Pickler(StockPickler):
    """python's Pickler extended to interpreter sessions"""
    dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
        = MetaCatchingDict(StockPickler.dispatch.copy())
    """The dispatch table, a dictionary of serializing functions used
    by Pickler to save objects of specific types. Use :func:`pickle`
    or :func:`register` to associate types to custom functions.

    :meta hide-value:
    """
    _session = False
    from .settings import settings

    def __init__(self, file, *args, **kwds):
        settings = Pickler.settings
        _byref = kwds.pop('byref', None)
        #_strictio = kwds.pop('strictio', None)
        _fmode = kwds.pop('fmode', None)
        _recurse = kwds.pop('recurse', None)
        StockPickler.__init__(self, file, *args, **kwds)
        self._main = _main_module
        self._diff_cache = {}
        self._byref = settings['byref'] if _byref is None else _byref
        self._strictio = False #_strictio
        self._fmode = settings['fmode'] if _fmode is None else _fmode
        self._recurse = settings['recurse'] if _recurse is None else _recurse
        self._postproc = OrderedDict()
        self._file = file

    def save(self, obj, save_persistent_id=True):
        # numpy hack
        obj_type = type(obj)
        if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
            # register if the object is a numpy ufunc
            # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
            if numpyufunc(obj_type):
                @register(obj_type)
                def save_numpy_ufunc(pickler, obj):
                    logger.trace(pickler, "Nu: %s", obj)
                    name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
                    StockPickler.save_global(pickler, obj, name=name)
                    logger.trace(pickler, "# Nu")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def udump(f): return f.__name__
                #   def uload(name): return getattr(numpy, name)
                #   copy_reg.pickle(NumpyUfuncType, udump, uload)
            # register if the object is a numpy dtype
            if numpydtype(obj_type):
                @register(obj_type)
                def save_numpy_dtype(pickler, obj):
                    logger.trace(pickler, "Dt: %s", obj)
                    pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
                    logger.trace(pickler, "# Dt")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def uload(name): return type(NumpyDType(name))
                #   def udump(f): return uload, (f.type,)
                #   copy_reg.pickle(NumpyDTypeType, udump, uload)
            # register if the object is a subclassed numpy array instance
            if ndarraysubclassinstance(obj_type):
                @register(obj_type)
                def save_numpy_array(pickler, obj):
                    logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
                    npdict = getattr(obj, '__dict__', None)
                    f, args, state = obj.__reduce__()
                    pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
                    logger.trace(pickler, "# Nu")
                    return
        # end numpy hack

        if GENERATOR_FAIL and obj_type is GeneratorType:
            msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
            raise PicklingError(msg)
        StockPickler.save(self, obj, save_persistent_id)

    save.__doc__ = StockPickler.save.__doc__

    def dump(self, obj): #NOTE: if settings change, need to update attributes
        logger.trace_setup(self)
        StockPickler.dump(self, obj)
    dump.__doc__ = StockPickler.dump.__doc__

class Unpickler(StockUnpickler):
    """python's Unpickler extended to interpreter sessions and more types"""
    from .settings import settings
    _session = False

    def find_class(self, module, name):
        if (module, name) == ('__builtin__', '__main__'):
            return self._main.__dict__ #XXX: above set w/save_module_dict
PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
|
172 |
+
PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
|
173 |
+
PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
|
174 |
+
except ImportError:
|
175 |
+
PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None
|
176 |
+
from io import BytesIO as StringIO
|
177 |
+
InputType = OutputType = None
|
178 |
+
from socket import socket as SocketType
|
179 |
+
#FIXME: additionally calls ForkingPickler.register several times
|
180 |
+
from multiprocessing.reduction import _reduce_socket as reduce_socket
|
181 |
+
try: #pragma: no cover
|
182 |
+
IS_IPYTHON = __IPYTHON__ # is True
|
183 |
+
ExitType = None # IPython.core.autocall.ExitAutocall
|
184 |
+
IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython')
|
185 |
+
except NameError:
|
186 |
+
IS_IPYTHON = False
|
187 |
+
try: ExitType = type(exit) # apparently 'exit' can be removed
|
188 |
+
except NameError: ExitType = None
|
189 |
+
IPYTHON_SINGLETONS = ()
|
190 |
+
|
191 |
+
import inspect
|
192 |
+
import typing
|
193 |
+
|
194 |
+
|
195 |
+
### Shims for different versions of Python and dill
|
196 |
+
class Sentinel(object):
|
197 |
+
"""
|
198 |
+
Create a unique sentinel object that is pickled as a constant.
|
199 |
+
"""
|
200 |
+
def __init__(self, name, module_name=None):
|
201 |
+
self.name = name
|
202 |
+
if module_name is None:
|
203 |
+
# Use the calling frame's module
|
204 |
+
self.__module__ = inspect.currentframe().f_back.f_globals['__name__']
|
205 |
+
else:
|
206 |
+
self.__module__ = module_name # pragma: no cover
|
207 |
+
def __repr__(self):
|
208 |
+
return self.__module__ + '.' + self.name # pragma: no cover
|
209 |
+
def __copy__(self):
|
210 |
+
return self # pragma: no cover
|
211 |
+
def __deepcopy__(self, memo):
|
212 |
+
return self # pragma: no cover
|
213 |
+
def __reduce__(self):
|
214 |
+
return self.name
|
215 |
+
def __reduce_ex__(self, protocol):
|
216 |
+
return self.name
|
217 |
+
|
218 |
+
from . import _shims
|
219 |
+
from ._shims import Reduce, Getattr
|
220 |
+
|
221 |
+
### File modes
|
222 |
+
#: Pickles the file handle, preserving mode. The position of the unpickled
|
223 |
+
#: object is as for a new file handle.
|
224 |
+
HANDLE_FMODE = 0
|
225 |
+
#: Pickles the file contents, creating a new file if on load the file does
|
226 |
+
#: not exist. The position = min(pickled position, EOF) and mode is chosen
|
227 |
+
#: as such that "best" preserves behavior of the original file.
|
228 |
+
CONTENTS_FMODE = 1
|
229 |
+
#: Pickles the entire file (handle and contents), preserving mode and position.
|
230 |
+
FILE_FMODE = 2
|
231 |
+
|
232 |
+
### Shorthands (modified from python2.5/lib/pickle.py)
|
233 |
+
def copy(obj, *args, **kwds):
|
234 |
+
"""
|
235 |
+
Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).
|
236 |
+
|
237 |
+
See :func:`dumps` and :func:`loads` for keyword arguments.
|
238 |
+
"""
|
239 |
+
ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
|
240 |
+
return loads(dumps(obj, *args, **kwds), ignore=ignore)
|
241 |
+
|
242 |
+
def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
|
243 |
+
"""
|
244 |
+
Pickle an object to a file.
|
245 |
+
|
246 |
+
See :func:`dumps` for keyword arguments.
|
247 |
+
"""
|
248 |
+
from .settings import settings
|
249 |
+
protocol = settings['protocol'] if protocol is None else int(protocol)
|
250 |
+
_kwds = kwds.copy()
|
251 |
+
_kwds.update(dict(byref=byref, fmode=fmode, recurse=recurse))
|
252 |
+
Pickler(file, protocol, **_kwds).dump(obj)
|
253 |
+
return
|
254 |
+
|
255 |
+
def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
|
256 |
+
"""
|
257 |
+
Pickle an object to a string.
|
258 |
+
|
259 |
+
*protocol* is the pickler protocol, as defined for Python *pickle*.
|
260 |
+
|
261 |
+
If *byref=True*, then dill behaves a lot more like pickle as certain
|
262 |
+
objects (like modules) are pickled by reference as opposed to attempting
|
263 |
+
to pickle the object itself.
|
264 |
+
|
265 |
+
If *recurse=True*, then objects referred to in the global dictionary
|
266 |
+
are recursively traced and pickled, instead of the default behavior
|
267 |
+
of attempting to store the entire global dictionary. This is needed for
|
268 |
+
functions defined via *exec()*.
|
269 |
+
|
270 |
+
*fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
|
271 |
+
or :const:`FILE_FMODE`) indicates how file handles will be pickled.
|
272 |
+
For example, when pickling a data file handle for transfer to a remote
|
273 |
+
compute service, *FILE_FMODE* will include the file contents in the
|
274 |
+
pickle and cursor position so that a remote method can operate
|
275 |
+
transparently on an object with an open file handle.
|
276 |
+
|
277 |
+
Default values for keyword arguments can be set in :mod:`dill.settings`.
|
278 |
+
"""
|
279 |
+
file = StringIO()
|
280 |
+
dump(obj, file, protocol, byref, fmode, recurse, **kwds)#, strictio)
|
281 |
+
return file.getvalue()
|
282 |
+
|
283 |
+
def load(file, ignore=None, **kwds):
|
284 |
+
"""
|
285 |
+
Unpickle an object from a file.
|
286 |
+
|
287 |
+
See :func:`loads` for keyword arguments.
|
288 |
+
"""
|
289 |
+
return Unpickler(file, ignore=ignore, **kwds).load()
|
290 |
+
|
291 |
+
def loads(str, ignore=None, **kwds):
|
292 |
+
"""
|
293 |
+
Unpickle an object from a string.
|
294 |
+
|
295 |
+
If *ignore=False* then objects whose class is defined in the module
|
296 |
+
*__main__* are updated to reference the existing class in *__main__*,
|
297 |
+
otherwise they are left to refer to the reconstructed type, which may
|
298 |
+
be different.
|
299 |
+
|
300 |
+
Default values for keyword arguments can be set in :mod:`dill.settings`.
|
301 |
+
"""
|
302 |
+
file = StringIO(str)
|
303 |
+
return load(file, ignore, **kwds)
|
304 |
+
|
305 |
+
# def dumpzs(obj, protocol=None):
|
306 |
+
# """pickle an object to a compressed string"""
|
307 |
+
# return zlib.compress(dumps(obj, protocol))
|
308 |
+
|
309 |
+
# def loadzs(str):
|
310 |
+
# """unpickle an object from a compressed string"""
|
311 |
+
# return loads(zlib.decompress(str))
|
312 |
+
|
313 |
+
### End: Shorthands ###
|
314 |
+
|
315 |
+
class MetaCatchingDict(dict):
|
316 |
+
def get(self, key, default=None):
|
317 |
+
try:
|
318 |
+
return self[key]
|
319 |
+
except KeyError:
|
320 |
+
return default
|
321 |
+
|
322 |
+
def __missing__(self, key):
|
323 |
+
if issubclass(key, type):
|
324 |
+
return save_type
|
325 |
+
else:
|
326 |
+
raise KeyError()
|
327 |
+
|
328 |
+
class PickleWarning(Warning, PickleError):
|
329 |
+
pass
|
330 |
+
|
331 |
+
class PicklingWarning(PickleWarning, PicklingError):
|
332 |
+
pass
|
333 |
+
|
334 |
+
class UnpicklingWarning(PickleWarning, UnpicklingError):
|
335 |
+
pass
|
336 |
+
|
337 |
+
### Extend the Picklers
|
338 |
+
class Pickler(StockPickler):
|
339 |
+
"""python's Pickler extended to interpreter sessions"""
|
340 |
+
dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
|
341 |
+
= MetaCatchingDict(StockPickler.dispatch.copy())
|
342 |
+
"""The dispatch table, a dictionary of serializing functions used
|
343 |
+
by Pickler to save objects of specific types. Use :func:`pickle`
|
344 |
+
or :func:`register` to associate types to custom functions.
|
345 |
+
|
346 |
+
:meta hide-value:
|
347 |
+
"""
|
348 |
+
_session = False
|
349 |
+
from .settings import settings
|
350 |
+
|
351 |
+
def __init__(self, file, *args, **kwds):
|
352 |
+
settings = Pickler.settings
|
353 |
+
_byref = kwds.pop('byref', None)
|
354 |
+
#_strictio = kwds.pop('strictio', None)
|
355 |
+
_fmode = kwds.pop('fmode', None)
|
356 |
+
_recurse = kwds.pop('recurse', None)
|
357 |
+
StockPickler.__init__(self, file, *args, **kwds)
|
358 |
+
self._main = _main_module
|
359 |
+
self._diff_cache = {}
|
360 |
+
self._byref = settings['byref'] if _byref is None else _byref
|
361 |
+
self._strictio = False #_strictio
|
362 |
+
self._fmode = settings['fmode'] if _fmode is None else _fmode
|
363 |
+
self._recurse = settings['recurse'] if _recurse is None else _recurse
|
364 |
+
self._postproc = OrderedDict()
|
365 |
+
self._file = file
|
366 |
+
|
367 |
+
def save(self, obj, save_persistent_id=True):
|
368 |
+
# numpy hack
|
369 |
+
obj_type = type(obj)
|
370 |
+
if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
|
371 |
+
# register if the object is a numpy ufunc
|
372 |
+
# thanks to Paul Kienzle for pointing out ufuncs didn't pickle
|
373 |
+
if numpyufunc(obj_type):
|
374 |
+
@register(obj_type)
|
375 |
+
def save_numpy_ufunc(pickler, obj):
|
376 |
+
logger.trace(pickler, "Nu: %s", obj)
|
377 |
+
name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
|
378 |
+
StockPickler.save_global(pickler, obj, name=name)
|
379 |
+
logger.trace(pickler, "# Nu")
|
380 |
+
return
|
381 |
+
# NOTE: the above 'save' performs like:
|
382 |
+
# import copy_reg
|
383 |
+
# def udump(f): return f.__name__
|
384 |
+
# def uload(name): return getattr(numpy, name)
|
385 |
+
# copy_reg.pickle(NumpyUfuncType, udump, uload)
|
386 |
+
# register if the object is a numpy dtype
|
387 |
+
if numpydtype(obj_type):
|
388 |
+
@register(obj_type)
|
389 |
+
def save_numpy_dtype(pickler, obj):
|
390 |
+
logger.trace(pickler, "Dt: %s", obj)
|
391 |
+
pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
|
392 |
+
logger.trace(pickler, "# Dt")
|
393 |
+
return
|
394 |
+
# NOTE: the above 'save' performs like:
|
395 |
+
# import copy_reg
|
396 |
+
# def uload(name): return type(NumpyDType(name))
|
397 |
+
# def udump(f): return uload, (f.type,)
|
398 |
+
# copy_reg.pickle(NumpyDTypeType, udump, uload)
|
399 |
+
# register if the object is a subclassed numpy array instance
|
400 |
+
if ndarraysubclassinstance(obj_type):
|
401 |
+
@register(obj_type)
|
402 |
+
def save_numpy_array(pickler, obj):
|
403 |
+
logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
|
404 |
+
npdict = getattr(obj, '__dict__', None)
|
405 |
+
f, args, state = obj.__reduce__()
|
406 |
+
pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
|
407 |
+
logger.trace(pickler, "# Nu")
|
408 |
+
return
|
409 |
+
# end numpy hack
|
410 |
+
|
411 |
+
if GENERATOR_FAIL and obj_type is GeneratorType:
|
412 |
+
msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
|
413 |
+
raise PicklingError(msg)
|
414 |
+
StockPickler.save(self, obj, save_persistent_id)
|
415 |
+
|
416 |
+
save.__doc__ = StockPickler.save.__doc__
|
417 |
+
|
418 |
+
def dump(self, obj): #NOTE: if settings change, need to update attributes
|
419 |
+
logger.trace_setup(self)
|
420 |
+
StockPickler.dump(self, obj)
|
421 |
+
dump.__doc__ = StockPickler.dump.__doc__
|
422 |
+
|
423 |
+
class Unpickler(StockUnpickler):
|
424 |
+
"""python's Unpickler extended to interpreter sessions and more types"""
|
425 |
+
from .settings import settings
|
426 |
+
_session = False
|
427 |
+
|
428 |
+
def find_class(self, module, name):
|
429 |
+
if (module, name) == ('__builtin__', '__main__'):
|
430 |
+
return self._main.__dict__ #XXX: above set w/save_module_dict
|
431 |
+
elif (module, name) == ('__builtin__', 'NoneType'):
|
432 |
+
return type(None) #XXX: special case: NoneType missing
|
433 |
+
if module == 'dill.dill': module = 'dill._dill'
|
434 |
+
return StockUnpickler.find_class(self, module, name)
|
435 |
+
|
436 |
+
def __init__(self, *args, **kwds):
|
437 |
+
settings = Pickler.settings
|
438 |
+
_ignore = kwds.pop('ignore', None)
|
439 |
+
StockUnpickler.__init__(self, *args, **kwds)
|
440 |
+
self._main = _main_module
|
441 |
+
self._ignore = settings['ignore'] if _ignore is None else _ignore
|
442 |
+
|
443 |
+
def load(self): #NOTE: if settings change, need to update attributes
|
444 |
+
obj = StockUnpickler.load(self)
|
445 |
+
if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
|
446 |
+
if not self._ignore:
|
447 |
+
# point obj class to main
|
448 |
+
try: obj.__class__ = getattr(self._main, type(obj).__name__)
|
449 |
+
except (AttributeError,TypeError): pass # defined in a file
|
450 |
+
#_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
|
451 |
+
return obj
|
452 |
+
load.__doc__ = StockUnpickler.load.__doc__
|
453 |
+
pass
|
454 |
+
|
455 |
+
'''
|
456 |
+
def dispatch_table():
|
457 |
+
"""get the dispatch table of registered types"""
|
458 |
+
return Pickler.dispatch
|
459 |
+
'''
|
460 |
+
|
461 |
+
pickle_dispatch_copy = StockPickler.dispatch.copy()
|
462 |
+
|
463 |
+
def pickle(t, func):
|
464 |
+
"""expose :attr:`~Pickler.dispatch` table for user-created extensions"""
|
465 |
+
Pickler.dispatch[t] = func
|
466 |
+
return
|
467 |
+
|
468 |
+
def register(t):
|
469 |
+
"""decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
|
470 |
+
def proxy(func):
|
471 |
+
Pickler.dispatch[t] = func
|
472 |
+
return func
|
473 |
+
return proxy
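# Illustrative sketch: wiring a user-defined type into the dispatch table with
# the decorator above. Box, its value attribute, and save_box are hypothetical
# names used only for this example; the pattern mirrors the registrations below.
def _example_register_custom_type():
    class Box:
        def __init__(self, value):
            self.value = value
    @register(Box)
    def save_box(pickler, obj):
        # rebuild the instance by calling Box(value) during unpickling
        pickler.save_reduce(Box, (obj.value,), obj=obj)
    return Box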
|
474 |
+
|
475 |
+
def _revert_extension():
|
476 |
+
"""drop dill-registered types from pickle's dispatch table"""
|
477 |
+
for type, func in list(StockPickler.dispatch.items()):
|
478 |
+
if func.__module__ == __name__:
|
479 |
+
del StockPickler.dispatch[type]
|
480 |
+
if type in pickle_dispatch_copy:
|
481 |
+
StockPickler.dispatch[type] = pickle_dispatch_copy[type]
|
482 |
+
|
483 |
+
def use_diff(on=True):
|
484 |
+
"""
|
485 |
+
Reduces the size of pickles by only including objects that have changed.
|
486 |
+
|
487 |
+
Decreases pickle size but increases CPU time needed.
|
488 |
+
Also helps avoid some unpickleable objects.
|
489 |
+
MUST be called at the start of the script, otherwise changes will not be recorded.
|
490 |
+
"""
|
491 |
+
global _use_diff, diff
|
492 |
+
_use_diff = on
|
493 |
+
if _use_diff and diff is None:
|
494 |
+
try:
|
495 |
+
from . import diff as d
|
496 |
+
except ImportError:
|
497 |
+
import diff as d
|
498 |
+
diff = d
|
499 |
+
|
500 |
+
def _create_typemap():
|
501 |
+
import types
|
502 |
+
d = dict(list(__builtin__.__dict__.items()) + \
|
503 |
+
list(types.__dict__.items())).items()
|
504 |
+
for key, value in d:
|
505 |
+
if getattr(value, '__module__', None) == 'builtins' \
|
506 |
+
and type(value) is type:
|
507 |
+
yield key, value
|
508 |
+
return
|
509 |
+
_reverse_typemap = dict(_create_typemap())
|
510 |
+
_reverse_typemap.update({
|
511 |
+
'PartialType': PartialType,
|
512 |
+
'SuperType': SuperType,
|
513 |
+
'ItemGetterType': ItemGetterType,
|
514 |
+
'AttrGetterType': AttrGetterType,
|
515 |
+
})
|
516 |
+
if sys.hexversion < 0x30800a2:
|
517 |
+
_reverse_typemap.update({
|
518 |
+
'CellType': CellType,
|
519 |
+
})
|
520 |
+
|
521 |
+
# "Incidental" implementation specific types. Unpickling these types in another
|
522 |
+
# implementation of Python (PyPy -> CPython) is not guaranteed to work
|
523 |
+
|
524 |
+
# This dictionary should contain all types that appear in Python implementations
|
525 |
+
# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types
|
526 |
+
x=OrderedDict()
|
527 |
+
_incedental_reverse_typemap = {
|
528 |
+
'FileType': FileType,
|
529 |
+
'BufferedRandomType': BufferedRandomType,
|
530 |
+
'BufferedReaderType': BufferedReaderType,
|
531 |
+
'BufferedWriterType': BufferedWriterType,
|
532 |
+
'TextWrapperType': TextWrapperType,
|
533 |
+
'PyBufferedRandomType': PyBufferedRandomType,
|
534 |
+
'PyBufferedReaderType': PyBufferedReaderType,
|
535 |
+
'PyBufferedWriterType': PyBufferedWriterType,
|
536 |
+
'PyTextWrapperType': PyTextWrapperType,
|
537 |
+
}
|
538 |
+
|
539 |
+
_incedental_reverse_typemap.update({
|
540 |
+
"DictKeysType": type({}.keys()),
|
541 |
+
"DictValuesType": type({}.values()),
|
542 |
+
"DictItemsType": type({}.items()),
|
543 |
+
|
544 |
+
"OdictKeysType": type(x.keys()),
|
545 |
+
"OdictValuesType": type(x.values()),
|
546 |
+
"OdictItemsType": type(x.items()),
|
547 |
+
})
|
548 |
+
|
549 |
+
if ExitType:
|
550 |
+
_incedental_reverse_typemap['ExitType'] = ExitType
|
551 |
+
if InputType:
|
552 |
+
_incedental_reverse_typemap['InputType'] = InputType
|
553 |
+
_incedental_reverse_typemap['OutputType'] = OutputType
|
554 |
+
|
555 |
+
'''
|
556 |
+
try:
|
557 |
+
import symtable
|
558 |
+
_incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table)
|
559 |
+
except: #FIXME: fails to pickle
|
560 |
+
pass
|
561 |
+
|
562 |
+
if sys.hexversion >= 0x30a00a0:
|
563 |
+
_incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines())
|
564 |
+
'''
|
565 |
+
|
566 |
+
if sys.hexversion >= 0x30b00b0:
|
567 |
+
from types import GenericAlias
|
568 |
+
_incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
|
569 |
+
'''
|
570 |
+
_incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
|
571 |
+
'''
|
572 |
+
|
573 |
+
try:
|
574 |
+
import winreg
|
575 |
+
_incedental_reverse_typemap["HKEYType"] = winreg.HKEYType
|
576 |
+
except ImportError:
|
577 |
+
pass
|
578 |
+
|
579 |
+
_reverse_typemap.update(_incedental_reverse_typemap)
|
580 |
+
_incedental_types = set(_incedental_reverse_typemap.values())
|
581 |
+
|
582 |
+
del x
|
583 |
+
|
584 |
+
_typemap = dict((v, k) for k, v in _reverse_typemap.items())
|
585 |
+
|
586 |
+
def _unmarshal(string):
|
587 |
+
return marshal.loads(string)
|
588 |
+
|
589 |
+
def _load_type(name):
|
590 |
+
return _reverse_typemap[name]
|
591 |
+
|
592 |
+
def _create_type(typeobj, *args):
|
593 |
+
return typeobj(*args)
|
594 |
+
|
595 |
+
def _create_function(fcode, fglobals, fname=None, fdefaults=None,
|
596 |
+
fclosure=None, fdict=None, fkwdefaults=None):
|
597 |
+
# same as FunctionType, but enable passing __dict__ to new function,
|
598 |
+
# __dict__ is the storehouse for attributes added after function creation
|
599 |
+
func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure)
|
600 |
+
if fdict is not None:
|
601 |
+
func.__dict__.update(fdict) #XXX: better copy? option to copy?
|
602 |
+
if fkwdefaults is not None:
|
603 |
+
func.__kwdefaults__ = fkwdefaults
|
604 |
+
# 'recurse' only stores referenced modules/objects in fglobals,
|
605 |
+
# thus we need to make sure that we have __builtins__ as well
|
606 |
+
if "__builtins__" not in func.__globals__:
|
607 |
+
func.__globals__["__builtins__"] = globals()["__builtins__"]
|
608 |
+
# assert id(fglobals) == id(func.__globals__)
|
609 |
+
return func
|
610 |
+
|
611 |
+
class match:
|
612 |
+
"""
|
613 |
+
Make available a limited structural pattern-matching-like syntax for Python < 3.10
|
614 |
+
|
615 |
+
Patterns can be only tuples (without types) currently.
|
616 |
+
Inspired by the package pattern-matching-PEP634.
|
617 |
+
|
618 |
+
Usage:
|
619 |
+
>>> with match(args) as m:
|
620 |
+
>>> if m.case(('x', 'y')):
|
621 |
+
>>> # use m.x and m.y
|
622 |
+
>>> elif m.case(('x', 'y', 'z')):
|
623 |
+
>>> # use m.x, m.y and m.z
|
624 |
+
|
625 |
+
Equivalent native code for Python >= 3.10:
|
626 |
+
>>> match args:
|
627 |
+
>>> case (x, y):
|
628 |
+
>>> # use x and y
|
629 |
+
>>> case (x, y, z):
|
630 |
+
>>> # use x, y and z
|
631 |
+
"""
|
632 |
+
def __init__(self, value):
|
633 |
+
self.value = value
|
634 |
+
self._fields = None
|
635 |
+
def __enter__(self):
|
636 |
+
return self
|
637 |
+
def __exit__(self, *exc_info):
|
638 |
+
return False
|
639 |
+
def case(self, args): # *args, **kwargs):
|
640 |
+
"""just handles tuple patterns"""
|
641 |
+
if len(self.value) != len(args): # + len(kwargs):
|
642 |
+
return False
|
643 |
+
#if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())):
|
644 |
+
# return False
|
645 |
+
self.args = args # (*args, *kwargs)
|
646 |
+
return True
|
647 |
+
@property
|
648 |
+
def fields(self):
|
649 |
+
# Only bind names to values if necessary.
|
650 |
+
if self._fields is None:
|
651 |
+
self._fields = dict(zip(self.args, self.value))
|
652 |
+
return self._fields
|
653 |
+
def __getattr__(self, item):
|
654 |
+
return self.fields[item]
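# Illustrative sketch of the helper above, mirroring the docstring's usage
# pattern; the tuple contents are hypothetical placeholder strings.
def _example_match_usage(args=('linetable', 'freevars')):
    with match(args) as m:
        if m.case(('x', 'y')):
            return m.x, m.y    # -> ('linetable', 'freevars')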
|
655 |
+
|
656 |
+
ALL_CODE_PARAMS = [
|
657 |
+
# Version New attribute CodeType parameters
|
658 |
+
((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
|
659 |
+
((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
|
660 |
+
((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
|
661 |
+
((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
|
662 |
+
((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
|
663 |
+
]
|
664 |
+
for version, new_attr, params in ALL_CODE_PARAMS:
|
665 |
+
if hasattr(CodeType, new_attr):
|
666 |
+
CODE_VERSION = version
|
667 |
+
CODE_PARAMS = params.split()
|
668 |
+
break
|
669 |
+
ENCODE_PARAMS = set(CODE_PARAMS).intersection(
|
670 |
+
['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])
|
671 |
+
|
672 |
+
def _create_code(*args):
|
673 |
+
if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
|
674 |
+
LNOTAB, *args = args
|
675 |
+
else: # from < 3.10 (or pre-LNOTAB storage)
|
676 |
+
LNOTAB = b''
|
677 |
+
|
678 |
+
with match(args) as m:
|
679 |
+
# Python 3.11/3.12a (18 members)
|
680 |
+
if m.case((
|
681 |
+
'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
|
682 |
+
'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
|
683 |
+
'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
|
684 |
+
)):
|
685 |
+
if CODE_VERSION == (3,11):
|
686 |
+
return CodeType(
|
687 |
+
*args[:6],
|
688 |
+
args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
|
689 |
+
*args[7:14],
|
690 |
+
args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
|
691 |
+
args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
|
692 |
+
args[16],
|
693 |
+
args[17],
|
694 |
+
)
|
695 |
+
fields = m.fields
|
696 |
+
# Python 3.10 or 3.8/3.9 (16 members)
|
697 |
+
elif m.case((
|
698 |
+
'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
|
699 |
+
'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
|
700 |
+
'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
|
701 |
+
)):
|
702 |
+
if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
|
703 |
+
return CodeType(
|
704 |
+
*args[:6],
|
705 |
+
args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
|
706 |
+
*args[7:13],
|
707 |
+
args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
|
708 |
+
args[14],
|
709 |
+
args[15],
|
710 |
+
)
|
711 |
+
fields = m.fields
|
712 |
+
if CODE_VERSION >= (3,10):
|
713 |
+
fields['linetable'] = m.LNOTAB_OR_LINETABLE
|
714 |
+
else:
|
715 |
+
fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
|
716 |
+
# Python 3.7 (15 args)
|
717 |
+
elif m.case((
|
718 |
+
'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
|
719 |
+
'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
|
720 |
+
'lnotab', 'freevars', 'cellvars' # args[12:]
|
721 |
+
)):
|
722 |
+
if CODE_VERSION == (3,7):
|
723 |
+
return CodeType(
|
724 |
+
*args[:5],
|
725 |
+
args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
|
726 |
+
*args[6:12],
|
727 |
+
args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
|
728 |
+
args[13],
|
729 |
+
args[14],
|
730 |
+
)
|
731 |
+
fields = m.fields
|
732 |
+
# Python 3.11a (20 members)
|
733 |
+
elif m.case((
|
734 |
+
'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
|
735 |
+
'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
|
736 |
+
'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
|
737 |
+
)):
|
738 |
+
if CODE_VERSION == (3,11,'a'):
|
739 |
+
return CodeType(
|
740 |
+
*args[:6],
|
741 |
+
args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
|
742 |
+
*args[7:14],
|
743 |
+
*(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
|
744 |
+
args[18],
|
745 |
+
args[19],
|
746 |
+
)
|
747 |
+
fields = m.fields
|
748 |
+
else:
|
749 |
+
raise UnpicklingError("pattern match for code object failed")
|
750 |
+
|
751 |
+
# The args format doesn't match this version.
|
752 |
+
fields.setdefault('posonlyargcount', 0) # from python <= 3.7
|
753 |
+
fields.setdefault('lnotab', LNOTAB) # from python >= 3.10
|
754 |
+
fields.setdefault('linetable', b'') # from python <= 3.9
|
755 |
+
fields.setdefault('qualname', fields['name']) # from python <= 3.10
|
756 |
+
fields.setdefault('exceptiontable', b'') # from python <= 3.10
|
757 |
+
fields.setdefault('endlinetable', None) # from python != 3.11a
|
758 |
+
fields.setdefault('columntable', None) # from python != 3.11a
|
759 |
+
|
760 |
+
args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
|
761 |
+
for k in CODE_PARAMS)
|
762 |
+
return CodeType(*args)
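# Illustrative sketch: code objects reconstructed by _create_code round-trip
# through dill (see save_code further below). The deferred import assumes the
# dill package is installed and avoids an import-time cycle with this module.
def _example_code_roundtrip():
    import dill
    def sample(a, b=2):
        return a + b
    restored = dill.loads(dill.dumps(sample.__code__))
    assert restored.co_name == 'sample'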
|
763 |
+
|
764 |
+
def _create_ftype(ftypeobj, func, args, kwds):
|
765 |
+
if kwds is None:
|
766 |
+
kwds = {}
|
767 |
+
if args is None:
|
768 |
+
args = ()
|
769 |
+
return ftypeobj(func, *args, **kwds)
|
770 |
+
|
771 |
+
def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
|
772 |
+
if not argz:
|
773 |
+
return typing.Tuple[()].copy_with(())
|
774 |
+
if argz == ((),):
|
775 |
+
return typing.Tuple[()]
|
776 |
+
return typing.Tuple[argz]
|
777 |
+
|
778 |
+
def _create_lock(locked, *args): #XXX: ignores 'blocking'
|
779 |
+
from threading import Lock
|
780 |
+
lock = Lock()
|
781 |
+
if locked:
|
782 |
+
if not lock.acquire(False):
|
783 |
+
raise UnpicklingError("Cannot acquire lock")
|
784 |
+
return lock
|
785 |
+
|
786 |
+
def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
|
787 |
+
lock = RLockType()
|
788 |
+
if owner is not None:
|
789 |
+
lock._acquire_restore((count, owner))
|
790 |
+
if owner and not lock._is_owned():
|
791 |
+
raise UnpicklingError("Cannot acquire lock")
|
792 |
+
return lock
|
793 |
+
|
794 |
+
# thanks to matsjoyce for adding all the different file modes
|
795 |
+
def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
|
796 |
+
# only pickles the handle, not the file contents... good? or StringIO(data)?
|
797 |
+
# (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
|
798 |
+
# NOTE: handle special cases first (are there more special cases?)
|
799 |
+
names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
|
800 |
+
'<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
|
801 |
+
if name in list(names.keys()):
|
802 |
+
f = names[name] #XXX: safer "f=sys.stdin"
|
803 |
+
elif name == '<tmpfile>':
|
804 |
+
f = os.tmpfile()
|
805 |
+
elif name == '<fdopen>':
|
806 |
+
import tempfile
|
807 |
+
f = tempfile.TemporaryFile(mode)
|
808 |
+
else:
|
809 |
+
try:
|
810 |
+
exists = os.path.exists(name)
|
811 |
+
except Exception:
|
812 |
+
exists = False
|
813 |
+
if not exists:
|
814 |
+
if strictio:
|
815 |
+
raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
|
816 |
+
elif "r" in mode and fmode != FILE_FMODE:
|
817 |
+
name = '<fdopen>' # or os.devnull?
|
818 |
+
current_size = 0 # or maintain position?
|
819 |
+
else:
|
820 |
+
current_size = os.path.getsize(name)
|
821 |
+
|
822 |
+
if position > current_size:
|
823 |
+
if strictio:
|
824 |
+
raise ValueError("invalid buffer size")
|
825 |
+
elif fmode == CONTENTS_FMODE:
|
826 |
+
position = current_size
|
827 |
+
# try to open the file by name
|
828 |
+
# NOTE: has different fileno
|
829 |
+
try:
|
830 |
+
#FIXME: missing: *buffering*, encoding, softspace
|
831 |
+
if fmode == FILE_FMODE:
|
832 |
+
f = open(name, mode if "w" in mode else "w")
|
833 |
+
f.write(fdata)
|
834 |
+
if "w" not in mode:
|
835 |
+
f.close()
|
836 |
+
f = open(name, mode)
|
837 |
+
elif name == '<fdopen>': # file did not exist
|
838 |
+
import tempfile
|
839 |
+
f = tempfile.TemporaryFile(mode)
|
840 |
+
# treat x mode as w mode
|
841 |
+
elif fmode == CONTENTS_FMODE \
|
842 |
+
and ("w" in mode or "x" in mode):
|
843 |
+
# stop truncation when opening
|
844 |
+
flags = os.O_CREAT
|
845 |
+
if "+" in mode:
|
846 |
+
flags |= os.O_RDWR
|
847 |
+
else:
|
848 |
+
flags |= os.O_WRONLY
|
849 |
+
f = os.fdopen(os.open(name, flags), mode)
|
850 |
+
# set name to the correct value
|
851 |
+
r = getattr(f, "buffer", f)
|
852 |
+
r = getattr(r, "raw", r)
|
853 |
+
r.name = name
|
854 |
+
assert f.name == name
|
855 |
+
else:
|
856 |
+
f = open(name, mode)
|
857 |
+
except (IOError, FileNotFoundError):
|
858 |
+
err = sys.exc_info()[1]
|
859 |
+
raise UnpicklingError(err)
|
860 |
+
if closed:
|
861 |
+
f.close()
|
862 |
+
elif position >= 0 and fmode != HANDLE_FMODE:
|
863 |
+
f.seek(position)
|
864 |
+
return f
|
865 |
+
|
866 |
+
def _create_stringi(value, position, closed):
|
867 |
+
f = StringIO(value)
|
868 |
+
if closed: f.close()
|
869 |
+
else: f.seek(position)
|
870 |
+
return f
|
871 |
+
|
872 |
+
def _create_stringo(value, position, closed):
|
873 |
+
f = StringIO()
|
874 |
+
if closed: f.close()
|
875 |
+
else:
|
876 |
+
f.write(value)
|
877 |
+
f.seek(position)
|
878 |
+
return f
|
879 |
+
|
880 |
+
class _itemgetter_helper(object):
|
881 |
+
def __init__(self):
|
882 |
+
self.items = []
|
883 |
+
def __getitem__(self, item):
|
884 |
+
self.items.append(item)
|
885 |
+
return
|
886 |
+
|
887 |
+
class _attrgetter_helper(object):
|
888 |
+
def __init__(self, attrs, index=None):
|
889 |
+
self.attrs = attrs
|
890 |
+
self.index = index
|
891 |
+
def __getattribute__(self, attr):
|
892 |
+
attrs = object.__getattribute__(self, "attrs")
|
893 |
+
index = object.__getattribute__(self, "index")
|
894 |
+
if index is None:
|
895 |
+
index = len(attrs)
|
896 |
+
attrs.append(attr)
|
897 |
+
else:
|
898 |
+
attrs[index] = ".".join([attrs[index], attr])
|
899 |
+
return type(self)(attrs, index)
|
900 |
+
|
901 |
+
class _dictproxy_helper(dict):
|
902 |
+
def __ror__(self, a):
|
903 |
+
return a
|
904 |
+
|
905 |
+
_dictproxy_helper_instance = _dictproxy_helper()
|
906 |
+
|
907 |
+
__d = {}
|
908 |
+
try:
|
909 |
+
# In CPython 3.9 and later, this trick can be used to exploit the
|
910 |
+
# implementation of the __or__ function of MappingProxyType to get the true
|
911 |
+
# mapping referenced by the proxy. It may work for other implementations,
|
912 |
+
# but is not guaranteed.
|
913 |
+
MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
|
914 |
+
except Exception:
|
915 |
+
MAPPING_PROXY_TRICK = False
|
916 |
+
del __d
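# Illustrative sketch of the trick above: because _dictproxy_helper.__ror__
# returns its left operand unchanged, OR-ing a mapping proxy with the helper
# hands back the proxy's true underlying dict on CPython >= 3.9.
def _example_mapping_proxy_trick():
    d = {'hypothetical': 'payload'}
    if MAPPING_PROXY_TRICK:
        assert (DictProxyType(d) | _dictproxy_helper_instance) is d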
|
917 |
+
|
918 |
+
# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
|
919 |
+
# whose _create_cell functions do not have a default value.
|
920 |
+
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
|
921 |
+
# to _create_cell) once breaking changes are allowed.
|
922 |
+
_CELL_REF = None
|
923 |
+
_CELL_EMPTY = Sentinel('_CELL_EMPTY')
|
924 |
+
|
925 |
+
def _create_cell(contents=None):
|
926 |
+
if contents is not _CELL_EMPTY:
|
927 |
+
value = contents
|
928 |
+
return (lambda: value).__closure__[0]
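# Illustrative sketch: _create_cell builds a real closure cell around a value.
def _example_create_cell():
    cell = _create_cell(42)
    assert cell.cell_contents == 42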
|
929 |
+
|
930 |
+
def _create_weakref(obj, *args):
|
931 |
+
from weakref import ref
|
932 |
+
if obj is None: # it's dead
|
933 |
+
from collections import UserDict
|
934 |
+
return ref(UserDict(), *args)
|
935 |
+
return ref(obj, *args)
|
936 |
+
|
937 |
+
def _create_weakproxy(obj, callable=False, *args):
|
938 |
+
from weakref import proxy
|
939 |
+
if obj is None: # it's dead
|
940 |
+
if callable: return proxy(lambda x:x, *args)
|
941 |
+
from collections import UserDict
|
942 |
+
return proxy(UserDict(), *args)
|
943 |
+
return proxy(obj, *args)
|
944 |
+
|
945 |
+
def _eval_repr(repr_str):
|
946 |
+
return eval(repr_str)
|
947 |
+
|
948 |
+
def _create_array(f, args, state, npdict=None):
|
949 |
+
#array = numpy.core.multiarray._reconstruct(*args)
|
950 |
+
array = f(*args)
|
951 |
+
array.__setstate__(state)
|
952 |
+
if npdict is not None: # we also have saved state in __dict__
|
953 |
+
array.__dict__.update(npdict)
|
954 |
+
return array
|
955 |
+
|
956 |
+
def _create_dtypemeta(scalar_type):
|
957 |
+
if NumpyDType is True: __hook__() # a bit hacky I think
|
958 |
+
if scalar_type is None:
|
959 |
+
return NumpyDType
|
960 |
+
return type(NumpyDType(scalar_type))
|
961 |
+
|
962 |
+
def _create_namedtuple(name, fieldnames, modulename, defaults=None):
|
963 |
+
class_ = _import_module(modulename + '.' + name, safe=True)
|
964 |
+
if class_ is not None:
|
965 |
+
return class_
|
966 |
+
import collections
|
967 |
+
t = collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename)
|
968 |
+
return t
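# Illustrative sketch: when the original module cannot be imported, the class
# is rebuilt from its name and field names. 'ghost_module' is a hypothetical,
# non-importable module name used only for this example.
def _example_create_namedtuple():
    Point = _create_namedtuple('Point', ('x', 'y'), 'ghost_module')
    assert Point(1, 2).x == 1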
|
969 |
+
|
970 |
+
def _create_capsule(pointer, name, context, destructor):
|
971 |
+
attr_found = False
|
972 |
+
try:
|
973 |
+
# based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
|
974 |
+
uname = name.decode('utf8')
|
975 |
+
for i in range(1, uname.count('.')+1):
|
976 |
+
names = uname.rsplit('.', i)
|
977 |
+
try:
|
978 |
+
module = __import__(names[0])
|
979 |
+
except ImportError:
|
980 |
+
pass
|
981 |
+
obj = module
|
982 |
+
for attr in names[1:]:
|
983 |
+
obj = getattr(obj, attr)
|
984 |
+
capsule = obj
|
985 |
+
attr_found = True
|
986 |
+
break
|
987 |
+
except Exception:
|
988 |
+
pass
|
989 |
+
|
990 |
+
if attr_found:
|
991 |
+
if _PyCapsule_IsValid(capsule, name):
|
992 |
+
return capsule
|
993 |
+
raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
|
994 |
+
else:
|
995 |
+
#warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
|
996 |
+
capsule = _PyCapsule_New(pointer, name, destructor)
|
997 |
+
_PyCapsule_SetContext(capsule, context)
|
998 |
+
return capsule
|
999 |
+
|
1000 |
+
def _getattr(objclass, name, repr_str):
|
1001 |
+
# hack to grab the reference directly
|
1002 |
+
try: #XXX: works only for __builtin__ ?
|
1003 |
+
attr = repr_str.split("'")[3]
|
1004 |
+
return eval(attr+'.__dict__["'+name+'"]')
|
1005 |
+
except Exception:
|
1006 |
+
try:
|
1007 |
+
attr = objclass.__dict__
|
1008 |
+
if type(attr) is DictProxyType:
|
1009 |
+
attr = attr[name]
|
1010 |
+
else:
|
1011 |
+
attr = getattr(objclass,name)
|
1012 |
+
except (AttributeError, KeyError):
|
1013 |
+
attr = getattr(objclass,name)
|
1014 |
+
return attr
|
1015 |
+
|
1016 |
+
def _get_attr(self, name):
|
1017 |
+
# stop recursive pickling
|
1018 |
+
return getattr(self, name, None) or getattr(__builtin__, name)
|
1019 |
+
|
1020 |
+
def _import_module(import_name, safe=False):
|
1021 |
+
try:
|
1022 |
+
if import_name.startswith('__runtime__.'):
|
1023 |
+
return sys.modules[import_name]
|
1024 |
+
elif '.' in import_name:
|
1025 |
+
items = import_name.split('.')
|
1026 |
+
module = '.'.join(items[:-1])
|
1027 |
+
obj = items[-1]
|
1028 |
+
submodule = getattr(__import__(module, None, None, [obj]), obj)
|
1029 |
+
if isinstance(submodule, (ModuleType, type)):
|
1030 |
+
return submodule
|
1031 |
+
return __import__(import_name, None, None, [obj])
|
1032 |
+
else:
|
1033 |
+
return __import__(import_name)
|
1034 |
+
except (ImportError, AttributeError, KeyError):
|
1035 |
+
if safe:
|
1036 |
+
return None
|
1037 |
+
raise
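# Illustrative sketch of the lookup rules above: a dotted name resolves to the
# trailing attribute when it is a module or class, and safe=True swallows
# failures for names that cannot be imported ('not_a_real_module' is made up).
def _example_import_module():
    import collections
    assert _import_module('collections.OrderedDict') is collections.OrderedDict
    assert _import_module('not_a_real_module.Thing', safe=True) is None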
|
1038 |
+
|
1039 |
+
# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333
|
1040 |
+
def _getattribute(obj, name):
|
1041 |
+
for subpath in name.split('.'):
|
1042 |
+
if subpath == '<locals>':
|
1043 |
+
raise AttributeError("Can't get local attribute {!r} on {!r}"
|
1044 |
+
.format(name, obj))
|
1045 |
+
try:
|
1046 |
+
parent = obj
|
1047 |
+
obj = getattr(obj, subpath)
|
1048 |
+
except AttributeError:
|
1049 |
+
raise AttributeError("Can't get attribute {!r} on {!r}"
|
1050 |
+
.format(name, obj))
|
1051 |
+
return obj, parent
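# Illustrative sketch: the helper walks a dotted qualname and also reports the
# parent object that owns the final attribute.
def _example_getattribute():
    import collections
    found, parent = _getattribute(collections, 'OrderedDict.fromkeys')
    assert parent is collections.OrderedDict and callable(found)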
|
1052 |
+
|
1053 |
+
def _locate_function(obj, pickler=None):
|
1054 |
+
module_name = getattr(obj, '__module__', None)
|
1055 |
+
if module_name in ['__main__', None] or \
|
1056 |
+
pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
|
1057 |
+
return False
|
1058 |
+
if hasattr(obj, '__qualname__'):
|
1059 |
+
module = _import_module(module_name, safe=True)
|
1060 |
+
try:
|
1061 |
+
found, _ = _getattribute(module, obj.__qualname__)
|
1062 |
+
return found is obj
|
1063 |
+
except AttributeError:
|
1064 |
+
return False
|
1065 |
+
else:
|
1066 |
+
found = _import_module(module_name + '.' + obj.__name__, safe=True)
|
1067 |
+
return found is obj
|
1068 |
+
|
1069 |
+
|
1070 |
+
def _setitems(dest, source):
|
1071 |
+
for k, v in source.items():
|
1072 |
+
dest[k] = v
|
1073 |
+
|
1074 |
+
|
1075 |
+
def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
|
1076 |
+
if obj is Getattr.NO_DEFAULT:
|
1077 |
+
obj = Reduce(reduction) # pragma: no cover
|
1078 |
+
|
1079 |
+
if is_pickler_dill is None:
|
1080 |
+
is_pickler_dill = is_dill(pickler, child=True)
|
1081 |
+
if is_pickler_dill:
|
1082 |
+
# assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
|
1083 |
+
# if not hasattr(pickler, 'x'): pickler.x = 0
|
1084 |
+
# print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
|
1085 |
+
# pickler.x += 1
|
1086 |
+
if postproc_list is None:
|
1087 |
+
postproc_list = []
|
1088 |
+
|
1089 |
+
# Recursive object not supported. Default to a global instead.
|
1090 |
+
if id(obj) in pickler._postproc:
|
1091 |
+
name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
|
1092 |
+
warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
|
1093 |
+
pickler.save_global(obj)
|
1094 |
+
return
|
1095 |
+
pickler._postproc[id(obj)] = postproc_list
|
1096 |
+
|
1097 |
+
# TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
|
1098 |
+
pickler.save_reduce(*reduction, obj=obj)
|
1099 |
+
|
1100 |
+
if is_pickler_dill:
|
1101 |
+
# pickler.x -= 1
|
1102 |
+
# print(pickler.x*' ', 'pop', obj, id(obj))
|
1103 |
+
postproc = pickler._postproc.pop(id(obj))
|
1104 |
+
# assert postproc_list == postproc, 'Stack tampered!'
|
1105 |
+
for reduction in reversed(postproc):
|
1106 |
+
if reduction[0] is _setitems:
|
1107 |
+
# use the internal machinery of pickle.py to speed up when
|
1108 |
+
# updating a dictionary in postproc
|
1109 |
+
dest, source = reduction[1]
|
1110 |
+
if source:
|
1111 |
+
pickler.write(pickler.get(pickler.memo[id(dest)][0]))
|
1112 |
+
pickler._batch_setitems(iter(source.items()))
|
1113 |
+
else:
|
1114 |
+
# Updating with an empty dictionary. Same as doing nothing.
|
1115 |
+
continue
|
1116 |
+
else:
|
1117 |
+
pickler.save_reduce(*reduction)
|
1118 |
+
# pop None created by calling preprocessing step off stack
|
1119 |
+
pickler.write(POP)
|
1120 |
+
|
1121 |
+
#@register(CodeType)
|
1122 |
+
#def save_code(pickler, obj):
|
1123 |
+
# logger.trace(pickler, "Co: %s", obj)
|
1124 |
+
# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj)
|
1125 |
+
# logger.trace(pickler, "# Co")
|
1126 |
+
# return
|
1127 |
+
|
1128 |
+
# The following function is based on 'save_codeobject' from 'cloudpickle'
|
1129 |
+
# Copyright (c) 2012, Regents of the University of California.
|
1130 |
+
# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
|
1131 |
+
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
|
1132 |
+
@register(CodeType)
|
1133 |
+
def save_code(pickler, obj):
|
1134 |
+
logger.trace(pickler, "Co: %s", obj)
|
1135 |
+
if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
|
1136 |
+
args = (
|
1137 |
+
obj.co_lnotab, # for < python 3.10 [not counted in args]
|
1138 |
+
obj.co_argcount, obj.co_posonlyargcount,
|
1139 |
+
obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
|
1140 |
+
obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
|
1141 |
+
obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
|
1142 |
+
obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
|
1143 |
+
obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
|
1144 |
+
obj.co_cellvars
|
1145 |
+
)
|
1146 |
+
elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
|
1147 |
+
with warnings.catch_warnings():
|
1148 |
+
if not OLD312a7: # issue 597
|
1149 |
+
warnings.filterwarnings('ignore', category=DeprecationWarning)
|
1150 |
+
args = (
|
1151 |
+
obj.co_lnotab, # for < python 3.10 [not counted in args]
|
1152 |
+
obj.co_argcount, obj.co_posonlyargcount,
|
1153 |
+
obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
|
1154 |
+
obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
|
1155 |
+
obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
|
1156 |
+
obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
|
1157 |
+
obj.co_freevars, obj.co_cellvars
|
1158 |
+
)
|
1159 |
+
elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
|
1160 |
+
args = (
|
1161 |
+
obj.co_lnotab, # for < python 3.10 [not counted in args]
|
1162 |
+
obj.co_argcount, obj.co_posonlyargcount,
|
1163 |
+
obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
|
1164 |
+
obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
|
1165 |
+
obj.co_varnames, obj.co_filename, obj.co_name,
|
1166 |
+
obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
|
1167 |
+
obj.co_cellvars
|
1168 |
+
)
|
1169 |
+
elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
|
1170 |
+
args = (
|
1171 |
+
obj.co_argcount, obj.co_posonlyargcount,
|
1172 |
+
obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
|
1173 |
+
obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
|
1174 |
+
obj.co_varnames, obj.co_filename, obj.co_name,
|
1175 |
+
obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
|
1176 |
+
obj.co_cellvars
|
1177 |
+
)
|
1178 |
+
else: # python 3.7 (15 args)
|
1179 |
+
args = (
|
1180 |
+
obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
|
1181 |
+
obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
|
1182 |
+
obj.co_names, obj.co_varnames, obj.co_filename,
|
1183 |
+
obj.co_name, obj.co_firstlineno, obj.co_lnotab,
|
1184 |
+
obj.co_freevars, obj.co_cellvars
|
1185 |
+
)
|
1186 |
+
|
1187 |
+
pickler.save_reduce(_create_code, args, obj=obj)
|
1188 |
+
logger.trace(pickler, "# Co")
|
1189 |
+
return
|
1190 |
+
|
1191 |
+
def _repr_dict(obj):
|
1192 |
+
"""Make a short string representation of a dictionary."""
|
1193 |
+
return "<%s object at %#012x>" % (type(obj).__name__, id(obj))
|
1194 |
+
|
1195 |
+
@register(dict)
|
1196 |
+
def save_module_dict(pickler, obj):
|
1197 |
+
if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
|
1198 |
+
not (pickler._session and pickler._first_pass):
|
1199 |
+
logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
|
1200 |
+
pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
|
1201 |
+
logger.trace(pickler, "# D1")
|
1202 |
+
elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
|
1203 |
+
logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
|
1204 |
+
pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
|
1205 |
+
logger.trace(pickler, "# D3")
|
1206 |
+
elif '__name__' in obj and obj != _main_module.__dict__ \
|
1207 |
+
and type(obj['__name__']) is str \
|
1208 |
+
and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
|
1209 |
+
logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
|
1210 |
+
pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
|
1211 |
+
logger.trace(pickler, "# D4")
|
1212 |
+
else:
|
1213 |
+
logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
|
1214 |
+
if is_dill(pickler, child=False) and pickler._session:
|
1215 |
+
# we only care about session the first pass thru
|
1216 |
+
pickler._first_pass = False
|
1217 |
+
StockPickler.save_dict(pickler, obj)
|
1218 |
+
logger.trace(pickler, "# D2")
|
1219 |
+
return
|
1220 |
+
|
1221 |
+
|
1222 |
+
if not OLD310 and MAPPING_PROXY_TRICK:
|
1223 |
+
def save_dict_view(dicttype):
|
1224 |
+
def save_dict_view_for_function(func):
|
1225 |
+
def _save_dict_view(pickler, obj):
|
1226 |
+
logger.trace(pickler, "Dkvi: <%s>", obj)
|
1227 |
+
mapping = obj.mapping | _dictproxy_helper_instance
|
1228 |
+
pickler.save_reduce(func, (mapping,), obj=obj)
|
1229 |
+
logger.trace(pickler, "# Dkvi")
|
1230 |
+
return _save_dict_view
|
1231 |
+
return [
|
1232 |
+
(funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
|
1233 |
+
for funcname in ('keys', 'values', 'items')
|
1234 |
+
]
|
1235 |
+
else:
|
1236 |
+
# The following functions are based on 'cloudpickle'
|
1237 |
+
# https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
|
1238 |
+
# Copyright (c) 2012, Regents of the University of California.
|
1239 |
+
# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
|
1240 |
+
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
|
1241 |
+
def save_dict_view(dicttype):
|
1242 |
+
def save_dict_keys(pickler, obj):
|
1243 |
+
logger.trace(pickler, "Dk: <%s>", obj)
|
1244 |
+
dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
|
1245 |
+
pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
|
1246 |
+
logger.trace(pickler, "# Dk")
|
1247 |
+
|
1248 |
+
def save_dict_values(pickler, obj):
|
1249 |
+
logger.trace(pickler, "Dv: <%s>", obj)
|
1250 |
+
dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
|
1251 |
+
pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
|
1252 |
+
logger.trace(pickler, "# Dv")
|
1253 |
+
|
1254 |
+
def save_dict_items(pickler, obj):
|
1255 |
+
logger.trace(pickler, "Di: <%s>", obj)
|
1256 |
+
pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
|
1257 |
+
logger.trace(pickler, "# Di")
|
1258 |
+
|
1259 |
+
return (
|
1260 |
+
('keys', save_dict_keys),
|
1261 |
+
('values', save_dict_values),
|
1262 |
+
('items', save_dict_items)
|
1263 |
+
)
|
1264 |
+
|
1265 |
+
for __dicttype in (
|
1266 |
+
dict,
|
1267 |
+
OrderedDict
|
1268 |
+
):
|
1269 |
+
__obj = __dicttype()
|
1270 |
+
for __funcname, __savefunc in save_dict_view(__dicttype):
|
1271 |
+
__tview = type(getattr(__obj, __funcname)())
|
1272 |
+
if __tview not in Pickler.dispatch:
|
1273 |
+
Pickler.dispatch[__tview] = __savefunc
|
1274 |
+
del __dicttype, __obj, __funcname, __tview, __savefunc
|
1275 |
+
|
1276 |
+
|
1277 |
+
@register(ClassType)
|
1278 |
+
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
|
1279 |
+
if not _locate_function(obj, pickler):
|
1280 |
+
logger.trace(pickler, "C1: %s", obj)
|
1281 |
+
pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
|
1282 |
+
obj.__dict__), obj=obj)
|
1283 |
+
#XXX: or obj.__dict__.copy()), obj=obj) ?
|
1284 |
+
logger.trace(pickler, "# C1")
|
1285 |
+
else:
|
1286 |
+
logger.trace(pickler, "C2: %s", obj)
|
1287 |
+
name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
|
1288 |
+
StockPickler.save_global(pickler, obj, name=name)
|
1289 |
+
logger.trace(pickler, "# C2")
|
1290 |
+
return
|
1291 |
+
|
1292 |
+
@register(typing._GenericAlias)
|
1293 |
+
def save_generic_alias(pickler, obj):
|
1294 |
+
args = obj.__args__
|
1295 |
+
if type(obj.__reduce__()) is str:
|
1296 |
+
logger.trace(pickler, "Ga0: %s", obj)
|
1297 |
+
StockPickler.save_global(pickler, obj, name=obj.__reduce__())
|
1298 |
+
logger.trace(pickler, "# Ga0")
|
1299 |
+
elif obj.__origin__ is tuple and (not args or args == ((),)):
|
1300 |
+
logger.trace(pickler, "Ga1: %s", obj)
|
1301 |
+
pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
|
1302 |
+
logger.trace(pickler, "# Ga1")
|
1303 |
+
else:
|
1304 |
+
logger.trace(pickler, "Ga2: %s", obj)
|
1305 |
+
StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
|
1306 |
+
logger.trace(pickler, "# Ga2")
|
1307 |
+
return
|
1308 |
+
|
1309 |
+
@register(LockType)
|
1310 |
+
def save_lock(pickler, obj):
|
1311 |
+
logger.trace(pickler, "Lo: %s", obj)
|
1312 |
+
pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj)
|
1313 |
+
logger.trace(pickler, "# Lo")
|
1314 |
+
return
|
1315 |
+
|
1316 |
+
@register(RLockType)
|
1317 |
+
def save_rlock(pickler, obj):
|
1318 |
+
logger.trace(pickler, "RL: %s", obj)
|
1319 |
+
r = obj.__repr__() # don't use _release_save as it unlocks the lock
|
1320 |
+
count = int(r.split('count=')[1].split()[0].rstrip('>'))
|
1321 |
+
owner = int(r.split('owner=')[1].split()[0])
|
1322 |
+
pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
|
1323 |
+
logger.trace(pickler, "# RL")
|
1324 |
+
return
|
1325 |
+
|
1326 |
+
#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
|
1327 |
+
def save_socket(pickler, obj):
|
1328 |
+
logger.trace(pickler, "So: %s", obj)
|
1329 |
+
pickler.save_reduce(*reduce_socket(obj))
|
1330 |
+
logger.trace(pickler, "# So")
|
1331 |
+
return
|
1332 |
+
|
1333 |
+
def _save_file(pickler, obj, open_):
|
1334 |
+
if obj.closed:
|
1335 |
+
position = 0
|
1336 |
+
else:
|
1337 |
+
obj.flush()
|
1338 |
+
if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
|
1339 |
+
position = -1
|
1340 |
+
else:
|
1341 |
+
position = obj.tell()
|
1342 |
+
if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
|
1343 |
+
f = open_(obj.name, "r")
|
1344 |
+
fdata = f.read()
|
1345 |
+
f.close()
|
1346 |
+
else:
|
1347 |
+
fdata = ""
|
1348 |
+
if is_dill(pickler, child=True):
|
1349 |
+
strictio = pickler._strictio
|
1350 |
+
fmode = pickler._fmode
|
1351 |
+
else:
|
1352 |
+
strictio = False
|
1353 |
+
fmode = 0 # HANDLE_FMODE
|
1354 |
+
pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
|
1355 |
+
obj.closed, open_, strictio,
|
1356 |
+
fmode, fdata), obj=obj)
|
1357 |
+
return
|
1358 |
+
|
1359 |
+
|
1360 |
+
@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
|
1361 |
+
@register(BufferedReaderType)
|
1362 |
+
@register(BufferedWriterType)
|
1363 |
+
@register(TextWrapperType)
|
1364 |
+
def save_file(pickler, obj):
|
1365 |
+
logger.trace(pickler, "Fi: %s", obj)
|
1366 |
+
f = _save_file(pickler, obj, open)
|
1367 |
+
logger.trace(pickler, "# Fi")
|
1368 |
+
return f
|
1369 |
+
|
1370 |
+
if BufferedRandomType:
|
1371 |
+
@register(BufferedRandomType)
|
1372 |
+
def save_file(pickler, obj):
|
1373 |
+
logger.trace(pickler, "Fi: %s", obj)
|
1374 |
+
f = _save_file(pickler, obj, open)
|
1375 |
+
logger.trace(pickler, "# Fi")
|
1376 |
+
return f
|
1377 |
+
|
1378 |
+
if PyTextWrapperType:
|
1379 |
+
@register(PyBufferedReaderType)
|
1380 |
+
@register(PyBufferedWriterType)
|
1381 |
+
@register(PyTextWrapperType)
|
1382 |
+
def save_file(pickler, obj):
|
1383 |
+
logger.trace(pickler, "Fi: %s", obj)
|
1384 |
+
f = _save_file(pickler, obj, _open)
|
1385 |
+
logger.trace(pickler, "# Fi")
|
1386 |
+
return f
|
1387 |
+
|
1388 |
+
if PyBufferedRandomType:
|
1389 |
+
@register(PyBufferedRandomType)
|
1390 |
+
def save_file(pickler, obj):
|
1391 |
+
logger.trace(pickler, "Fi: %s", obj)
|
1392 |
+
f = _save_file(pickler, obj, _open)
|
1393 |
+
logger.trace(pickler, "# Fi")
|
1394 |
+
return f
|
1395 |
+
|
1396 |
+
|
1397 |
+
# The following two functions are based on 'saveCStringIoInput'
|
1398 |
+
# and 'saveCStringIoOutput' from spickle
|
1399 |
+
# Copyright (c) 2011 by science+computing ag
|
1400 |
+
# License: http://www.apache.org/licenses/LICENSE-2.0
|
1401 |
+
if InputType:
|
1402 |
+
@register(InputType)
|
1403 |
+
def save_stringi(pickler, obj):
|
1404 |
+
logger.trace(pickler, "Io: %s", obj)
|
1405 |
+
if obj.closed:
|
1406 |
+
value = ''; position = 0
|
1407 |
+
else:
|
1408 |
+
value = obj.getvalue(); position = obj.tell()
|
1409 |
+
pickler.save_reduce(_create_stringi, (value, position, \
|
1410 |
+
obj.closed), obj=obj)
|
1411 |
+
logger.trace(pickler, "# Io")
|
1412 |
+
return
|
1413 |
+
|
1414 |
+
@register(OutputType)
|
1415 |
+
def save_stringo(pickler, obj):
|
1416 |
+
logger.trace(pickler, "Io: %s", obj)
|
1417 |
+
if obj.closed:
|
1418 |
+
value = ''; position = 0
|
1419 |
+
else:
|
1420 |
+
value = obj.getvalue(); position = obj.tell()
|
1421 |
+
pickler.save_reduce(_create_stringo, (value, position, \
|
1422 |
+
obj.closed), obj=obj)
|
1423 |
+
logger.trace(pickler, "# Io")
|
1424 |
+
return
|
1425 |
+
|
1426 |
+
if LRUCacheType is not None:
|
1427 |
+
from functools import lru_cache
|
1428 |
+
@register(LRUCacheType)
|
1429 |
+
def save_lru_cache(pickler, obj):
|
1430 |
+
logger.trace(pickler, "LRU: %s", obj)
|
1431 |
+
if OLD39:
|
1432 |
+
kwargs = obj.cache_info()
|
1433 |
+
args = (kwargs.maxsize,)
|
1434 |
+
else:
|
1435 |
+
kwargs = obj.cache_parameters()
|
1436 |
+
args = (kwargs['maxsize'], kwargs['typed'])
|
1437 |
+
if args != lru_cache.__defaults__:
|
1438 |
+
wrapper = Reduce(lru_cache, args, is_callable=True)
|
1439 |
+
else:
|
1440 |
+
wrapper = lru_cache
|
1441 |
+
pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
|
1442 |
+
logger.trace(pickler, "# LRU")
|
1443 |
+
return
|
1444 |
+
|
1445 |
+
@register(SuperType)
|
1446 |
+
def save_super(pickler, obj):
|
1447 |
+
logger.trace(pickler, "Su: %s", obj)
|
1448 |
+
pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
|
1449 |
+
logger.trace(pickler, "# Su")
|
1450 |
+
return
|
1451 |
+
|
1452 |
+
if IS_PYPY:
|
1453 |
+
@register(MethodType)
|
1454 |
+
def save_instancemethod0(pickler, obj):
|
1455 |
+
code = getattr(obj.__func__, '__code__', None)
|
1456 |
+
if code is not None and type(code) is not CodeType \
|
1457 |
+
and getattr(obj.__self__, obj.__name__) == obj:
|
1458 |
+
# Some PyPy builtin functions have no module name
|
1459 |
+
logger.trace(pickler, "Me2: %s", obj)
|
1460 |
+
# TODO: verify that this works for all PyPy builtin methods
|
1461 |
+
pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
|
1462 |
+
logger.trace(pickler, "# Me2")
|
1463 |
+
return
|
1464 |
+
|
1465 |
+
logger.trace(pickler, "Me1: %s", obj)
|
1466 |
+
pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
|
1467 |
+
logger.trace(pickler, "# Me1")
|
1468 |
+
return
|
1469 |
+
else:
|
1470 |
+
@register(MethodType)
|
1471 |
+
def save_instancemethod0(pickler, obj):
|
1472 |
+
logger.trace(pickler, "Me1: %s", obj)
|
1473 |
+
pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
|
1474 |
+
logger.trace(pickler, "# Me1")
|
1475 |
+
return
|
1476 |
+
|
1477 |
+
if not IS_PYPY:
|
1478 |
+
@register(MemberDescriptorType)
|
1479 |
+
@register(GetSetDescriptorType)
|
1480 |
+
@register(MethodDescriptorType)
|
1481 |
+
@register(WrapperDescriptorType)
|
1482 |
+
@register(ClassMethodDescriptorType)
|
1483 |
+
def save_wrapper_descriptor(pickler, obj):
|
1484 |
+
logger.trace(pickler, "Wr: %s", obj)
|
1485 |
+
pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
|
1486 |
+
obj.__repr__()), obj=obj)
|
1487 |
+
logger.trace(pickler, "# Wr")
|
1488 |
+
return
|
1489 |
+
else:
|
1490 |
+
@register(MemberDescriptorType)
|
1491 |
+
@register(GetSetDescriptorType)
|
1492 |
+
def save_wrapper_descriptor(pickler, obj):
|
1493 |
+
logger.trace(pickler, "Wr: %s", obj)
|
1494 |
+
pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
|
1495 |
+
obj.__repr__()), obj=obj)
|
1496 |
+
logger.trace(pickler, "# Wr")
|
1497 |
+
return
|
1498 |
+
|
1499 |
+
@register(CellType)
|
1500 |
+
def save_cell(pickler, obj):
|
1501 |
+
try:
|
1502 |
+
f = obj.cell_contents
|
1503 |
+
except ValueError: # cell is empty
|
1504 |
+
logger.trace(pickler, "Ce3: %s", obj)
|
1505 |
+
# _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
|
1506 |
+
# It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
|
1507 |
+
# _shims.py. This object is not present in Python 3 because the cell's
|
1508 |
+
# contents can be deleted in newer versions of Python. The reduce object
|
1509 |
+
# will instead unpickle to None if unpickled in Python 3.
|
1510 |
+
|
1511 |
+
# When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
|
1512 |
+
# be replaced by () OR the delattr function can be removed depending on
|
1513 |
+
# whichever is more convenient.
|
1514 |
+
pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
|
1515 |
+
# Call the function _delattr on the cell's cell_contents attribute
|
1516 |
+
# The result of this function call will be None
|
1517 |
+
pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
|
1518 |
+
# pop None created by calling _delattr off stack
|
1519 |
+
pickler.write(POP)
|
1520 |
+
logger.trace(pickler, "# Ce3")
|
1521 |
+
return
|
1522 |
+
if is_dill(pickler, child=True):
|
1523 |
+
if id(f) in pickler._postproc:
|
1524 |
+
# Already seen. Add to its postprocessing.
|
1525 |
+
postproc = pickler._postproc[id(f)]
|
1526 |
+
else:
|
1527 |
+
# Haven't seen it. Add to the highest possible object and set its
|
1528 |
+
# value as late as possible to prevent a cycle.
|
1529 |
+
postproc = next(iter(pickler._postproc.values()), None)
|
1530 |
+
if postproc is not None:
|
1531 |
+
logger.trace(pickler, "Ce2: %s", obj)
|
1532 |
+
# _CELL_REF is defined in _shims.py to support older versions of
|
1533 |
+
# dill. When breaking changes are made to dill, (_CELL_REF,) can
|
1534 |
+
# be replaced by ()
|
1535 |
+
pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
|
1536 |
+
postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
|
1537 |
+
logger.trace(pickler, "# Ce2")
|
1538 |
+
return
|
1539 |
+
logger.trace(pickler, "Ce1: %s", obj)
|
1540 |
+
pickler.save_reduce(_create_cell, (f,), obj=obj)
|
1541 |
+
logger.trace(pickler, "# Ce1")
|
1542 |
+
return
|
1543 |
+
|
1544 |
+
if MAPPING_PROXY_TRICK:
|
1545 |
+
@register(DictProxyType)
|
1546 |
+
def save_dictproxy(pickler, obj):
|
1547 |
+
logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
|
1548 |
+
mapping = obj | _dictproxy_helper_instance
|
1549 |
+
pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
|
1550 |
+
logger.trace(pickler, "# Mp")
|
1551 |
+
return
|
1552 |
+
else:
|
1553 |
+
@register(DictProxyType)
|
1554 |
+
def save_dictproxy(pickler, obj):
|
1555 |
+
logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
|
1556 |
+
pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
|
1557 |
+
logger.trace(pickler, "# Mp")
|
1558 |
+
return
|
1559 |
+
|
1560 |
+
@register(SliceType)
|
1561 |
+
def save_slice(pickler, obj):
|
1562 |
+
logger.trace(pickler, "Sl: %s", obj)
|
1563 |
+
pickler.save_reduce(slice, (obj.start, obj.stop, obj.step), obj=obj)
|
1564 |
+
logger.trace(pickler, "# Sl")
|
1565 |
+
return
|
1566 |
+
|
1567 |
+
@register(XRangeType)
|
1568 |
+
@register(EllipsisType)
|
1569 |
+
@register(NotImplementedType)
|
1570 |
+
def save_singleton(pickler, obj):
|
1571 |
+
logger.trace(pickler, "Si: %s", obj)
|
1572 |
+
pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
|
1573 |
+
logger.trace(pickler, "# Si")
|
1574 |
+
return
|
1575 |
+
|
1576 |
+
def _proxy_helper(obj): # a dead proxy returns a reference to None
|
1577 |
+
"""get memory address of proxy's reference object"""
|
1578 |
+
_repr = repr(obj)
|
1579 |
+
try: _str = str(obj)
|
1580 |
+
except ReferenceError: # it's a dead proxy
|
1581 |
+
return id(None)
|
1582 |
+
if _str == _repr: return id(obj) # it's a repr
|
1583 |
+
try: # either way, it's a proxy from here
|
1584 |
+
address = int(_str.rstrip('>').split(' at ')[-1], base=16)
|
1585 |
+
except ValueError: # special case: proxy of a 'type'
|
1586 |
+
if not IS_PYPY:
|
1587 |
+
address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
|
1588 |
+
else:
|
1589 |
+
objects = iter(gc.get_objects())
|
1590 |
+
for _obj in objects:
|
1591 |
+
if repr(_obj) == _str: return id(_obj)
|
1592 |
+
# all bad below... nothing found so throw ReferenceError
|
1593 |
+
msg = "Cannot reference object for proxy at '%s'" % id(obj)
|
1594 |
+
raise ReferenceError(msg)
|
1595 |
+
return address
|
1596 |
+
|
1597 |
+
def _locate_object(address, module=None):
|
1598 |
+
"""get object located at the given memory address (inverse of id(obj))"""
|
1599 |
+
special = [None, True, False] #XXX: more...?
|
1600 |
+
for obj in special:
|
1601 |
+
if address == id(obj): return obj
|
1602 |
+
if module:
|
1603 |
+
objects = iter(module.__dict__.values())
|
1604 |
+
else: objects = iter(gc.get_objects())
|
1605 |
+
for obj in objects:
|
1606 |
+
if address == id(obj): return obj
|
1607 |
+
# all bad below... nothing found so throw ReferenceError or TypeError
|
1608 |
+
try: address = hex(address)
|
1609 |
+
except TypeError:
|
1610 |
+
raise TypeError("'%s' is not a valid memory address" % str(address))
|
1611 |
+
raise ReferenceError("Cannot reference object at '%s'" % address)
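# Illustrative sketch: _locate_object acts as a best-effort inverse of id();
# the list below is a hypothetical live object used only for this example.
def _example_locate_object():
    sentinel = ['hypothetical', 'payload']
    assert _locate_object(id(sentinel)) is sentinel
    assert _locate_object(id(None)) is None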
|
1612 |
+
|
1613 |
+
@register(ReferenceType)
|
1614 |
+
def save_weakref(pickler, obj):
|
1615 |
+
refobj = obj()
|
1616 |
+
logger.trace(pickler, "R1: %s", obj)
|
1617 |
+
#refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
|
1618 |
+
pickler.save_reduce(_create_weakref, (refobj,), obj=obj)
|
1619 |
+
logger.trace(pickler, "# R1")
|
1620 |
+
return
|
1621 |
+
|
1622 |
+
@register(ProxyType)
|
1623 |
+
@register(CallableProxyType)
|
1624 |
+
def save_weakproxy(pickler, obj):
|
1625 |
+
# Must do string substitution here and use %r to avoid ReferenceError.
|
1626 |
+
logger.trace(pickler, "R2: %r" % obj)
|
1627 |
+
refobj = _locate_object(_proxy_helper(obj))
|
1628 |
+
pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj)
|
1629 |
+
logger.trace(pickler, "# R2")
|
1630 |
+
return
|
1631 |
+
|
1632 |
+
def _is_builtin_module(module):
|
1633 |
+
if not hasattr(module, "__file__"): return True
|
1634 |
+
if module.__file__ is None: return False
|
1635 |
+
# If a module file name starts with prefix, it should be a builtin
|
1636 |
+
# module, so should always be pickled as a reference.
|
1637 |
+
names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
|
1638 |
+
rp = os.path.realpath
|
1639 |
+
# See https://github.com/uqfoundation/dill/issues/566
|
1640 |
+
return (
|
1641 |
+
any(
|
1642 |
+
module.__file__.startswith(getattr(sys, name))
|
1643 |
+
or rp(module.__file__).startswith(rp(getattr(sys, name)))
|
1644 |
+
for name in names
|
1645 |
+
if hasattr(sys, name)
|
1646 |
+
)
|
1647 |
+
or module.__file__.endswith(EXTENSION_SUFFIXES)
|
1648 |
+
or 'site-packages' in module.__file__
|
1649 |
+
)
|
1650 |
+
|
1651 |
+
def _is_imported_module(module):
|
1652 |
+
return getattr(module, '__loader__', None) is not None or module in sys.modules.values()
|
1653 |
+
|
1654 |
+
@register(ModuleType)
|
1655 |
+
def save_module(pickler, obj):
|
1656 |
+
if False: #_use_diff:
|
1657 |
+
if obj.__name__.split('.', 1)[0] != "dill":
|
1658 |
+
try:
|
1659 |
+
changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
|
1660 |
+
except RuntimeError: # not a memorised module, probably part of dill
|
1661 |
+
pass
|
1662 |
+
else:
|
1663 |
+
logger.trace(pickler, "M2: %s with diff", obj)
|
1664 |
+
logger.info("Diff: %s", changed.keys())
|
1665 |
+
pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
|
1666 |
+
state=changed)
|
1667 |
+
logger.trace(pickler, "# M2")
|
1668 |
+
return
|
1669 |
+
|
1670 |
+
logger.trace(pickler, "M1: %s", obj)
|
1671 |
+
pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
|
1672 |
+
logger.trace(pickler, "# M1")
|
1673 |
+
else:
|
1674 |
+
builtin_mod = _is_builtin_module(obj)
|
1675 |
+
is_session_main = is_dill(pickler, child=True) and obj is pickler._main
|
1676 |
+
if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
|
1677 |
+
or is_session_main):
|
1678 |
+
logger.trace(pickler, "M1: %s", obj)
|
1679 |
+
# Hack for handling module-type objects in load_module().
|
1680 |
+
mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
|
1681 |
+
# Second references are saved as __builtin__.__main__ in save_module_dict().
|
1682 |
+
main_dict = obj.__dict__.copy()
|
1683 |
+
for item in ('__builtins__', '__loader__'):
|
1684 |
+
main_dict.pop(item, None)
|
1685 |
+
for item in IPYTHON_SINGLETONS: #pragma: no cover
|
1686 |
+
if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
|
1687 |
+
del main_dict[item]
|
1688 |
+
pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
|
1689 |
+
logger.trace(pickler, "# M1")
|
1690 |
+
elif obj.__name__ == "dill._dill":
|
1691 |
+
logger.trace(pickler, "M2: %s", obj)
|
1692 |
+
pickler.save_global(obj, name="_dill")
|
1693 |
+
logger.trace(pickler, "# M2")
|
1694 |
+
else:
|
1695 |
+
logger.trace(pickler, "M2: %s", obj)
|
1696 |
+
pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
|
1697 |
+
logger.trace(pickler, "# M2")
|
1698 |
+
return
|
1699 |
+
|
1700 |
+
# The following function is based on '_extract_class_dict' from 'cloudpickle'
|
1701 |
+
# Copyright (c) 2012, Regents of the University of California.
|
1702 |
+
# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
|
1703 |
+
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
|
1704 |
+
def _get_typedict_type(cls, clsdict, attrs, postproc_list):
|
1705 |
+
"""Retrieve a copy of the dict of a class without the inherited methods"""
|
1706 |
+
if len(cls.__bases__) == 1:
|
1707 |
+
inherited_dict = cls.__bases__[0].__dict__
|
1708 |
+
else:
|
1709 |
+
inherited_dict = {}
|
1710 |
+
for base in reversed(cls.__bases__):
|
1711 |
+
inherited_dict.update(base.__dict__)
|
1712 |
+
to_remove = []
|
1713 |
+
for name, value in dict.items(clsdict):
|
1714 |
+
try:
|
1715 |
+
base_value = inherited_dict[name]
|
1716 |
+
if value is base_value and hasattr(value, '__qualname__'):
|
1717 |
+
to_remove.append(name)
|
1718 |
+
except KeyError:
|
1719 |
+
pass
|
1720 |
+
for name in to_remove:
|
1721 |
+
dict.pop(clsdict, name)
|
1722 |
+
|
1723 |
+
if issubclass(type(cls), type):
|
1724 |
+
clsdict.pop('__dict__', None)
|
1725 |
+
clsdict.pop('__weakref__', None)
|
1726 |
+
# clsdict.pop('__prepare__', None)
|
1727 |
+
return clsdict, attrs
|
1728 |
+
|
1729 |
+
def _get_typedict_abc(obj, _dict, attrs, postproc_list):
|
1730 |
+
if hasattr(abc, '_get_dump'):
|
1731 |
+
(registry, _, _, _) = abc._get_dump(obj)
|
1732 |
+
register = obj.register
|
1733 |
+
postproc_list.extend((register, (reg(),)) for reg in registry)
|
1734 |
+
elif hasattr(obj, '_abc_registry'):
|
1735 |
+
registry = obj._abc_registry
|
1736 |
+
register = obj.register
|
1737 |
+
postproc_list.extend((register, (reg,)) for reg in registry)
|
1738 |
+
else:
|
1739 |
+
raise PicklingError("Cannot find registry of ABC %s", obj)
|
1740 |
+
|
1741 |
+
if '_abc_registry' in _dict:
|
1742 |
+
_dict.pop('_abc_registry', None)
|
1743 |
+
_dict.pop('_abc_cache', None)
|
1744 |
+
_dict.pop('_abc_negative_cache', None)
|
1745 |
+
# _dict.pop('_abc_negative_cache_version', None)
|
1746 |
+
else:
|
1747 |
+
_dict.pop('_abc_impl', None)
|
1748 |
+
return _dict, attrs
|
1749 |
+
|
1750 |
+
@register(TypeType)
|
1751 |
+
def save_type(pickler, obj, postproc_list=None):
|
1752 |
+
if obj in _typemap:
|
1753 |
+
logger.trace(pickler, "T1: %s", obj)
|
1754 |
+
# if obj in _incedental_types:
|
1755 |
+
# warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
|
1756 |
+
pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
|
1757 |
+
logger.trace(pickler, "# T1")
|
1758 |
+
elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
|
1759 |
+
# special case: namedtuples
|
1760 |
+
logger.trace(pickler, "T6: %s", obj)
|
1761 |
+
|
1762 |
+
obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
|
1763 |
+
if obj.__name__ != obj_name:
|
1764 |
+
if postproc_list is None:
|
1765 |
+
postproc_list = []
|
1766 |
+
postproc_list.append((setattr, (obj, '__qualname__', obj_name)))
|
1767 |
+
|
1768 |
+
if not obj._field_defaults:
|
1769 |
+
_save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
|
1770 |
+
else:
|
1771 |
+
defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
|
1772 |
+
_save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
|
1773 |
+
logger.trace(pickler, "# T6")
|
1774 |
+
return
|
1775 |
+
|
1776 |
+
# special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta
|
1777 |
+
elif obj is type(None):
|
1778 |
+
logger.trace(pickler, "T7: %s", obj)
|
1779 |
+
#XXX: pickler.save_reduce(type, (None,), obj=obj)
|
1780 |
+
pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
|
1781 |
+
logger.trace(pickler, "# T7")
|
1782 |
+
elif obj is NotImplementedType:
|
1783 |
+
logger.trace(pickler, "T7: %s", obj)
|
1784 |
+
pickler.save_reduce(type, (NotImplemented,), obj=obj)
|
1785 |
+
logger.trace(pickler, "# T7")
|
1786 |
+
elif obj is EllipsisType:
|
1787 |
+
logger.trace(pickler, "T7: %s", obj)
|
1788 |
+
pickler.save_reduce(type, (Ellipsis,), obj=obj)
|
1789 |
+
logger.trace(pickler, "# T7")
|
1790 |
+
elif obj is EnumMeta:
|
1791 |
+
logger.trace(pickler, "T7: %s", obj)
|
1792 |
+
pickler.write(GLOBAL + b'enum\nEnumMeta\n')
|
1793 |
+
logger.trace(pickler, "# T7")
|
1794 |
+
|
1795 |
+
else:
|
1796 |
+
_byref = getattr(pickler, '_byref', None)
|
1797 |
+
obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
|
1798 |
+
incorrectly_named = not _locate_function(obj, pickler)
|
1799 |
+
if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
|
1800 |
+
if postproc_list is None:
|
1801 |
+
postproc_list = []
|
1802 |
+
|
1803 |
+
# thanks to Tom Stepleton pointing out pickler._session unneeded
|
1804 |
+
logger.trace(pickler, "T2: %s", obj)
|
1805 |
+
_dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict
|
1806 |
+
|
1807 |
+
#print (_dict)
|
1808 |
+
#print ("%s\n%s" % (type(obj), obj.__name__))
|
1809 |
+
#print ("%s\n%s" % (obj.__bases__, obj.__dict__))
|
1810 |
+
slots = _dict.get('__slots__', ())
|
1811 |
+
if type(slots) == str:
|
1812 |
+
# __slots__ accepts a single string
|
1813 |
+
slots = (slots,)
|
1814 |
+
|
1815 |
+
for name in slots:
|
1816 |
+
_dict.pop(name, None)
|
1817 |
+
|
1818 |
+
if isinstance(obj, abc.ABCMeta):
|
1819 |
+
logger.trace(pickler, "ABC: %s", obj)
|
1820 |
+
_dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
|
1821 |
+
logger.trace(pickler, "# ABC")
|
1822 |
+
|
1823 |
+
qualname = getattr(obj, '__qualname__', None)
|
1824 |
+
if attrs is not None:
|
1825 |
+
for k, v in attrs.items():
|
1826 |
+
postproc_list.append((setattr, (obj, k, v)))
|
1827 |
+
# TODO: Consider using the state argument to save_reduce?
|
1828 |
+
if qualname is not None:
|
1829 |
+
postproc_list.append((setattr, (obj, '__qualname__', qualname)))
|
1830 |
+
|
1831 |
+
if not hasattr(obj, '__orig_bases__'):
|
1832 |
+
_save_with_postproc(pickler, (_create_type, (
|
1833 |
+
type(obj), obj.__name__, obj.__bases__, _dict
|
1834 |
+
)), obj=obj, postproc_list=postproc_list)
|
1835 |
+
else:
|
1836 |
+
# This case will always work, but might be overkill.
|
1837 |
+
_metadict = {
|
1838 |
+
'metaclass': type(obj)
|
1839 |
+
}
|
1840 |
+
|
1841 |
+
if _dict:
|
1842 |
+
_dict_update = PartialType(_setitems, source=_dict)
|
1843 |
+
else:
|
1844 |
+
_dict_update = None
|
1845 |
+
|
1846 |
+
_save_with_postproc(pickler, (new_class, (
|
1847 |
+
obj.__name__, obj.__orig_bases__, _metadict, _dict_update
|
1848 |
+
)), obj=obj, postproc_list=postproc_list)
|
1849 |
+
logger.trace(pickler, "# T2")
|
1850 |
+
else:
|
1851 |
+
obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
|
1852 |
+
logger.trace(pickler, "T4: %s", obj)
|
1853 |
+
if incorrectly_named:
|
1854 |
+
warnings.warn(
|
1855 |
+
"Cannot locate reference to %r." % (obj,),
|
1856 |
+
PicklingWarning,
|
1857 |
+
stacklevel=3,
|
1858 |
+
)
|
1859 |
+
if obj_recursive:
|
1860 |
+
warnings.warn(
|
1861 |
+
"Cannot pickle %r: %s.%s has recursive self-references that "
|
1862 |
+
"trigger a RecursionError." % (obj, obj.__module__, obj_name),
|
1863 |
+
PicklingWarning,
|
1864 |
+
stacklevel=3,
|
1865 |
+
)
|
1866 |
+
#print (obj.__dict__)
|
1867 |
+
#print ("%s\n%s" % (type(obj), obj.__name__))
|
1868 |
+
#print ("%s\n%s" % (obj.__bases__, obj.__dict__))
|
1869 |
+
StockPickler.save_global(pickler, obj, name=obj_name)
|
1870 |
+
logger.trace(pickler, "# T4")
|
1871 |
+
return
|
1872 |
+
|
1873 |
+
@register(property)
|
1874 |
+
@register(abc.abstractproperty)
|
1875 |
+
def save_property(pickler, obj):
|
1876 |
+
logger.trace(pickler, "Pr: %s", obj)
|
1877 |
+
pickler.save_reduce(type(obj), (obj.fget, obj.fset, obj.fdel, obj.__doc__),
|
1878 |
+
obj=obj)
|
1879 |
+
logger.trace(pickler, "# Pr")
|
1880 |
+
|
1881 |
+
@register(staticmethod)
|
1882 |
+
@register(classmethod)
|
1883 |
+
@register(abc.abstractstaticmethod)
|
1884 |
+
@register(abc.abstractclassmethod)
|
1885 |
+
def save_classmethod(pickler, obj):
|
1886 |
+
logger.trace(pickler, "Cm: %s", obj)
|
1887 |
+
orig_func = obj.__func__
|
1888 |
+
|
1889 |
+
# if type(obj.__dict__) is dict:
|
1890 |
+
# if obj.__dict__:
|
1891 |
+
# state = obj.__dict__
|
1892 |
+
# else:
|
1893 |
+
# state = None
|
1894 |
+
# else:
|
1895 |
+
# state = (None, {'__dict__', obj.__dict__})
|
1896 |
+
|
1897 |
+
pickler.save_reduce(type(obj), (orig_func,), obj=obj)
|
1898 |
+
logger.trace(pickler, "# Cm")
|
1899 |
+
|
1900 |
+
@register(FunctionType)
|
1901 |
+
def save_function(pickler, obj):
|
1902 |
+
if not _locate_function(obj, pickler):
|
1903 |
+
if type(obj.__code__) is not CodeType:
|
1904 |
+
# Some PyPy builtin functions have no module name, and thus are not
|
1905 |
+
# able to be located
|
1906 |
+
module_name = getattr(obj, '__module__', None)
|
1907 |
+
if module_name is None:
|
1908 |
+
module_name = __builtin__.__name__
|
1909 |
+
module = _import_module(module_name, safe=True)
|
1910 |
+
_pypy_builtin = False
|
1911 |
+
try:
|
1912 |
+
found, _ = _getattribute(module, obj.__qualname__)
|
1913 |
+
if getattr(found, '__func__', None) is obj:
|
1914 |
+
_pypy_builtin = True
|
1915 |
+
except AttributeError:
|
1916 |
+
pass
|
1917 |
+
|
1918 |
+
if _pypy_builtin:
|
1919 |
+
logger.trace(pickler, "F3: %s", obj)
|
1920 |
+
pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
|
1921 |
+
logger.trace(pickler, "# F3")
|
1922 |
+
return
|
1923 |
+
|
1924 |
+
logger.trace(pickler, "F1: %s", obj)
|
1925 |
+
_recurse = getattr(pickler, '_recurse', None)
|
1926 |
+
_postproc = getattr(pickler, '_postproc', None)
|
1927 |
+
_main_modified = getattr(pickler, '_main_modified', None)
|
1928 |
+
_original_main = getattr(pickler, '_original_main', __builtin__)#'None'
|
1929 |
+
postproc_list = []
|
1930 |
+
if _recurse:
|
1931 |
+
# recurse to get all globals referred to by obj
|
1932 |
+
from .detect import globalvars
|
1933 |
+
globs_copy = globalvars(obj, recurse=True, builtin=True)
|
1934 |
+
|
1935 |
+
# Add the name of the module to the globs dictionary to prevent
|
1936 |
+
# the duplication of the dictionary. Pickle the unpopulated
|
1937 |
+
# globals dictionary and set the remaining items after the function
|
1938 |
+
# is created to correctly handle recursion.
|
1939 |
+
globs = {'__name__': obj.__module__}
|
1940 |
+
else:
|
1941 |
+
globs_copy = obj.__globals__
|
1942 |
+
|
1943 |
+
# If the globals is the __dict__ from the module being saved as a
|
1944 |
+
# session, substitute it by the dictionary being actually saved.
|
1945 |
+
if _main_modified and globs_copy is _original_main.__dict__:
|
1946 |
+
globs_copy = getattr(pickler, '_main', _original_main).__dict__
|
1947 |
+
globs = globs_copy
|
1948 |
+
# If the globals is a module __dict__, do not save it in the pickle.
|
1949 |
+
elif globs_copy is not None and obj.__module__ is not None and \
|
1950 |
+
getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
|
1951 |
+
globs = globs_copy
|
1952 |
+
else:
|
1953 |
+
globs = {'__name__': obj.__module__}
|
1954 |
+
|
1955 |
+
if globs_copy is not None and globs is not globs_copy:
|
1956 |
+
# In the case that the globals are copied, we need to ensure that
|
1957 |
+
# the globals dictionary is updated when all objects in the
|
1958 |
+
# dictionary are already created.
|
1959 |
+
glob_ids = {id(g) for g in globs_copy.values()}
|
1960 |
+
for stack_element in _postproc:
|
1961 |
+
if stack_element in glob_ids:
|
1962 |
+
_postproc[stack_element].append((_setitems, (globs, globs_copy)))
|
1963 |
+
break
|
1964 |
+
else:
|
1965 |
+
postproc_list.append((_setitems, (globs, globs_copy)))
|
1966 |
+
|
1967 |
+
closure = obj.__closure__
|
1968 |
+
state_dict = {}
|
1969 |
+
for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
|
1970 |
+
fattr = getattr(obj, fattrname, None)
|
1971 |
+
if fattr is not None:
|
1972 |
+
state_dict[fattrname] = fattr
|
1973 |
+
if obj.__qualname__ != obj.__name__:
|
1974 |
+
state_dict['__qualname__'] = obj.__qualname__
|
1975 |
+
if '__name__' not in globs or obj.__module__ != globs['__name__']:
|
1976 |
+
state_dict['__module__'] = obj.__module__
|
1977 |
+
|
1978 |
+
state = obj.__dict__
|
1979 |
+
if type(state) is not dict:
|
1980 |
+
state_dict['__dict__'] = state
|
1981 |
+
state = None
|
1982 |
+
if state_dict:
|
1983 |
+
state = state, state_dict
|
1984 |
+
|
1985 |
+
_save_with_postproc(pickler, (_create_function, (
|
1986 |
+
obj.__code__, globs, obj.__name__, obj.__defaults__,
|
1987 |
+
closure
|
1988 |
+
), state), obj=obj, postproc_list=postproc_list)
|
1989 |
+
|
1990 |
+
# Lift closure cell update to earliest function (#458)
|
1991 |
+
if _postproc:
|
1992 |
+
topmost_postproc = next(iter(_postproc.values()), None)
|
1993 |
+
if closure and topmost_postproc:
|
1994 |
+
for cell in closure:
|
1995 |
+
possible_postproc = (setattr, (cell, 'cell_contents', obj))
|
1996 |
+
try:
|
1997 |
+
topmost_postproc.remove(possible_postproc)
|
1998 |
+
except ValueError:
|
1999 |
+
continue
|
2000 |
+
|
2001 |
+
# Change the value of the cell
|
2002 |
+
pickler.save_reduce(*possible_postproc)
|
2003 |
+
# pop None created by calling preprocessing step off stack
|
2004 |
+
pickler.write(POP)
|
2005 |
+
|
2006 |
+
logger.trace(pickler, "# F1")
|
2007 |
+
else:
|
2008 |
+
logger.trace(pickler, "F2: %s", obj)
|
2009 |
+
name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
|
2010 |
+
StockPickler.save_global(pickler, obj, name=name)
|
2011 |
+
logger.trace(pickler, "# F2")
|
2012 |
+
return
|
2013 |
+
|
2014 |
+
if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
|
2015 |
+
_PyCapsule_New = ctypes.pythonapi.PyCapsule_New
|
2016 |
+
_PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
|
2017 |
+
_PyCapsule_New.restype = ctypes.py_object
|
2018 |
+
_PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
|
2019 |
+
_PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
|
2020 |
+
_PyCapsule_GetPointer.restype = ctypes.c_void_p
|
2021 |
+
_PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
|
2022 |
+
_PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
|
2023 |
+
_PyCapsule_GetDestructor.restype = ctypes.c_void_p
|
2024 |
+
_PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
|
2025 |
+
_PyCapsule_GetContext.argtypes = (ctypes.py_object,)
|
2026 |
+
_PyCapsule_GetContext.restype = ctypes.c_void_p
|
2027 |
+
_PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
|
2028 |
+
_PyCapsule_GetName.argtypes = (ctypes.py_object,)
|
2029 |
+
_PyCapsule_GetName.restype = ctypes.c_char_p
|
2030 |
+
_PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
|
2031 |
+
_PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
|
2032 |
+
_PyCapsule_IsValid.restype = ctypes.c_bool
|
2033 |
+
_PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
|
2034 |
+
_PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
|
2035 |
+
_PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
|
2036 |
+
_PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
|
2037 |
+
_PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
|
2038 |
+
_PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
|
2039 |
+
_PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
|
2040 |
+
_PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
|
2041 |
+
#from _socket import CAPI as _testcapsule
|
2042 |
+
_testcapsule_name = b'dill._dill._testcapsule'
|
2043 |
+
_testcapsule = _PyCapsule_New(
|
2044 |
+
ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
|
2045 |
+
ctypes.c_char_p(_testcapsule_name),
|
2046 |
+
None
|
2047 |
+
)
|
2048 |
+
PyCapsuleType = type(_testcapsule)
|
2049 |
+
@register(PyCapsuleType)
|
2050 |
+
def save_capsule(pickler, obj):
|
2051 |
+
logger.trace(pickler, "Cap: %s", obj)
|
2052 |
+
name = _PyCapsule_GetName(obj)
|
2053 |
+
#warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
|
2054 |
+
pointer = _PyCapsule_GetPointer(obj, name)
|
2055 |
+
context = _PyCapsule_GetContext(obj)
|
2056 |
+
destructor = _PyCapsule_GetDestructor(obj)
|
2057 |
+
pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
|
2058 |
+
logger.trace(pickler, "# Cap")
|
2059 |
+
_incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
|
2060 |
+
_reverse_typemap['PyCapsuleType'] = PyCapsuleType
|
2061 |
+
_incedental_types.add(PyCapsuleType)
|
2062 |
+
else:
|
2063 |
+
_testcapsule = None
|
2064 |
+
|
2065 |
+
|
2066 |
+
#############################
|
2067 |
+
# A quick fix for issue #500
|
2068 |
+
# This should be removed when a better solution is found.
|
2069 |
+
|
2070 |
+
if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
|
2071 |
+
@register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
|
2072 |
+
def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
|
2073 |
+
logger.trace(pickler, "DcHDF: %s", obj)
|
2074 |
+
pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
|
2075 |
+
logger.trace(pickler, "# DcHDF")
|
2076 |
+
|
2077 |
+
if hasattr(dataclasses, "MISSING"):
|
2078 |
+
@register(type(dataclasses.MISSING))
|
2079 |
+
def save_dataclasses_MISSING_TYPE(pickler, obj):
|
2080 |
+
logger.trace(pickler, "DcM: %s", obj)
|
2081 |
+
pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
|
2082 |
+
logger.trace(pickler, "# DcM")
|
2083 |
+
|
2084 |
+
if hasattr(dataclasses, "KW_ONLY"):
|
2085 |
+
@register(type(dataclasses.KW_ONLY))
|
2086 |
+
def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
|
2087 |
+
logger.trace(pickler, "DcKWO: %s", obj)
|
2088 |
+
pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
|
2089 |
+
logger.trace(pickler, "# DcKWO")
|
2090 |
+
|
2091 |
+
if hasattr(dataclasses, "_FIELD_BASE"):
|
2092 |
+
@register(dataclasses._FIELD_BASE)
|
2093 |
+
def save_dataclasses_FIELD_BASE(pickler, obj):
|
2094 |
+
logger.trace(pickler, "DcFB: %s", obj)
|
2095 |
+
pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
|
2096 |
+
logger.trace(pickler, "# DcFB")
|
2097 |
+
|
2098 |
+
#############################
|
2099 |
+
|
2100 |
+
# quick sanity checking
|
2101 |
+
def pickles(obj,exact=False,safe=False,**kwds):
|
2102 |
+
"""
|
2103 |
+
Quick check if object pickles with dill.
|
2104 |
+
|
2105 |
+
If *exact=True* then an equality test is done to check if the reconstructed
|
2106 |
+
object matches the original object.
|
2107 |
+
|
2108 |
+
If *safe=True* then any exception will raised in copy signal that the
|
2109 |
+
object is not picklable, otherwise only pickling errors will be trapped.
|
2110 |
+
|
2111 |
+
Additional keyword arguments are as :func:`dumps` and :func:`loads`.
|
2112 |
+
"""
|
2113 |
+
if safe: exceptions = (Exception,) # RuntimeError, ValueError
|
2114 |
+
else:
|
2115 |
+
exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
|
2116 |
+
try:
|
2117 |
+
pik = copy(obj, **kwds)
|
2118 |
+
#FIXME: should check types match first, then check content if "exact"
|
2119 |
+
try:
|
2120 |
+
#FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
|
2121 |
+
result = bool(pik.all() == obj.all())
|
2122 |
+
except (AttributeError, TypeError):
|
2123 |
+
warnings.filterwarnings('ignore') #FIXME: be specific
|
2124 |
+
result = pik == obj
|
2125 |
+
if warnings.filters: del warnings.filters[0]
|
2126 |
+
if hasattr(result, 'toarray'): # for unusual types like sparse matrix
|
2127 |
+
result = result.toarray().all()
|
2128 |
+
if result: return True
|
2129 |
+
if not exact:
|
2130 |
+
result = type(pik) == type(obj)
|
2131 |
+
if result: return result
|
2132 |
+
# class instances might have been dumped with byref=False
|
2133 |
+
return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
|
2134 |
+
return False
|
2135 |
+
except exceptions:
|
2136 |
+
return False
|
2137 |
+
|
2138 |
+
def check(obj, *args, **kwds):
|
2139 |
+
"""
|
2140 |
+
Check pickling of an object across another process.
|
2141 |
+
|
2142 |
+
*python* is the path to the python interpreter (defaults to sys.executable)
|
2143 |
+
|
2144 |
+
Set *verbose=True* to print the unpickled object in the other process.
|
2145 |
+
|
2146 |
+
Additional keyword arguments are as :func:`dumps` and :func:`loads`.
|
2147 |
+
"""
|
2148 |
+
# == undocumented ==
|
2149 |
+
# python -- the string path or executable name of the selected python
|
2150 |
+
# verbose -- if True, be verbose about printing warning messages
|
2151 |
+
# all other args and kwds are passed to dill.dumps #FIXME: ignore on load
|
2152 |
+
verbose = kwds.pop('verbose', False)
|
2153 |
+
python = kwds.pop('python', None)
|
2154 |
+
if python is None:
|
2155 |
+
import sys
|
2156 |
+
python = sys.executable
|
2157 |
+
# type check
|
2158 |
+
isinstance(python, str)
|
2159 |
+
import subprocess
|
2160 |
+
fail = True
|
2161 |
+
try:
|
2162 |
+
_obj = dumps(obj, *args, **kwds)
|
2163 |
+
fail = False
|
2164 |
+
finally:
|
2165 |
+
if fail and verbose:
|
2166 |
+
print("DUMP FAILED")
|
2167 |
+
#FIXME: fails if python interpreter path contains spaces
|
2168 |
+
# Use the following instead (which also processes the 'ignore' keyword):
|
2169 |
+
# ignore = kwds.pop('ignore', None)
|
2170 |
+
# unpickle = "dill.loads(%s, ignore=%s)"%(repr(_obj), repr(ignore))
|
2171 |
+
# cmd = [python, "-c", "import dill; print(%s)"%unpickle]
|
2172 |
+
# msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
|
2173 |
+
msg = "%s -c import dill; print(dill.loads(%s))" % (python, repr(_obj))
|
2174 |
+
msg = "SUCCESS" if not subprocess.call(msg.split(None,2)) else "LOAD FAILED"
|
2175 |
+
if verbose:
|
2176 |
+
print(msg)
|
2177 |
+
return
|
2178 |
+
|
2179 |
+
# use to protect against missing attributes
|
2180 |
+
def is_dill(pickler, child=None):
|
2181 |
+
"check the dill-ness of your pickler"
|
2182 |
+
if child is False or not hasattr(pickler.__class__, 'mro'):
|
2183 |
+
return 'dill' in pickler.__module__
|
2184 |
+
return Pickler in pickler.__class__.mro()
|
2185 |
+
|
2186 |
+
def _extend():
|
2187 |
+
"""extend pickle with all of dill's registered types"""
|
2188 |
+
# need to have pickle not choke on _main_module? use is_dill(pickler)
|
2189 |
+
for t,func in Pickler.dispatch.items():
|
2190 |
+
try:
|
2191 |
+
StockPickler.dispatch[t] = func
|
2192 |
+
except Exception: #TypeError, PicklingError, UnpicklingError
|
2193 |
+
logger.trace(pickler, "skip: %s", t)
|
2194 |
+
return
|
2195 |
+
|
2196 |
+
del diff, _use_diff, use_diff
|
2197 |
+
|
2198 |
+
# EOF
|
env-llmeval/lib/python3.10/site-packages/dill/_objects.py
ADDED
@@ -0,0 +1,537 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
"""
|
9 |
+
all Python Standard Library objects (currently: CH 1-15 @ 2.7)
|
10 |
+
and some other common objects (i.e. numpy.ndarray)
|
11 |
+
"""
|
12 |
+
|
13 |
+
__all__ = ['registered','failures','succeeds']
|
14 |
+
|
15 |
+
# helper imports
|
16 |
+
import warnings; warnings.filterwarnings("ignore", category=DeprecationWarning)
|
17 |
+
import sys
|
18 |
+
import queue as Queue
|
19 |
+
import dbm as anydbm
|
20 |
+
from io import BytesIO as StringIO
|
21 |
+
import re
|
22 |
+
import array
|
23 |
+
import collections
|
24 |
+
import codecs
|
25 |
+
import struct
|
26 |
+
import dataclasses
|
27 |
+
import datetime
|
28 |
+
import calendar
|
29 |
+
import weakref
|
30 |
+
import pprint
|
31 |
+
import decimal
|
32 |
+
import numbers
|
33 |
+
import functools
|
34 |
+
import itertools
|
35 |
+
import operator
|
36 |
+
import tempfile
|
37 |
+
import shelve
|
38 |
+
import zlib
|
39 |
+
import gzip
|
40 |
+
import zipfile
|
41 |
+
import tarfile
|
42 |
+
import csv
|
43 |
+
import hashlib
|
44 |
+
import hmac
|
45 |
+
import os
|
46 |
+
import logging
|
47 |
+
import logging.handlers
|
48 |
+
import optparse
|
49 |
+
#import __hello__
|
50 |
+
import threading
|
51 |
+
import socket
|
52 |
+
import contextlib
|
53 |
+
try:
|
54 |
+
import bz2
|
55 |
+
import sqlite3
|
56 |
+
import dbm.ndbm as dbm
|
57 |
+
HAS_ALL = True
|
58 |
+
except ImportError: # Ubuntu
|
59 |
+
HAS_ALL = False
|
60 |
+
try:
|
61 |
+
#import curses
|
62 |
+
#from curses import textpad, panel
|
63 |
+
HAS_CURSES = True
|
64 |
+
except ImportError: # Windows
|
65 |
+
HAS_CURSES = False
|
66 |
+
try:
|
67 |
+
import ctypes
|
68 |
+
HAS_CTYPES = True
|
69 |
+
# if using `pypy`, pythonapi is not found
|
70 |
+
IS_PYPY = not hasattr(ctypes, 'pythonapi')
|
71 |
+
except ImportError: # MacPorts
|
72 |
+
HAS_CTYPES = False
|
73 |
+
IS_PYPY = False
|
74 |
+
|
75 |
+
# helper objects
|
76 |
+
class _class:
|
77 |
+
def _method(self):
|
78 |
+
pass
|
79 |
+
# @classmethod
|
80 |
+
# def _clsmethod(cls): #XXX: test me
|
81 |
+
# pass
|
82 |
+
# @staticmethod
|
83 |
+
# def _static(self): #XXX: test me
|
84 |
+
# pass
|
85 |
+
class _class2:
|
86 |
+
def __call__(self):
|
87 |
+
pass
|
88 |
+
_instance2 = _class2()
|
89 |
+
class _newclass(object):
|
90 |
+
def _method(self):
|
91 |
+
pass
|
92 |
+
# @classmethod
|
93 |
+
# def _clsmethod(cls): #XXX: test me
|
94 |
+
# pass
|
95 |
+
# @staticmethod
|
96 |
+
# def _static(self): #XXX: test me
|
97 |
+
# pass
|
98 |
+
class _newclass2(object):
|
99 |
+
__slots__ = ['descriptor']
|
100 |
+
def _function(x): yield x
|
101 |
+
def _function2():
|
102 |
+
try: raise
|
103 |
+
except Exception:
|
104 |
+
from sys import exc_info
|
105 |
+
e, er, tb = exc_info()
|
106 |
+
return er, tb
|
107 |
+
if HAS_CTYPES:
|
108 |
+
class _Struct(ctypes.Structure):
|
109 |
+
pass
|
110 |
+
_Struct._fields_ = [("_field", ctypes.c_int),("next", ctypes.POINTER(_Struct))]
|
111 |
+
_filedescrip, _tempfile = tempfile.mkstemp('r') # deleted in cleanup
|
112 |
+
if sys.hexversion < 0x30d00a1:
|
113 |
+
_tmpf = tempfile.TemporaryFile('w') # emits OSError 9 in python 3.13
|
114 |
+
else:
|
115 |
+
_tmpf = tempfile.NamedTemporaryFile('w').file # for > python 3.9
|
116 |
+
|
117 |
+
# objects used by dill for type declaration
|
118 |
+
registered = d = {}
|
119 |
+
# objects dill fails to pickle
|
120 |
+
failures = x = {}
|
121 |
+
# all other type objects
|
122 |
+
succeeds = a = {}
|
123 |
+
|
124 |
+
# types module (part of CH 8)
|
125 |
+
a['BooleanType'] = bool(1)
|
126 |
+
a['BuiltinFunctionType'] = len
|
127 |
+
a['BuiltinMethodType'] = a['BuiltinFunctionType']
|
128 |
+
a['BytesType'] = _bytes = codecs.latin_1_encode('\x00')[0] # bytes(1)
|
129 |
+
a['ClassType'] = _class
|
130 |
+
a['ComplexType'] = complex(1)
|
131 |
+
a['DictType'] = _dict = {}
|
132 |
+
a['DictionaryType'] = a['DictType']
|
133 |
+
a['FloatType'] = float(1)
|
134 |
+
a['FunctionType'] = _function
|
135 |
+
a['InstanceType'] = _instance = _class()
|
136 |
+
a['IntType'] = _int = int(1)
|
137 |
+
a['ListType'] = _list = []
|
138 |
+
a['NoneType'] = None
|
139 |
+
a['ObjectType'] = object()
|
140 |
+
a['StringType'] = _str = str(1)
|
141 |
+
a['TupleType'] = _tuple = ()
|
142 |
+
a['TypeType'] = type
|
143 |
+
a['LongType'] = _int
|
144 |
+
a['UnicodeType'] = _str
|
145 |
+
# built-in constants (CH 4)
|
146 |
+
a['CopyrightType'] = copyright
|
147 |
+
# built-in types (CH 5)
|
148 |
+
a['ClassObjectType'] = _newclass # <type 'type'>
|
149 |
+
a['ClassInstanceType'] = _newclass() # <type 'class'>
|
150 |
+
a['SetType'] = _set = set()
|
151 |
+
a['FrozenSetType'] = frozenset()
|
152 |
+
# built-in exceptions (CH 6)
|
153 |
+
a['ExceptionType'] = _exception = _function2()[0]
|
154 |
+
# string services (CH 7)
|
155 |
+
a['SREPatternType'] = _srepattern = re.compile('')
|
156 |
+
# data types (CH 8)
|
157 |
+
a['ArrayType'] = array.array("f")
|
158 |
+
a['DequeType'] = collections.deque([0])
|
159 |
+
a['DefaultDictType'] = collections.defaultdict(_function, _dict)
|
160 |
+
a['TZInfoType'] = datetime.tzinfo()
|
161 |
+
a['DateTimeType'] = datetime.datetime.today()
|
162 |
+
a['CalendarType'] = calendar.Calendar()
|
163 |
+
# numeric and mathematical types (CH 9)
|
164 |
+
a['DecimalType'] = decimal.Decimal(1)
|
165 |
+
a['CountType'] = itertools.count(0)
|
166 |
+
# data compression and archiving (CH 12)
|
167 |
+
a['TarInfoType'] = tarfile.TarInfo()
|
168 |
+
# generic operating system services (CH 15)
|
169 |
+
a['LoggerType'] = _logger = logging.getLogger()
|
170 |
+
a['FormatterType'] = logging.Formatter() # pickle ok
|
171 |
+
a['FilterType'] = logging.Filter() # pickle ok
|
172 |
+
a['LogRecordType'] = logging.makeLogRecord(_dict) # pickle ok
|
173 |
+
a['OptionParserType'] = _oparser = optparse.OptionParser() # pickle ok
|
174 |
+
a['OptionGroupType'] = optparse.OptionGroup(_oparser,"foo") # pickle ok
|
175 |
+
a['OptionType'] = optparse.Option('--foo') # pickle ok
|
176 |
+
if HAS_CTYPES:
|
177 |
+
z = x if IS_PYPY else a
|
178 |
+
z['CCharType'] = _cchar = ctypes.c_char()
|
179 |
+
z['CWCharType'] = ctypes.c_wchar() # fail == 2.6
|
180 |
+
z['CByteType'] = ctypes.c_byte()
|
181 |
+
z['CUByteType'] = ctypes.c_ubyte()
|
182 |
+
z['CShortType'] = ctypes.c_short()
|
183 |
+
z['CUShortType'] = ctypes.c_ushort()
|
184 |
+
z['CIntType'] = ctypes.c_int()
|
185 |
+
z['CUIntType'] = ctypes.c_uint()
|
186 |
+
z['CLongType'] = ctypes.c_long()
|
187 |
+
z['CULongType'] = ctypes.c_ulong()
|
188 |
+
z['CLongLongType'] = ctypes.c_longlong()
|
189 |
+
z['CULongLongType'] = ctypes.c_ulonglong()
|
190 |
+
z['CFloatType'] = ctypes.c_float()
|
191 |
+
z['CDoubleType'] = ctypes.c_double()
|
192 |
+
z['CSizeTType'] = ctypes.c_size_t()
|
193 |
+
del z
|
194 |
+
a['CLibraryLoaderType'] = ctypes.cdll
|
195 |
+
a['StructureType'] = _Struct
|
196 |
+
# if not IS_PYPY:
|
197 |
+
# a['BigEndianStructureType'] = ctypes.BigEndianStructure()
|
198 |
+
#NOTE: also LittleEndianStructureType and UnionType... abstract classes
|
199 |
+
#NOTE: remember for ctypesobj.contents creates a new python object
|
200 |
+
#NOTE: ctypes.c_int._objects is memberdescriptor for object's __dict__
|
201 |
+
#NOTE: base class of all ctypes data types is non-public _CData
|
202 |
+
|
203 |
+
import fractions
|
204 |
+
import io
|
205 |
+
from io import StringIO as TextIO
|
206 |
+
# built-in functions (CH 2)
|
207 |
+
a['ByteArrayType'] = bytearray([1])
|
208 |
+
# numeric and mathematical types (CH 9)
|
209 |
+
a['FractionType'] = fractions.Fraction()
|
210 |
+
a['NumberType'] = numbers.Number()
|
211 |
+
# generic operating system services (CH 15)
|
212 |
+
a['IOBaseType'] = io.IOBase()
|
213 |
+
a['RawIOBaseType'] = io.RawIOBase()
|
214 |
+
a['TextIOBaseType'] = io.TextIOBase()
|
215 |
+
a['BufferedIOBaseType'] = io.BufferedIOBase()
|
216 |
+
a['UnicodeIOType'] = TextIO() # the new StringIO
|
217 |
+
a['LoggerAdapterType'] = logging.LoggerAdapter(_logger,_dict) # pickle ok
|
218 |
+
if HAS_CTYPES:
|
219 |
+
z = x if IS_PYPY else a
|
220 |
+
z['CBoolType'] = ctypes.c_bool(1)
|
221 |
+
z['CLongDoubleType'] = ctypes.c_longdouble()
|
222 |
+
del z
|
223 |
+
import argparse
|
224 |
+
# data types (CH 8)
|
225 |
+
a['OrderedDictType'] = collections.OrderedDict(_dict)
|
226 |
+
a['CounterType'] = collections.Counter(_dict)
|
227 |
+
if HAS_CTYPES:
|
228 |
+
z = x if IS_PYPY else a
|
229 |
+
z['CSSizeTType'] = ctypes.c_ssize_t()
|
230 |
+
del z
|
231 |
+
# generic operating system services (CH 15)
|
232 |
+
a['NullHandlerType'] = logging.NullHandler() # pickle ok # new 2.7
|
233 |
+
a['ArgParseFileType'] = argparse.FileType() # pickle ok
|
234 |
+
|
235 |
+
# -- pickle fails on all below here -----------------------------------------
|
236 |
+
# types module (part of CH 8)
|
237 |
+
a['CodeType'] = compile('','','exec')
|
238 |
+
a['DictProxyType'] = type.__dict__
|
239 |
+
a['DictProxyType2'] = _newclass.__dict__
|
240 |
+
a['EllipsisType'] = Ellipsis
|
241 |
+
a['ClosedFileType'] = open(os.devnull, 'wb', buffering=0).close()
|
242 |
+
a['GetSetDescriptorType'] = array.array.typecode
|
243 |
+
a['LambdaType'] = _lambda = lambda x: lambda y: x #XXX: works when not imported!
|
244 |
+
a['MemberDescriptorType'] = _newclass2.descriptor
|
245 |
+
if not IS_PYPY:
|
246 |
+
a['MemberDescriptorType2'] = datetime.timedelta.days
|
247 |
+
a['MethodType'] = _method = _class()._method #XXX: works when not imported!
|
248 |
+
a['ModuleType'] = datetime
|
249 |
+
a['NotImplementedType'] = NotImplemented
|
250 |
+
a['SliceType'] = slice(1)
|
251 |
+
a['UnboundMethodType'] = _class._method #XXX: works when not imported!
|
252 |
+
d['TextWrapperType'] = open(os.devnull, 'r') # same as mode='w','w+','r+'
|
253 |
+
d['BufferedRandomType'] = open(os.devnull, 'r+b') # same as mode='w+b'
|
254 |
+
d['BufferedReaderType'] = open(os.devnull, 'rb') # (default: buffering=-1)
|
255 |
+
d['BufferedWriterType'] = open(os.devnull, 'wb')
|
256 |
+
try: # oddities: deprecated
|
257 |
+
from _pyio import open as _open
|
258 |
+
d['PyTextWrapperType'] = _open(os.devnull, 'r', buffering=-1)
|
259 |
+
d['PyBufferedRandomType'] = _open(os.devnull, 'r+b', buffering=-1)
|
260 |
+
d['PyBufferedReaderType'] = _open(os.devnull, 'rb', buffering=-1)
|
261 |
+
d['PyBufferedWriterType'] = _open(os.devnull, 'wb', buffering=-1)
|
262 |
+
except ImportError:
|
263 |
+
pass
|
264 |
+
# other (concrete) object types
|
265 |
+
z = d if sys.hexversion < 0x30800a2 else a
|
266 |
+
z['CellType'] = (_lambda)(0).__closure__[0]
|
267 |
+
del z
|
268 |
+
a['XRangeType'] = _xrange = range(1)
|
269 |
+
a['MethodDescriptorType'] = type.__dict__['mro']
|
270 |
+
a['WrapperDescriptorType'] = type.__repr__
|
271 |
+
#a['WrapperDescriptorType2'] = type.__dict__['__module__']#XXX: GetSetDescriptor
|
272 |
+
a['ClassMethodDescriptorType'] = type.__dict__['__prepare__']
|
273 |
+
# built-in functions (CH 2)
|
274 |
+
_methodwrap = (1).__lt__
|
275 |
+
a['MethodWrapperType'] = _methodwrap
|
276 |
+
a['StaticMethodType'] = staticmethod(_method)
|
277 |
+
a['ClassMethodType'] = classmethod(_method)
|
278 |
+
a['PropertyType'] = property()
|
279 |
+
d['SuperType'] = super(Exception, _exception)
|
280 |
+
# string services (CH 7)
|
281 |
+
_in = _bytes
|
282 |
+
a['InputType'] = _cstrI = StringIO(_in)
|
283 |
+
a['OutputType'] = _cstrO = StringIO()
|
284 |
+
# data types (CH 8)
|
285 |
+
a['WeakKeyDictionaryType'] = weakref.WeakKeyDictionary()
|
286 |
+
a['WeakValueDictionaryType'] = weakref.WeakValueDictionary()
|
287 |
+
a['ReferenceType'] = weakref.ref(_instance)
|
288 |
+
a['DeadReferenceType'] = weakref.ref(_class())
|
289 |
+
a['ProxyType'] = weakref.proxy(_instance)
|
290 |
+
a['DeadProxyType'] = weakref.proxy(_class())
|
291 |
+
a['CallableProxyType'] = weakref.proxy(_instance2)
|
292 |
+
a['DeadCallableProxyType'] = weakref.proxy(_class2())
|
293 |
+
a['QueueType'] = Queue.Queue()
|
294 |
+
# numeric and mathematical types (CH 9)
|
295 |
+
d['PartialType'] = functools.partial(int,base=2)
|
296 |
+
a['IzipType'] = zip('0','1')
|
297 |
+
a['ChainType'] = itertools.chain('0','1')
|
298 |
+
d['ItemGetterType'] = operator.itemgetter(0)
|
299 |
+
d['AttrGetterType'] = operator.attrgetter('__repr__')
|
300 |
+
# file and directory access (CH 10)
|
301 |
+
_fileW = _cstrO
|
302 |
+
# data persistence (CH 11)
|
303 |
+
if HAS_ALL:
|
304 |
+
x['ConnectionType'] = _conn = sqlite3.connect(':memory:')
|
305 |
+
x['CursorType'] = _conn.cursor()
|
306 |
+
a['ShelveType'] = shelve.Shelf({})
|
307 |
+
# data compression and archiving (CH 12)
|
308 |
+
if HAS_ALL:
|
309 |
+
x['BZ2FileType'] = bz2.BZ2File(os.devnull)
|
310 |
+
x['BZ2CompressorType'] = bz2.BZ2Compressor()
|
311 |
+
x['BZ2DecompressorType'] = bz2.BZ2Decompressor()
|
312 |
+
#x['ZipFileType'] = _zip = zipfile.ZipFile(os.devnull,'w')
|
313 |
+
#_zip.write(_tempfile,'x') [causes annoying warning/error printed on import]
|
314 |
+
#a['ZipInfoType'] = _zip.getinfo('x')
|
315 |
+
a['TarFileType'] = tarfile.open(fileobj=_fileW,mode='w')
|
316 |
+
# file formats (CH 13)
|
317 |
+
x['DialectType'] = csv.get_dialect('excel')
|
318 |
+
if sys.hexversion < 0x30d00a1:
|
319 |
+
import xdrlib
|
320 |
+
a['PackerType'] = xdrlib.Packer()
|
321 |
+
# optional operating system services (CH 16)
|
322 |
+
a['LockType'] = threading.Lock()
|
323 |
+
a['RLockType'] = threading.RLock()
|
324 |
+
# generic operating system services (CH 15) # also closed/open and r/w/etc...
|
325 |
+
a['NamedLoggerType'] = _logger = logging.getLogger(__name__)
|
326 |
+
#a['FrozenModuleType'] = __hello__ #FIXME: prints "Hello world..."
|
327 |
+
# interprocess communication (CH 17)
|
328 |
+
x['SocketType'] = _socket = socket.socket()
|
329 |
+
x['SocketPairType'] = socket.socketpair()[0]
|
330 |
+
# python runtime services (CH 27)
|
331 |
+
a['GeneratorContextManagerType'] = contextlib.contextmanager(max)([1])
|
332 |
+
|
333 |
+
try: # ipython
|
334 |
+
__IPYTHON__ is True # is ipython
|
335 |
+
except NameError:
|
336 |
+
# built-in constants (CH 4)
|
337 |
+
a['QuitterType'] = quit
|
338 |
+
d['ExitType'] = a['QuitterType']
|
339 |
+
try: # numpy #FIXME: slow... 0.05 to 0.1 sec to import numpy
|
340 |
+
from numpy import ufunc as _numpy_ufunc
|
341 |
+
from numpy import array as _numpy_array
|
342 |
+
from numpy import int32 as _numpy_int32
|
343 |
+
a['NumpyUfuncType'] = _numpy_ufunc
|
344 |
+
a['NumpyArrayType'] = _numpy_array
|
345 |
+
a['NumpyInt32Type'] = _numpy_int32
|
346 |
+
except ImportError:
|
347 |
+
pass
|
348 |
+
# numeric and mathematical types (CH 9)
|
349 |
+
a['ProductType'] = itertools.product('0','1')
|
350 |
+
# generic operating system services (CH 15)
|
351 |
+
a['FileHandlerType'] = logging.FileHandler(os.devnull)
|
352 |
+
a['RotatingFileHandlerType'] = logging.handlers.RotatingFileHandler(os.devnull)
|
353 |
+
a['SocketHandlerType'] = logging.handlers.SocketHandler('localhost',514)
|
354 |
+
a['MemoryHandlerType'] = logging.handlers.MemoryHandler(1)
|
355 |
+
# data types (CH 8)
|
356 |
+
a['WeakSetType'] = weakref.WeakSet() # 2.7
|
357 |
+
# generic operating system services (CH 15) [errors when dill is imported]
|
358 |
+
#a['ArgumentParserType'] = _parser = argparse.ArgumentParser('PROG')
|
359 |
+
#a['NamespaceType'] = _parser.parse_args() # pickle ok
|
360 |
+
#a['SubParsersActionType'] = _parser.add_subparsers()
|
361 |
+
#a['MutuallyExclusiveGroupType'] = _parser.add_mutually_exclusive_group()
|
362 |
+
#a['ArgumentGroupType'] = _parser.add_argument_group()
|
363 |
+
|
364 |
+
# -- dill fails in some versions below here ---------------------------------
|
365 |
+
# types module (part of CH 8)
|
366 |
+
d['FileType'] = open(os.devnull, 'rb', buffering=0) # same 'wb','wb+','rb+'
|
367 |
+
# built-in functions (CH 2)
|
368 |
+
# Iterators:
|
369 |
+
a['ListIteratorType'] = iter(_list) # empty vs non-empty
|
370 |
+
a['SetIteratorType'] = iter(_set) #XXX: empty vs non-empty #FIXME: list_iterator
|
371 |
+
a['TupleIteratorType']= iter(_tuple) # empty vs non-empty
|
372 |
+
a['XRangeIteratorType'] = iter(_xrange) # empty vs non-empty
|
373 |
+
a["BytesIteratorType"] = iter(b'')
|
374 |
+
a["BytearrayIteratorType"] = iter(bytearray(b''))
|
375 |
+
z = x if IS_PYPY else a
|
376 |
+
z["CallableIteratorType"] = iter(iter, None)
|
377 |
+
del z
|
378 |
+
x["MemoryIteratorType"] = iter(memoryview(b''))
|
379 |
+
a["ListReverseiteratorType"] = reversed([])
|
380 |
+
X = a['OrderedDictType']
|
381 |
+
d["OdictKeysType"] = X.keys()
|
382 |
+
d["OdictValuesType"] = X.values()
|
383 |
+
d["OdictItemsType"] = X.items()
|
384 |
+
a["OdictIteratorType"] = iter(X.keys()) #FIXME: list_iterator
|
385 |
+
del X
|
386 |
+
#FIXME: list_iterator
|
387 |
+
a['DictionaryItemIteratorType'] = iter(type.__dict__.items())
|
388 |
+
a['DictionaryKeyIteratorType'] = iter(type.__dict__.keys())
|
389 |
+
a['DictionaryValueIteratorType'] = iter(type.__dict__.values())
|
390 |
+
if sys.hexversion >= 0x30800a0:
|
391 |
+
a["DictReversekeyiteratorType"] = reversed({}.keys())
|
392 |
+
a["DictReversevalueiteratorType"] = reversed({}.values())
|
393 |
+
a["DictReverseitemiteratorType"] = reversed({}.items())
|
394 |
+
|
395 |
+
try:
|
396 |
+
import symtable
|
397 |
+
#FIXME: fails to pickle
|
398 |
+
x["SymtableEntryType"] = symtable.symtable("", "string", "exec")._table
|
399 |
+
except ImportError:
|
400 |
+
pass
|
401 |
+
|
402 |
+
if sys.hexversion >= 0x30a00a0 and not IS_PYPY:
|
403 |
+
x['LineIteratorType'] = compile('3', '', 'eval').co_lines()
|
404 |
+
|
405 |
+
if sys.hexversion >= 0x30b00b0:
|
406 |
+
from types import GenericAlias
|
407 |
+
d["GenericAliasIteratorType"] = iter(GenericAlias(list, (int,)))
|
408 |
+
x['PositionsIteratorType'] = compile('3', '', 'eval').co_positions()
|
409 |
+
|
410 |
+
# data types (CH 8)
|
411 |
+
a['PrettyPrinterType'] = pprint.PrettyPrinter()
|
412 |
+
# numeric and mathematical types (CH 9)
|
413 |
+
a['CycleType'] = itertools.cycle('0')
|
414 |
+
# file and directory access (CH 10)
|
415 |
+
a['TemporaryFileType'] = _tmpf
|
416 |
+
# data compression and archiving (CH 12)
|
417 |
+
x['GzipFileType'] = gzip.GzipFile(fileobj=_fileW)
|
418 |
+
# generic operating system services (CH 15)
|
419 |
+
a['StreamHandlerType'] = logging.StreamHandler()
|
420 |
+
# numeric and mathematical types (CH 9)
|
421 |
+
a['PermutationsType'] = itertools.permutations('0')
|
422 |
+
a['CombinationsType'] = itertools.combinations('0',1)
|
423 |
+
a['RepeatType'] = itertools.repeat(0)
|
424 |
+
a['CompressType'] = itertools.compress('0',[1])
|
425 |
+
#XXX: ...and etc
|
426 |
+
|
427 |
+
# -- dill fails on all below here -------------------------------------------
|
428 |
+
# types module (part of CH 8)
|
429 |
+
x['GeneratorType'] = _generator = _function(1) #XXX: priority
|
430 |
+
x['FrameType'] = _generator.gi_frame #XXX: inspect.currentframe()
|
431 |
+
x['TracebackType'] = _function2()[1] #(see: inspect.getouterframes,getframeinfo)
|
432 |
+
# other (concrete) object types
|
433 |
+
# (also: Capsule / CObject ?)
|
434 |
+
# built-in functions (CH 2)
|
435 |
+
# built-in types (CH 5)
|
436 |
+
# string services (CH 7)
|
437 |
+
x['StructType'] = struct.Struct('c')
|
438 |
+
x['CallableIteratorType'] = _srepattern.finditer('')
|
439 |
+
x['SREMatchType'] = _srepattern.match('')
|
440 |
+
x['SREScannerType'] = _srepattern.scanner('')
|
441 |
+
x['StreamReader'] = codecs.StreamReader(_cstrI) #XXX: ... and etc
|
442 |
+
# python object persistence (CH 11)
|
443 |
+
# x['DbShelveType'] = shelve.open('foo','n')#,protocol=2) #XXX: delete foo
|
444 |
+
if HAS_ALL:
|
445 |
+
z = a if IS_PYPY else x
|
446 |
+
z['DbmType'] = dbm.open(_tempfile,'n')
|
447 |
+
del z
|
448 |
+
# x['DbCursorType'] = _dbcursor = anydbm.open('foo','n') #XXX: delete foo
|
449 |
+
# x['DbType'] = _dbcursor.db
|
450 |
+
# data compression and archiving (CH 12)
|
451 |
+
x['ZlibCompressType'] = zlib.compressobj()
|
452 |
+
x['ZlibDecompressType'] = zlib.decompressobj()
|
453 |
+
# file formats (CH 13)
|
454 |
+
x['CSVReaderType'] = csv.reader(_cstrI)
|
455 |
+
x['CSVWriterType'] = csv.writer(_cstrO)
|
456 |
+
x['CSVDictReaderType'] = csv.DictReader(_cstrI)
|
457 |
+
x['CSVDictWriterType'] = csv.DictWriter(_cstrO,{})
|
458 |
+
# cryptographic services (CH 14)
|
459 |
+
x['HashType'] = hashlib.md5()
|
460 |
+
if (sys.hexversion < 0x30800a1):
|
461 |
+
x['HMACType'] = hmac.new(_in)
|
462 |
+
else:
|
463 |
+
x['HMACType'] = hmac.new(_in, digestmod='md5')
|
464 |
+
# generic operating system services (CH 15)
|
465 |
+
if HAS_CURSES: pass
|
466 |
+
#x['CursesWindowType'] = _curwin = curses.initscr() #FIXME: messes up tty
|
467 |
+
#x['CursesTextPadType'] = textpad.Textbox(_curwin)
|
468 |
+
#x['CursesPanelType'] = panel.new_panel(_curwin)
|
469 |
+
if HAS_CTYPES:
|
470 |
+
x['CCharPType'] = ctypes.c_char_p()
|
471 |
+
x['CWCharPType'] = ctypes.c_wchar_p()
|
472 |
+
x['CVoidPType'] = ctypes.c_void_p()
|
473 |
+
if sys.platform[:3] == 'win':
|
474 |
+
x['CDLLType'] = _cdll = ctypes.cdll.msvcrt
|
475 |
+
else:
|
476 |
+
x['CDLLType'] = _cdll = ctypes.CDLL(None)
|
477 |
+
if not IS_PYPY:
|
478 |
+
x['PyDLLType'] = _pydll = ctypes.pythonapi
|
479 |
+
x['FuncPtrType'] = _cdll._FuncPtr()
|
480 |
+
x['CCharArrayType'] = ctypes.create_string_buffer(1)
|
481 |
+
x['CWCharArrayType'] = ctypes.create_unicode_buffer(1)
|
482 |
+
x['CParamType'] = ctypes.byref(_cchar)
|
483 |
+
x['LPCCharType'] = ctypes.pointer(_cchar)
|
484 |
+
x['LPCCharObjType'] = _lpchar = ctypes.POINTER(ctypes.c_char)
|
485 |
+
x['NullPtrType'] = _lpchar()
|
486 |
+
x['NullPyObjectType'] = ctypes.py_object()
|
487 |
+
x['PyObjectType'] = ctypes.py_object(lambda :None)
|
488 |
+
z = a if IS_PYPY else x
|
489 |
+
z['FieldType'] = _field = _Struct._field
|
490 |
+
z['CFUNCTYPEType'] = _cfunc = ctypes.CFUNCTYPE(ctypes.c_char)
|
491 |
+
if sys.hexversion < 0x30c00b3:
|
492 |
+
x['CFunctionType'] = _cfunc(str)
|
493 |
+
del z
|
494 |
+
# numeric and mathematical types (CH 9)
|
495 |
+
a['MethodCallerType'] = operator.methodcaller('mro') # 2.6
|
496 |
+
# built-in types (CH 5)
|
497 |
+
x['MemoryType'] = memoryview(_in) # 2.7
|
498 |
+
x['MemoryType2'] = memoryview(bytearray(_in)) # 2.7
|
499 |
+
d['DictItemsType'] = _dict.items() # 2.7
|
500 |
+
d['DictKeysType'] = _dict.keys() # 2.7
|
501 |
+
d['DictValuesType'] = _dict.values() # 2.7
|
502 |
+
# generic operating system services (CH 15)
|
503 |
+
a['RawTextHelpFormatterType'] = argparse.RawTextHelpFormatter('PROG')
|
504 |
+
a['RawDescriptionHelpFormatterType'] = argparse.RawDescriptionHelpFormatter('PROG')
|
505 |
+
a['ArgDefaultsHelpFormatterType'] = argparse.ArgumentDefaultsHelpFormatter('PROG')
|
506 |
+
z = a if IS_PYPY else x
|
507 |
+
z['CmpKeyType'] = _cmpkey = functools.cmp_to_key(_methodwrap) # 2.7, >=3.2
|
508 |
+
z['CmpKeyObjType'] = _cmpkey('0') #2.7, >=3.2
|
509 |
+
del z
|
510 |
+
# oddities: removed, etc
|
511 |
+
x['BufferType'] = x['MemoryType']
|
512 |
+
|
513 |
+
from dill._dill import _testcapsule
|
514 |
+
if _testcapsule is not None:
|
515 |
+
d['PyCapsuleType'] = _testcapsule
|
516 |
+
del _testcapsule
|
517 |
+
|
518 |
+
if hasattr(dataclasses, '_HAS_DEFAULT_FACTORY'):
|
519 |
+
a['DataclassesHasDefaultFactoryType'] = dataclasses._HAS_DEFAULT_FACTORY
|
520 |
+
|
521 |
+
if hasattr(dataclasses, 'MISSING'):
|
522 |
+
a['DataclassesMissingType'] = dataclasses.MISSING
|
523 |
+
|
524 |
+
if hasattr(dataclasses, 'KW_ONLY'):
|
525 |
+
a['DataclassesKWOnlyType'] = dataclasses.KW_ONLY
|
526 |
+
|
527 |
+
if hasattr(dataclasses, '_FIELD_BASE'):
|
528 |
+
a['DataclassesFieldBaseType'] = dataclasses._FIELD
|
529 |
+
|
530 |
+
# -- cleanup ----------------------------------------------------------------
|
531 |
+
a.update(d) # registered also succeed
|
532 |
+
if sys.platform[:3] == 'win':
|
533 |
+
os.close(_filedescrip) # required on win32
|
534 |
+
os.remove(_tempfile)
|
535 |
+
|
536 |
+
|
537 |
+
# EOF
|
env-llmeval/lib/python3.10/site-packages/dill/_shims.py
ADDED
@@ -0,0 +1,193 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Author: Anirudh Vegesana ([email protected])
|
5 |
+
# Copyright (c) 2021-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
"""
|
9 |
+
Provides shims for compatibility between versions of dill and Python.
|
10 |
+
|
11 |
+
Compatibility shims should be provided in this file. Here are two simple example
|
12 |
+
use cases.
|
13 |
+
|
14 |
+
Deprecation of constructor function:
|
15 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
16 |
+
Assume that we were transitioning _import_module in _dill.py to
|
17 |
+
the builtin function importlib.import_module when present.
|
18 |
+
|
19 |
+
@move_to(_dill)
|
20 |
+
def _import_module(import_name):
|
21 |
+
... # code already in _dill.py
|
22 |
+
|
23 |
+
_import_module = Getattr(importlib, 'import_module', Getattr(_dill, '_import_module', None))
|
24 |
+
|
25 |
+
The code will attempt to find import_module in the importlib module. If not
|
26 |
+
present, it will use the _import_module function in _dill.
|
27 |
+
|
28 |
+
Emulate new Python behavior in older Python versions:
|
29 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
30 |
+
CellType.cell_contents behaves differently in Python 3.6 and 3.7. It is
|
31 |
+
read-only in Python 3.6 and writable and deletable in 3.7.
|
32 |
+
|
33 |
+
if _dill.OLD37 and _dill.HAS_CTYPES and ...:
|
34 |
+
@move_to(_dill)
|
35 |
+
    def _setattr(object, name, value):
        if type(object) is _dill.CellType and name == 'cell_contents':
            _PyCell_Set.argtypes = (ctypes.py_object, ctypes.py_object)
            _PyCell_Set(object, value)
        else:
            setattr(object, name, value)
    ... # more cases below

    _setattr = Getattr(_dill, '_setattr', setattr)

_dill._setattr will be used when present to emulate Python 3.7 functionality in
older versions of Python while defaulting to the standard setattr in 3.7+.

See this PR for the discussion that lead to this system:
https://github.com/uqfoundation/dill/pull/443
"""

import inspect
import sys

_dill = sys.modules['dill._dill']


class Reduce(object):
    """
    Reduce objects are wrappers used for compatibility enforcement during
    unpickle-time. They should only be used in calls to pickler.save and
    other Reduce objects. They are only evaluated within unpickler.load.

    Pickling a Reduce object makes the two implementations equivalent:

    pickler.save(Reduce(*reduction))

    pickler.save_reduce(*reduction, obj=reduction)
    """
    __slots__ = ['reduction']
    def __new__(cls, *reduction, **kwargs):
        """
        Args:
            *reduction: a tuple that matches the format given here:
                https://docs.python.org/3/library/pickle.html#object.__reduce__
            is_callable: a bool to indicate that the object created by
                unpickling `reduction` is callable. If true, the current Reduce
                is allowed to be used as the function in further save_reduce calls
                or Reduce objects.
        """
        is_callable = kwargs.get('is_callable', False) # Pleases Py2. Can be removed later
        if is_callable:
            self = object.__new__(_CallableReduce)
        else:
            self = object.__new__(Reduce)
        self.reduction = reduction
        return self
    def __repr__(self):
        return 'Reduce%s' % (self.reduction,)
    def __copy__(self):
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        return self.reduction
    def __reduce_ex__(self, protocol):
        return self.__reduce__()

class _CallableReduce(Reduce):
    # A version of Reduce for functions. Used to trick pickler.save_reduce into
    # thinking that Reduce objects of functions are themselves meaningful functions.
    def __call__(self, *args, **kwargs):
        reduction = self.__reduce__()
        func = reduction[0]
        f_args = reduction[1]
        obj = func(*f_args)
        return obj(*args, **kwargs)

__NO_DEFAULT = _dill.Sentinel('Getattr.NO_DEFAULT')

def Getattr(object, name, default=__NO_DEFAULT):
    """
    A Reduce object that represents the getattr operation. When unpickled, the
    Getattr will access an attribute 'name' of 'object' and return the value
    stored there. If the attribute doesn't exist, the default value will be
    returned if present.

    The following statements are equivalent:
        Getattr(collections, 'OrderedDict')
        Getattr(collections, 'spam', None)
        Getattr(*args)

        Reduce(getattr, (collections, 'OrderedDict'))
        Reduce(getattr, (collections, 'spam', None))
        Reduce(getattr, args)

    During unpickling, the first two will result in collections.OrderedDict and
    None respectively because the first attribute exists and the second one does
    not, forcing it to use the default value given in the third argument.
    """

    if default is Getattr.NO_DEFAULT:
        reduction = (getattr, (object, name))
    else:
        reduction = (getattr, (object, name, default))

    return Reduce(*reduction, is_callable=callable(default))

Getattr.NO_DEFAULT = __NO_DEFAULT
del __NO_DEFAULT

def move_to(module, name=None):
    def decorator(func):
        if name is None:
            fname = func.__name__
        else:
            fname = name
        module.__dict__[fname] = func
        func.__module__ = module.__name__
        return func
    return decorator

def register_shim(name, default):
    """
    A easier to understand and more compact way of "softly" defining a function.
    These two pieces of code are equivalent:

        if _dill.OLD3X:
            def _create_class():
                ...
        _create_class = register_shim('_create_class', types.new_class)

        if _dill.OLD3X:
            @move_to(_dill)
            def _create_class():
                ...
        _create_class = Getattr(_dill, '_create_class', types.new_class)

    Intuitively, it creates a function or object in the versions of dill/python
    that require special reimplementations, and use a core library or default
    implementation if that function or object does not exist.
    """
    func = globals().get(name)
    if func is not None:
        _dill.__dict__[name] = func
        func.__module__ = _dill.__name__

    if default is Getattr.NO_DEFAULT:
        reduction = (getattr, (_dill, name))
    else:
        reduction = (getattr, (_dill, name, default))

    return Reduce(*reduction, is_callable=callable(default))

######################
## Compatibility Shims are defined below
######################

_CELL_EMPTY = register_shim('_CELL_EMPTY', None)

_setattr = register_shim('_setattr', setattr)
_delattr = register_shim('_delattr', delattr)
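Commentary, not part of the diff: a Getattr placeholder records a (getattr, args) reduction and only resolves it at unpickle time. The short sketch below assumes a dill version like the one vendored here, where dill._shims exposes Getattr; the round trip is expected to hand back the real attribute.

import collections
import dill
from dill._shims import Getattr

placeholder = Getattr(collections, 'OrderedDict')  # records the reduction (getattr, (collections, 'OrderedDict'))
payload = dill.dumps(placeholder)                  # what gets pickled is the reduction, not the placeholder
restored = dill.loads(payload)                     # getattr() runs here, at unpickle time
print(restored is collections.OrderedDict)         # expected: True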
env-llmeval/lib/python3.10/site-packages/dill/detect.py
ADDED
@@ -0,0 +1,284 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2016 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
"""
Methods for detecting objects leading to pickling failures.
"""

import dis
from inspect import ismethod, isfunction, istraceback, isframe, iscode

from .pointers import parent, reference, at, parents, children
from .logger import trace

__all__ = ['baditems','badobjects','badtypes','code','errors','freevars',
           'getmodule','globalvars','nestedcode','nestedglobals','outermost',
           'referredglobals','referrednested','trace','varnames']

def getmodule(object, _filename=None, force=False):
    """get the module of the object"""
    from inspect import getmodule as getmod
    module = getmod(object, _filename)
    if module or not force: return module
    import builtins
    from .source import getname
    name = getname(object, force=True)
    return builtins if name in vars(builtins).keys() else None

def outermost(func): # is analogous to getsource(func,enclosing=True)
    """get outermost enclosing object (i.e. the outer function in a closure)

    NOTE: this is the object-equivalent of getsource(func, enclosing=True)
    """
    if ismethod(func):
        _globals = func.__func__.__globals__ or {}
    elif isfunction(func):
        _globals = func.__globals__ or {}
    else:
        return #XXX: or raise? no matches
    _globals = _globals.items()
    # get the enclosing source
    from .source import getsourcelines
    try: lines,lnum = getsourcelines(func, enclosing=True)
    except Exception: #TypeError, IOError
        lines,lnum = [],None
    code = ''.join(lines)
    # get all possible names,objects that are named in the enclosing source
    _locals = ((name,obj) for (name,obj) in _globals if name in code)
    # now only save the objects that generate the enclosing block
    for name,obj in _locals: #XXX: don't really need 'name'
        try:
            if getsourcelines(obj) == (lines,lnum): return obj
        except Exception: #TypeError, IOError
            pass
    return #XXX: or raise? no matches

def nestedcode(func, recurse=True): #XXX: or return dict of {co_name: co} ?
    """get the code objects for any nested functions (e.g. in a closure)"""
    func = code(func)
    if not iscode(func): return [] #XXX: or raise? no matches
    nested = set()
    for co in func.co_consts:
        if co is None: continue
        co = code(co)
        if co:
            nested.add(co)
            if recurse: nested |= set(nestedcode(co, recurse=True))
    return list(nested)

def code(func):
    """get the code object for the given function or method

    NOTE: use dill.source.getsource(CODEOBJ) to get the source code
    """
    if ismethod(func): func = func.__func__
    if isfunction(func): func = func.__code__
    if istraceback(func): func = func.tb_frame
    if isframe(func): func = func.f_code
    if iscode(func): return func
    return

#XXX: ugly: parse dis.dis for name after "<code object" in line and in globals?
def referrednested(func, recurse=True): #XXX: return dict of {__name__: obj} ?
    """get functions defined inside of func (e.g. inner functions in a closure)

    NOTE: results may differ if the function has been executed or not.
    If len(nestedcode(func)) > len(referrednested(func)), try calling func().
    If possible, python builds code objects, but delays building functions
    until func() is called.
    """
    import gc
    funcs = set()
    # get the code objects, and try to track down by referrence
    for co in nestedcode(func, recurse):
        # look for function objects that refer to the code object
        for obj in gc.get_referrers(co):
            # get methods
            _ = getattr(obj, '__func__', None) # ismethod
            if getattr(_, '__code__', None) is co: funcs.add(obj)
            # get functions
            elif getattr(obj, '__code__', None) is co: funcs.add(obj)
            # get frame objects
            elif getattr(obj, 'f_code', None) is co: funcs.add(obj)
            # get code objects
            elif hasattr(obj, 'co_code') and obj is co: funcs.add(obj)
    # frameobjs => func.__code__.co_varnames not in func.__code__.co_cellvars
    # funcobjs => func.__code__.co_cellvars not in func.__code__.co_varnames
    # frameobjs are not found, however funcobjs are...
    # (see: test_mixins.quad ... and test_mixins.wtf)
    # after execution, code objects get compiled, and then may be found by gc
    return list(funcs)


def freevars(func):
    """get objects defined in enclosing code that are referred to by func

    returns a dict of {name:object}"""
    if ismethod(func): func = func.__func__
    if isfunction(func):
        closures = func.__closure__ or ()
        func = func.__code__.co_freevars # get freevars
    else:
        return {}

    def get_cell_contents():
        for name, c in zip(func, closures):
            try:
                cell_contents = c.cell_contents
            except ValueError: # cell is empty
                continue
            yield name, c.cell_contents

    return dict(get_cell_contents())

# thanks to Davies Liu for recursion of globals
def nestedglobals(func, recurse=True):
    """get the names of any globals found within func"""
    func = code(func)
    if func is None: return list()
    import sys
    from .temp import capture
    CAN_NULL = sys.hexversion >= 0x30b00a7 # NULL may be prepended >= 3.11a7
    names = set()
    with capture('stdout') as out:
        dis.dis(func) #XXX: dis.dis(None) disassembles last traceback
    for line in out.getvalue().splitlines():
        if '_GLOBAL' in line:
            name = line.split('(')[-1].split(')')[0]
            if CAN_NULL:
                names.add(name.replace('NULL + ', '').replace(' + NULL', ''))
            else:
                names.add(name)
    for co in getattr(func, 'co_consts', tuple()):
        if co and recurse and iscode(co):
            names.update(nestedglobals(co, recurse=True))
    return list(names)

def referredglobals(func, recurse=True, builtin=False):
    """get the names of objects in the global scope referred to by func"""
    return globalvars(func, recurse, builtin).keys()

def globalvars(func, recurse=True, builtin=False):
    """get objects defined in global scope that are referred to by func

    return a dict of {name:object}"""
    if ismethod(func): func = func.__func__
    if isfunction(func):
        globs = vars(getmodule(sum)).copy() if builtin else {}
        # get references from within closure
        orig_func, func = func, set()
        for obj in orig_func.__closure__ or {}:
            try:
                cell_contents = obj.cell_contents
            except ValueError: # cell is empty
                pass
            else:
                _vars = globalvars(cell_contents, recurse, builtin) or {}
                func.update(_vars) #XXX: (above) be wary of infinte recursion?
                globs.update(_vars)
        # get globals
        globs.update(orig_func.__globals__ or {})
        # get names of references
        if not recurse:
            func.update(orig_func.__code__.co_names)
        else:
            func.update(nestedglobals(orig_func.__code__))
            # find globals for all entries of func
            for key in func.copy(): #XXX: unnecessary...?
                nested_func = globs.get(key)
                if nested_func is orig_func:
                    #func.remove(key) if key in func else None
                    continue #XXX: globalvars(func, False)?
                func.update(globalvars(nested_func, True, builtin))
    elif iscode(func):
        globs = vars(getmodule(sum)).copy() if builtin else {}
        #globs.update(globals())
        if not recurse:
            func = func.co_names # get names
        else:
            orig_func = func.co_name # to stop infinite recursion
            func = set(nestedglobals(func))
            # find globals for all entries of func
            for key in func.copy(): #XXX: unnecessary...?
                if key is orig_func:
                    #func.remove(key) if key in func else None
                    continue #XXX: globalvars(func, False)?
                nested_func = globs.get(key)
                func.update(globalvars(nested_func, True, builtin))
    else:
        return {}
    #NOTE: if name not in __globals__, then we skip it...
    return dict((name,globs[name]) for name in func if name in globs)


def varnames(func):
    """get names of variables defined by func

    returns a tuple (local vars, local vars referrenced by nested functions)"""
    func = code(func)
    if not iscode(func):
        return () #XXX: better ((),())? or None?
    return func.co_varnames, func.co_cellvars


def baditems(obj, exact=False, safe=False): #XXX: obj=globals() ?
    """get items in object that fail to pickle"""
    if not hasattr(obj,'__iter__'): # is not iterable
        return [j for j in (badobjects(obj,0,exact,safe),) if j is not None]
    obj = obj.values() if getattr(obj,'values',None) else obj
    _obj = [] # can't use a set, as items may be unhashable
    [_obj.append(badobjects(i,0,exact,safe)) for i in obj if i not in _obj]
    return [j for j in _obj if j is not None]


def badobjects(obj, depth=0, exact=False, safe=False):
    """get objects that fail to pickle"""
    from dill import pickles
    if not depth:
        if pickles(obj,exact,safe): return None
        return obj
    return dict(((attr, badobjects(getattr(obj,attr),depth-1,exact,safe)) \
           for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe)))

def badtypes(obj, depth=0, exact=False, safe=False):
    """get types for objects that fail to pickle"""
    from dill import pickles
    if not depth:
        if pickles(obj,exact,safe): return None
        return type(obj)
    return dict(((attr, badtypes(getattr(obj,attr),depth-1,exact,safe)) \
           for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe)))

def errors(obj, depth=0, exact=False, safe=False):
    """get errors for objects that fail to pickle"""
    from dill import pickles, copy
    if not depth:
        try:
            pik = copy(obj)
            if exact:
                assert pik == obj, \
                    "Unpickling produces %s instead of %s" % (pik,obj)
            assert type(pik) == type(obj), \
                "Unpickling produces %s instead of %s" % (type(pik),type(obj))
            return None
        except Exception:
            import sys
            return sys.exc_info()[1]
    _dict = {}
    for attr in dir(obj):
        try:
            _attr = getattr(obj,attr)
        except Exception:
            import sys
            _dict[attr] = sys.exc_info()[1]
            continue
        if not pickles(_attr,exact,safe):
            _dict[attr] = errors(_attr,depth-1,exact,safe)
    return _dict


# EOF
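Commentary, not part of the diff: the helpers in detect.py are easiest to see on a namespace that contains something unpicklable. A minimal sketch, assuming this dill is importable; generators are a convenient example of an object dill cannot pickle.

import dill.detect

gen = (i for i in range(3))          # generators cannot be pickled
ns = {'answer': 42, 'gen': gen}

print(dill.detect.baditems(ns))      # expected to list only the generator
print(dill.detect.badtypes(gen))     # the offending type, e.g. <class 'generator'>
print(dill.detect.errors(gen))       # the exception raised while trying to copy it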
env-llmeval/lib/python3.10/site-packages/dill/logger.py
ADDED
@@ -0,0 +1,285 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Leonardo Gama (@leogama)
# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
"""
Logging utilities for dill.

The 'logger' object is dill's top-level logger.

The 'adapter' object wraps the logger and implements a 'trace()' method that
generates a detailed tree-style trace for the pickling call at log level INFO.

The 'trace()' function sets and resets dill's logger log level, enabling and
disabling the pickling trace.

The trace shows a tree structure depicting the depth of each object serialized
*with dill save functions*, but not the ones that use save functions from
'pickle._Pickler.dispatch'. If the information is available, it also displays
the size in bytes that the object contributed to the pickle stream (including
its child objects).  Sample trace output:

    >>> import dill, dill.tests
    >>> dill.detect.trace(True)
    >>> dill.dump_session(main=dill.tests)
    ┬ M1: <module 'dill.tests' from '.../dill/tests/__init__.py'>
    ├┬ F2: <function _import_module at 0x7f0d2dce1b80>
    │└ # F2 [32 B]
    ├┬ D2: <dict object at 0x7f0d2e98a540>
    │├┬ T4: <class '_frozen_importlib.ModuleSpec'>
    ││└ # T4 [35 B]
    │├┬ D2: <dict object at 0x7f0d2ef0e8c0>
    ││├┬ T4: <class '_frozen_importlib_external.SourceFileLoader'>
    │││└ # T4 [50 B]
    ││├┬ D2: <dict object at 0x7f0d2e988a40>
    │││└ # D2 [84 B]
    ││└ # D2 [413 B]
    │└ # D2 [763 B]
    └ # M1 [813 B]
"""

__all__ = ['adapter', 'logger', 'trace']

import codecs
import contextlib
import locale
import logging
import math
import os
from functools import partial
from typing import TextIO, Union

import dill

# Tree drawing characters: Unicode to ASCII map.
ASCII_MAP = str.maketrans({"│": "|", "├": "|", "┬": "+", "└": "`"})

## Notes about the design choices ##

# Here is some domumentation of the Standard Library's logging internals that
# can't be found completely in the official documentation.  dill's logger is
# obtained by calling logging.getLogger('dill') and therefore is an instance of
# logging.getLoggerClass() at the call time.  As this is controlled by the user,
# in order to add some functionality to it it's necessary to use a LoggerAdapter
# to wrap it, overriding some of the adapter's methods and creating new ones.
#
# Basic calling sequence
# ======================
#
# Python's logging functionality can be conceptually divided into five steps:
#   0. Check logging level -> abort if call level is greater than logger level
#   1. Gather information -> construct a LogRecord from passed arguments and context
#   2. Filter (optional) -> discard message if the record matches a filter
#   3. Format -> format message with args, then format output string with message plus record
#   4. Handle -> write the formatted string to output as defined in the handler
#
# dill.logging.logger.log ->        # or logger.info, etc.
#   Logger.log ->               \
#     Logger._log ->             }- accept 'extra' parameter for custom record entries
#       Logger.makeRecord ->    /
#         LogRecord.__init__
#       Logger.handle ->
#         Logger.callHandlers ->
#           Handler.handle ->
#             Filterer.filter ->
#               Filter.filter
#             StreamHandler.emit ->
#               Handler.format ->
#                 Formatter.format ->
#                   LogRecord.getMessage        # does: record.message = msg % args
#                   Formatter.formatMessage ->
#                     PercentStyle.format       # does: self._fmt % vars(record)
#
# NOTE: All methods from the second line on are from logging.__init__.py

class TraceAdapter(logging.LoggerAdapter):
    """
    Tracks object tree depth and calculates pickled object size.

    A single instance of this wraps the module's logger, as the logging API
    doesn't allow setting it directly with a custom Logger subclass.  The added
    'trace()' method receives a pickle instance as the first argument and
    creates extra values to be added in the LogRecord from it, then calls
    'info()'.

    Usage of logger with 'trace()' method:

    >>> from dill.logger import adapter as logger  #NOTE: not dill.logger.logger
    >>> ...
    >>> def save_atype(pickler, obj):
    >>>     logger.trace(pickler, "Message with %s and %r etc. placeholders", 'text', obj)
    >>>     ...
    """
    def __init__(self, logger):
        self.logger = logger
    def addHandler(self, handler):
        formatter = TraceFormatter("%(prefix)s%(message)s%(suffix)s", handler=handler)
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
    def removeHandler(self, handler):
        self.logger.removeHandler(handler)
    def process(self, msg, kwargs):
        # A no-op override, as we don't have self.extra.
        return msg, kwargs
    def trace_setup(self, pickler):
        # Called by Pickler.dump().
        if not dill._dill.is_dill(pickler, child=False):
            return
        if self.isEnabledFor(logging.INFO):
            pickler._trace_depth = 1
            pickler._size_stack = []
        else:
            pickler._trace_depth = None
    def trace(self, pickler, msg, *args, **kwargs):
        if not hasattr(pickler, '_trace_depth'):
            logger.info(msg, *args, **kwargs)
            return
        if pickler._trace_depth is None:
            return
        extra = kwargs.get('extra', {})
        pushed_obj = msg.startswith('#')
        size = None
        try:
            # Streams are not required to be tellable.
            size = pickler._file.tell()
            frame = pickler.framer.current_frame
            try:
                size += frame.tell()
            except AttributeError:
                # PyPy may use a BytesBuilder as frame
                size += len(frame)
        except (AttributeError, TypeError):
            pass
        if size is not None:
            if not pushed_obj:
                pickler._size_stack.append(size)
            else:
                size -= pickler._size_stack.pop()
                extra['size'] = size
        if pushed_obj:
            pickler._trace_depth -= 1
        extra['depth'] = pickler._trace_depth
        kwargs['extra'] = extra
        self.info(msg, *args, **kwargs)
        if not pushed_obj:
            pickler._trace_depth += 1

class TraceFormatter(logging.Formatter):
    """
    Generates message prefix and suffix from record.

    This Formatter adds prefix and suffix strings to the log message in trace
    mode (an also provides empty string defaults for normal logs).
    """
    def __init__(self, *args, handler=None, **kwargs):
        super().__init__(*args, **kwargs)
        try:
            encoding = handler.stream.encoding
            if encoding is None:
                raise AttributeError
        except AttributeError:
            encoding = locale.getpreferredencoding()
        try:
            encoding = codecs.lookup(encoding).name
        except LookupError:
            self.is_utf8 = False
        else:
            self.is_utf8 = (encoding == codecs.lookup('utf-8').name)
    def format(self, record):
        fields = {'prefix': "", 'suffix': ""}
        if getattr(record, 'depth', 0) > 0:
            if record.msg.startswith("#"):
                prefix = (record.depth - 1)*"│" + "└"
            elif record.depth == 1:
                prefix = "┬"
            else:
                prefix = (record.depth - 2)*"│" + "├┬"
            if not self.is_utf8:
                prefix = prefix.translate(ASCII_MAP) + "-"
            fields['prefix'] = prefix + " "
        if hasattr(record, 'size') and record.size is not None and record.size >= 1:
            # Show object size in human-readable form.
            power = int(math.log(record.size, 2)) // 10
            size = record.size >> power*10
            fields['suffix'] = " [%d %sB]" % (size, "KMGTP"[power] + "i" if power else "")
        vars(record).update(fields)
        return super().format(record)

logger = logging.getLogger('dill')
logger.propagate = False
adapter = TraceAdapter(logger)
stderr_handler = logging._StderrHandler()
adapter.addHandler(stderr_handler)

def trace(arg: Union[bool, TextIO, str, os.PathLike] = None, *, mode: str = 'a') -> None:
    """print a trace through the stack when pickling; useful for debugging

    With a single boolean argument, enable or disable the tracing.

    Example usage:

        >>> import dill
        >>> dill.detect.trace(True)
        >>> dill.dump_session()

    Alternatively, ``trace()`` can be used as a context manager. With no
    arguments, it just takes care of restoring the tracing state on exit.
    Either a file handle, or a file name and (optionally) a file mode may be
    specitfied to redirect the tracing output in the ``with`` block context. A
    log function is yielded by the manager so the user can write extra
    information to the file.

    Example usage:

        >>> from dill import detect
        >>> D = {'a': 42, 'b': {'x': None}}
        >>> with detect.trace():
        >>>     dumps(D)
        ┬ D2: <dict object at 0x7f2721804800>
        ├┬ D2: <dict object at 0x7f27217f5c40>
        │└ # D2 [8 B]
        └ # D2 [22 B]
        >>> squared = lambda x: x**2
        >>> with detect.trace('output.txt', mode='w') as log:
        >>>     log("> D = %r", D)
        >>>     dumps(D)
        >>>     log("> squared = %r", squared)
        >>>     dumps(squared)

    Arguments:
        arg: a boolean value, or an optional file-like or path-like object for the context manager
        mode: mode string for ``open()`` if a file name is passed as the first argument
    """
    if not isinstance(arg, bool):
        return TraceManager(file=arg, mode=mode)
    logger.setLevel(logging.INFO if arg else logging.WARNING)

class TraceManager(contextlib.AbstractContextManager):
    """context manager version of trace(); can redirect the trace to a file"""
    def __init__(self, file, mode):
        self.file = file
        self.mode = mode
        self.redirect = file is not None
        self.file_is_stream = hasattr(file, 'write')
    def __enter__(self):
        if self.redirect:
            stderr_handler.flush()
            if self.file_is_stream:
                self.handler = logging.StreamHandler(self.file)
            else:
                self.handler = logging.FileHandler(self.file, self.mode)
            adapter.removeHandler(stderr_handler)
            adapter.addHandler(self.handler)
        self.old_level = adapter.getEffectiveLevel()
        adapter.setLevel(logging.INFO)
        return adapter.info
    def __exit__(self, *exc_info):
        adapter.setLevel(self.old_level)
        if self.redirect:
            adapter.removeHandler(self.handler)
            adapter.addHandler(stderr_handler)
            if not self.file_is_stream:
                self.handler.close()
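Commentary, not part of the diff: the design notes above build on the standard library's LoggerAdapter mechanism, which is to wrap a logger so that every record carries extra fields a Formatter can reference; TraceAdapter does exactly that with 'prefix', 'suffix', 'depth' and 'size'. A minimal stdlib-only sketch of the mechanism (the logger name below is made up for illustration):

import logging

base = logging.getLogger('adapter_demo')             # hypothetical logger name
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(depth)s> %(message)s"))
base.addHandler(handler)
base.setLevel(logging.INFO)

adapter = logging.LoggerAdapter(base, {'depth': 2})  # injects 'depth' into every LogRecord
adapter.info("pickling a dict")                      # prints: 2> pickling a dict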
env-llmeval/lib/python3.10/site-packages/dill/objtypes.py
ADDED
@@ -0,0 +1,24 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2016 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
"""
all Python Standard Library object types (currently: CH 1-15 @ 2.7)
and some other common object types (i.e. numpy.ndarray)

to load more objects and types, use dill.load_types()
"""

# non-local import of dill.objects
from dill import objects
for _type in objects.keys():
    exec("%s = type(objects['%s'])" % (_type,_type))

del objects
try:
    del _type
except NameError:
    pass
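Commentary, not part of the diff: the exec loop above just generates one module-level name per registered sample object, bound to that object's type; the same result can be written without exec. This sketch assumes dill.load_types() populates the dill.objects registry, as the module docstring states.

import dill
dill.load_types()                       # populate the dill.objects registry
from dill import objects

# one name per sample object, bound to that object's type (what the exec produces)
generated = {name: type(obj) for name, obj in objects.items()}
print(len(generated), "type names generated")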
env-llmeval/lib/python3.10/site-packages/dill/pointers.py
ADDED
@@ -0,0 +1,122 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2016 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/dill/blob/master/LICENSE

__all__ = ['parent', 'reference', 'at', 'parents', 'children']

import gc
import sys

from ._dill import _proxy_helper as reference
from ._dill import _locate_object as at

def parent(obj, objtype, ignore=()):
    """
    >>> listiter = iter([4,5,6,7])
    >>> obj = parent(listiter, list)
    >>> obj == [4,5,6,7]  # actually 'is', but don't have handle any longer
    True

    NOTE: objtype can be a single type (e.g. int or list) or a tuple of types.

    WARNING: if obj is a sequence (e.g. list), may produce unexpected results.
    Parent finds *one* parent (e.g. the last member of the sequence).
    """
    depth = 1 #XXX: always looking for the parent (only, right?)
    chain = parents(obj, objtype, depth, ignore)
    parent = chain.pop()
    if parent is obj:
        return None
    return parent


def parents(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ?
    """Find the chain of referents for obj. Chain will end with obj.

    objtype: an object type or tuple of types to search for
    depth: search depth (e.g. depth=2 is 'grandparents')
    ignore: an object or tuple of objects to ignore in the search
    """
    edge_func = gc.get_referents # looking for refs, not back_refs
    predicate = lambda x: isinstance(x, objtype) # looking for parent type
    #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ?
    ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore
    ignore = (id(obj) for obj in ignore)
    chain = find_chain(obj, predicate, edge_func, depth)[::-1]
    #XXX: should pop off obj... ?
    return chain


def children(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ?
    """Find the chain of referrers for obj. Chain will start with obj.

    objtype: an object type or tuple of types to search for
    depth: search depth (e.g. depth=2 is 'grandchildren')
    ignore: an object or tuple of objects to ignore in the search

    NOTE: a common thing to ignore is all globals, 'ignore=(globals(),)'

    NOTE: repeated calls may yield different results, as python stores
    the last value in the special variable '_'; thus, it is often good
    to execute something to replace '_' (e.g. >>> 1+1).
    """
    edge_func = gc.get_referrers # looking for back_refs, not refs
    predicate = lambda x: isinstance(x, objtype) # looking for child type
    #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ?
    ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore
    ignore = (id(obj) for obj in ignore)
    chain = find_chain(obj, predicate, edge_func, depth, ignore)
    #XXX: should pop off obj... ?
    return chain


# more generic helper function (cut-n-paste from objgraph)
# Source at http://mg.pov.lt/objgraph/
# Copyright (c) 2008-2010 Marius Gedminas <[email protected]>
# Copyright (c) 2010 Stefano Rivera <[email protected]>
# Released under the MIT licence (see objgraph/objgrah.py)

def find_chain(obj, predicate, edge_func, max_depth=20, extra_ignore=()):
    queue = [obj]
    depth = {id(obj): 0}
    parent = {id(obj): None}
    ignore = set(extra_ignore)
    ignore.add(id(extra_ignore))
    ignore.add(id(queue))
    ignore.add(id(depth))
    ignore.add(id(parent))
    ignore.add(id(ignore))
    ignore.add(id(sys._getframe()))  # this function
    ignore.add(id(sys._getframe(1))) # find_chain/find_backref_chain, likely
    gc.collect()
    while queue:
        target = queue.pop(0)
        if predicate(target):
            chain = [target]
            while parent[id(target)] is not None:
                target = parent[id(target)]
                chain.append(target)
            return chain
        tdepth = depth[id(target)]
        if tdepth < max_depth:
            referrers = edge_func(target)
            ignore.add(id(referrers))
            for source in referrers:
                if id(source) in ignore:
                    continue
                if id(source) not in depth:
                    depth[id(source)] = tdepth + 1
                    parent[id(source)] = target
                    queue.append(source)
    return [obj] # not found


# backward compatibility
refobject = at


# EOF
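Commentary, not part of the diff: a small usage sketch for the pointer helpers, assuming this dill is importable. parent() walks gc referents to recover the object an iterator points at, and children() walks gc referrers to find who is holding a reference.

from dill.pointers import parent, children

listiter = iter([4, 5, 6, 7])
print(parent(listiter, list))          # expected: [4, 5, 6, 7]

holder = {'payload': [4, 5, 6, 7]}
chain = children(holder['payload'], dict, ignore=(globals(),))
print(chain)                           # 'holder' is expected to appear in the referrer chain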
env-llmeval/lib/python3.10/site-packages/dill/session.py
ADDED
@@ -0,0 +1,613 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Author: Leonardo Gama (@leogama)
# Copyright (c) 2008-2015 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
"""
Pickle and restore the intepreter session.
"""

__all__ = [
    'dump_module', 'load_module', 'load_module_asdict',
    'dump_session', 'load_session' # backward compatibility
]

import re
import os
import sys
import warnings

from dill import _dill, Pickler, Unpickler
from ._dill import (
    BuiltinMethodType, FunctionType, MethodType, ModuleType, TypeType,
    _import_module, _is_builtin_module, _is_imported_module, _main_module,
    _reverse_typemap, __builtin__,
)

# Type hints.
from typing import Optional, Union

import pathlib
import tempfile

TEMPDIR = pathlib.PurePath(tempfile.gettempdir())

def _module_map():
    """get map of imported modules"""
    from collections import defaultdict
    from types import SimpleNamespace
    modmap = SimpleNamespace(
        by_name=defaultdict(list),
        by_id=defaultdict(list),
        top_level={},
    )
    for modname, module in sys.modules.items():
        if modname in ('__main__', '__mp_main__') or not isinstance(module, ModuleType):
            continue
        if '.' not in modname:
            modmap.top_level[id(module)] = modname
        for objname, modobj in module.__dict__.items():
            modmap.by_name[objname].append((modobj, modname))
            modmap.by_id[id(modobj)].append((modobj, objname, modname))
    return modmap

IMPORTED_AS_TYPES = (ModuleType, TypeType, FunctionType, MethodType, BuiltinMethodType)
if 'PyCapsuleType' in _reverse_typemap:
    IMPORTED_AS_TYPES += (_reverse_typemap['PyCapsuleType'],)
IMPORTED_AS_MODULES = ('ctypes', 'typing', 'subprocess', 'threading',
                       r'concurrent\.futures(\.\w+)?', r'multiprocessing(\.\w+)?')
IMPORTED_AS_MODULES = tuple(re.compile(x) for x in IMPORTED_AS_MODULES)

def _lookup_module(modmap, name, obj, main_module):
    """lookup name or id of obj if module is imported"""
    for modobj, modname in modmap.by_name[name]:
        if modobj is obj and sys.modules[modname] is not main_module:
            return modname, name
    __module__ = getattr(obj, '__module__', None)
    if isinstance(obj, IMPORTED_AS_TYPES) or (__module__ is not None
            and any(regex.fullmatch(__module__) for regex in IMPORTED_AS_MODULES)):
        for modobj, objname, modname in modmap.by_id[id(obj)]:
            if sys.modules[modname] is not main_module:
                return modname, objname
    return None, None

def _stash_modules(main_module):
    modmap = _module_map()
    newmod = ModuleType(main_module.__name__)

    imported = []
    imported_as = []
    imported_top_level = [] # keep separated for backward compatibility
    original = {}
    for name, obj in main_module.__dict__.items():
        if obj is main_module:
            original[name] = newmod # self-reference
        elif obj is main_module.__dict__:
            original[name] = newmod.__dict__
        # Avoid incorrectly matching a singleton value in another package (ex.: __doc__).
        elif any(obj is singleton for singleton in (None, False, True)) \
                or isinstance(obj, ModuleType) and _is_builtin_module(obj): # always saved by ref
            original[name] = obj
        else:
            source_module, objname = _lookup_module(modmap, name, obj, main_module)
            if source_module is not None:
                if objname == name:
                    imported.append((source_module, name))
                else:
                    imported_as.append((source_module, objname, name))
            else:
                try:
                    imported_top_level.append((modmap.top_level[id(obj)], name))
                except KeyError:
                    original[name] = obj

    if len(original) < len(main_module.__dict__):
        newmod.__dict__.update(original)
        newmod.__dill_imported = imported
        newmod.__dill_imported_as = imported_as
        newmod.__dill_imported_top_level = imported_top_level
        if getattr(newmod, '__loader__', None) is None and _is_imported_module(main_module):
            # Trick _is_imported_module() to force saving as an imported module.
            newmod.__loader__ = True # will be discarded by save_module()
        return newmod
    else:
        return main_module

def _restore_modules(unpickler, main_module):
    try:
        for modname, name in main_module.__dict__.pop('__dill_imported'):
            main_module.__dict__[name] = unpickler.find_class(modname, name)
        for modname, objname, name in main_module.__dict__.pop('__dill_imported_as'):
            main_module.__dict__[name] = unpickler.find_class(modname, objname)
        for modname, name in main_module.__dict__.pop('__dill_imported_top_level'):
            main_module.__dict__[name] = __import__(modname)
    except KeyError:
        pass

#NOTE: 06/03/15 renamed main_module to main
def dump_module(
    filename: Union[str, os.PathLike] = None,
    module: Optional[Union[ModuleType, str]] = None,
    refimported: bool = False,
    **kwds
) -> None:
    """Pickle the current state of :py:mod:`__main__` or another module to a file.

    Save the contents of :py:mod:`__main__` (e.g. from an interactive
    interpreter session), an imported module, or a module-type object (e.g.
    built with :py:class:`~types.ModuleType`), to a file. The pickled
    module can then be restored with the function :py:func:`load_module`.

    Args:
        filename: a path-like object or a writable stream. If `None`
            (the default), write to a named file in a temporary directory.
        module: a module object or the name of an importable module. If `None`
            (the default), :py:mod:`__main__` is saved.
        refimported: if `True`, all objects identified as having been imported
            into the module's namespace are saved by reference. *Note:* this is
            similar but independent from ``dill.settings[`byref`]``, as
            ``refimported`` refers to virtually all imported objects, while
            ``byref`` only affects select objects.
        **kwds: extra keyword arguments passed to :py:class:`Pickler()`.

    Raises:
        :py:exc:`PicklingError`: if pickling fails.

    Examples:

        - Save current interpreter session state:

          >>> import dill
          >>> squared = lambda x: x*x
          >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl

        - Save the state of an imported/importable module:

          >>> import dill
          >>> import pox
          >>> pox.plus_one = lambda x: x+1
          >>> dill.dump_module('pox_session.pkl', module=pox)

        - Save the state of a non-importable, module-type object:

          >>> import dill
          >>> from types import ModuleType
          >>> foo = ModuleType('foo')
          >>> foo.values = [1,2,3]
          >>> import math
          >>> foo.sin = math.sin
          >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True)

        - Restore the state of the saved modules:

          >>> import dill
          >>> dill.load_module()
          >>> squared(2)
          4
          >>> pox = dill.load_module('pox_session.pkl')
          >>> pox.plus_one(1)
          2
          >>> foo = dill.load_module('foo_session.pkl')
          >>> [foo.sin(x) for x in foo.values]
          [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]

        - Use `refimported` to save imported objects by reference:

          >>> import dill
          >>> from html.entities import html5
          >>> type(html5), len(html5)
          (dict, 2231)
          >>> import io
          >>> buf = io.BytesIO()
          >>> dill.dump_module(buf) # saves __main__, with html5 saved by value
          >>> len(buf.getvalue()) # pickle size in bytes
          71665
          >>> buf = io.BytesIO()
          >>> dill.dump_module(buf, refimported=True) # html5 saved by reference
          >>> len(buf.getvalue())
          438

    *Changed in version 0.3.6:* Function ``dump_session()`` was renamed to
    ``dump_module()``. Parameters ``main`` and ``byref`` were renamed to
    ``module`` and ``refimported``, respectively.

    Note:
        Currently, ``dill.settings['byref']`` and ``dill.settings['recurse']``
        don't apply to this function.
    """
    for old_par, par in [('main', 'module'), ('byref', 'refimported')]:
        if old_par in kwds:
            message = "The argument %r has been renamed %r" % (old_par, par)
            if old_par == 'byref':
                message += " to distinguish it from dill.settings['byref']"
            warnings.warn(message + ".", PendingDeprecationWarning)
            if locals()[par]: # the defaults are None and False
                raise TypeError("both %r and %r arguments were used" % (par, old_par))
    refimported = kwds.pop('byref', refimported)
    module = kwds.pop('main', module)

    from .settings import settings
    protocol = settings['protocol']
    main = module
    if main is None:
        main = _main_module
    elif isinstance(main, str):
        main = _import_module(main)
    if not isinstance(main, ModuleType):
        raise TypeError("%r is not a module" % main)
    if hasattr(filename, 'write'):
        file = filename
    else:
        if filename is None:
            filename = str(TEMPDIR/'session.pkl')
        file = open(filename, 'wb')
    try:
        pickler = Pickler(file, protocol, **kwds)
        pickler._original_main = main
        if refimported:
            main = _stash_modules(main)
        pickler._main = main     #FIXME: dill.settings are disabled
        pickler._byref = False   # disable pickling by name reference
        pickler._recurse = False # disable pickling recursion for globals
        pickler._session = True  # is best indicator of when pickling a session
        pickler._first_pass = True
        pickler._main_modified = main is not pickler._original_main
        pickler.dump(main)
    finally:
        if file is not filename: # if newly opened file
            file.close()
    return

# Backward compatibility.
def dump_session(filename=None, main=None, byref=False, **kwds):
    warnings.warn("dump_session() has been renamed dump_module()", PendingDeprecationWarning)
    dump_module(filename, module=main, refimported=byref, **kwds)
dump_session.__doc__ = dump_module.__doc__

class _PeekableReader:
    """lightweight stream wrapper that implements peek()"""
    def __init__(self, stream):
        self.stream = stream
    def read(self, n):
        return self.stream.read(n)
    def readline(self):
        return self.stream.readline()
    def tell(self):
        return self.stream.tell()
    def close(self):
        return self.stream.close()
    def peek(self, n):
        stream = self.stream
        try:
            if hasattr(stream, 'flush'): stream.flush()
            position = stream.tell()
            stream.seek(position) # assert seek() works before reading
            chunk = stream.read(n)
            stream.seek(position)
            return chunk
        except (AttributeError, OSError):
            raise NotImplementedError("stream is not peekable: %r", stream) from None

def _make_peekable(stream):
    """return stream as an object with a peek() method"""
    import io
    if hasattr(stream, 'peek'):
        return stream
    if not (hasattr(stream, 'tell') and hasattr(stream, 'seek')):
        try:
            return io.BufferedReader(stream)
        except Exception:
            pass
    return _PeekableReader(stream)

def _identify_module(file, main=None):
    """identify the name of the module stored in the given file-type object"""
    from pickletools import genops
    UNICODE = {'UNICODE', 'BINUNICODE', 'SHORT_BINUNICODE'}
    found_import = False
    try:
        for opcode, arg, pos in genops(file.peek(256)):
            if not found_import:
                if opcode.name in ('GLOBAL', 'SHORT_BINUNICODE') and \
                        arg.endswith('_import_module'):
                    found_import = True
            else:
                if opcode.name in UNICODE:
                    return arg
        else:
            raise UnpicklingError("reached STOP without finding main module")
    except (NotImplementedError, ValueError) as error:
        # ValueError occours when the end of the chunk is reached (without a STOP).
        if isinstance(error, NotImplementedError) and main is not None:
            # file is not peekable, but we have main.
            return None
        raise UnpicklingError("unable to identify main module") from error

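Commentary, not part of the diff: _identify_module() above only peeks at the first pickle opcodes, because dump_module() starts the stream with a call to _import_module(<name>), so the stored module name can be read without unpickling anything. A minimal sketch using the private helpers defined in this same file (private API, illustration only; results may vary with the dill version):

import io
import dill
from dill.session import _identify_module, _make_peekable

buf = io.BytesIO()
dill.dump_module(buf)                          # pickle the state of __main__ into the buffer
buf.seek(0)
print(_identify_module(_make_peekable(buf)))   # typically prints: __main__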
def load_module(
    filename: Union[str, os.PathLike] = None,
    module: Optional[Union[ModuleType, str]] = None,
    **kwds
) -> Optional[ModuleType]:
    """Update the selected module (default is :py:mod:`__main__`) with
    the state saved at ``filename``.

    Restore a module to the state saved with :py:func:`dump_module`. The
    saved module can be :py:mod:`__main__` (e.g. an interpreter session),
    an imported module, or a module-type object (e.g. created with
    :py:class:`~types.ModuleType`).

    When restoring the state of a non-importable module-type object, the
    current instance of this module may be passed as the argument ``main``.
    Otherwise, a new instance is created with :py:class:`~types.ModuleType`
    and returned.

    Args:
        filename: a path-like object or a readable stream. If `None`
            (the default), read from a named file in a temporary directory.
        module: a module object or the name of an importable module;
            the module name and kind (i.e. imported or non-imported) must
            match the name and kind of the module stored at ``filename``.
        **kwds: extra keyword arguments passed to :py:class:`Unpickler()`.

    Raises:
        :py:exc:`UnpicklingError`: if unpickling fails.
        :py:exc:`ValueError`: if the argument ``main`` and module saved
            at ``filename`` are incompatible.

    Returns:
        A module object, if the saved module is not :py:mod:`__main__` or
        a module instance wasn't provided with the argument ``main``.

    Examples:

        - Save the state of some modules:

          >>> import dill
          >>> squared = lambda x: x*x
          >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl
          >>>
          >>> import pox # an imported module
          >>> pox.plus_one = lambda x: x+1
          >>> dill.dump_module('pox_session.pkl', module=pox)
          >>>
          >>> from types import ModuleType
          >>> foo = ModuleType('foo') # a module-type object
          >>> foo.values = [1,2,3]
          >>> import math
          >>> foo.sin = math.sin
          >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True)

        - Restore the state of the interpreter:

          >>> import dill
          >>> dill.load_module() # updates __main__ from /tmp/session.pkl
          >>> squared(2)
          4

        - Load the saved state of an importable module:

          >>> import dill
          >>> pox = dill.load_module('pox_session.pkl')
          >>> pox.plus_one(1)
          2
          >>> import sys
          >>> pox in sys.modules.values()
          True

        - Load the saved state of a non-importable module-type object:

          >>> import dill
          >>> foo = dill.load_module('foo_session.pkl')
          >>> [foo.sin(x) for x in foo.values]
          [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]
          >>> import math
          >>> foo.sin is math.sin # foo.sin was saved by reference
          True
          >>> import sys
          >>> foo in sys.modules.values()
          False

        - Update the state of a non-importable module-type object:

          >>> import dill
          >>> from types import ModuleType
          >>> foo = ModuleType('foo')
          >>> foo.values = ['a','b']
          >>> foo.sin = lambda x: x*x
          >>> dill.load_module('foo_session.pkl', module=foo)
          >>> [foo.sin(x) for x in foo.values]
          [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]

    *Changed in version 0.3.6:* Function ``load_session()`` was renamed to
    ``load_module()``. Parameter ``main`` was renamed to ``module``.

    See also:
        :py:func:`load_module_asdict` to load the contents of module saved
        with :py:func:`dump_module` into a dictionary.
    """
    if 'main' in kwds:
        warnings.warn(
            "The argument 'main' has been renamed 'module'.",
            PendingDeprecationWarning
        )
        if module is not None:
            raise TypeError("both 'module' and 'main' arguments were used")
        module = kwds.pop('main')
    main = module
    if hasattr(filename, 'read'):
        file = filename
    else:
        if filename is None:
            filename = str(TEMPDIR/'session.pkl')
        file = open(filename, 'rb')
    try:
        file = _make_peekable(file)
        #FIXME: dill.settings are disabled
        unpickler = Unpickler(file, **kwds)
        unpickler._session = True

        # Resolve unpickler._main
        pickle_main = _identify_module(file, main)
        if main is None and pickle_main is not None:
            main = pickle_main
        if isinstance(main, str):
            if main.startswith('__runtime__.'):
                # Create runtime module to load the session into.
                main = ModuleType(main.partition('.')[-1])
            else:
                main = _import_module(main)
        if main is not None:
            if not isinstance(main, ModuleType):
                raise TypeError("%r is not a module" % main)
            unpickler._main = main
        else:
            main = unpickler._main

        # Check against the pickle's main.
        is_main_imported = _is_imported_module(main)
        if pickle_main is not None:
            is_runtime_mod = pickle_main.startswith('__runtime__.')
            if is_runtime_mod:
                pickle_main = pickle_main.partition('.')[-1]
            error_msg = "can't update{} module{} %r with the saved state of{} module{} %r"
            if is_runtime_mod and is_main_imported:
                raise ValueError(
                    error_msg.format(" imported", "", "", "-type object")
                    % (main.__name__, pickle_main)
                )
            if not is_runtime_mod and not is_main_imported:
                raise ValueError(
                    error_msg.format("", "-type object", " imported", "")
                    % (pickle_main, main.__name__)
                )
            if main.__name__ != pickle_main:
                raise ValueError(error_msg.format("", "", "", "") % (main.__name__, pickle_main))

        # This is for find_class() to be able to locate it.
        if not is_main_imported:
            runtime_main = '__runtime__.%s' % main.__name__
            sys.modules[runtime_main] = main

        loaded = unpickler.load()
    finally:
        if not hasattr(filename, 'read'): # if newly opened file
            file.close()
        try:
            del sys.modules[runtime_main]
        except (KeyError, NameError):
            pass
    assert loaded is main
    _restore_modules(unpickler, main)
    if main is _main_module or main is module:
        return None
    else:
        return main

# Backward compatibility.
def load_session(filename=None, main=None, **kwds):
    warnings.warn("load_session() has been renamed load_module().", PendingDeprecationWarning)
    load_module(filename, module=main, **kwds)
load_session.__doc__ = load_module.__doc__

def load_module_asdict(
    filename: Union[str, os.PathLike] = None,
    update: bool = False,
    **kwds
) -> dict:
    """
    Load the contents of a saved module into a dictionary.

    ``load_module_asdict()`` is the near-equivalent of::

        lambda filename: vars(dill.load_module(filename)).copy()

    however, does not alter the original module. Also, the path of
    the loaded module is stored in the ``__session__`` attribute.

    Args:
        filename: a path-like object or a readable stream. If `None`
            (the default), read from a named file in a temporary directory.
|
533 |
+
update: if `True`, initialize the dictionary with the current state
|
534 |
+
of the module prior to loading the state stored at filename.
|
535 |
+
**kwds: extra keyword arguments passed to :py:class:`Unpickler()`
|
536 |
+
|
537 |
+
Raises:
|
538 |
+
:py:exc:`UnpicklingError`: if unpickling fails
|
539 |
+
|
540 |
+
Returns:
|
541 |
+
A copy of the restored module's dictionary.
|
542 |
+
|
543 |
+
Note:
|
544 |
+
If ``update`` is True, the corresponding module may first be imported
|
545 |
+
into the current namespace before the saved state is loaded from
|
546 |
+
filename to the dictionary. Note that any module that is imported into
|
547 |
+
the current namespace as a side-effect of using ``update`` will not be
|
548 |
+
modified by loading the saved module in filename to a dictionary.
|
549 |
+
|
550 |
+
Example:
|
551 |
+
>>> import dill
|
552 |
+
>>> alist = [1, 2, 3]
|
553 |
+
>>> anum = 42
|
554 |
+
>>> dill.dump_module()
|
555 |
+
>>> anum = 0
|
556 |
+
>>> new_var = 'spam'
|
557 |
+
>>> main = dill.load_module_asdict()
|
558 |
+
>>> main['__name__'], main['__session__']
|
559 |
+
('__main__', '/tmp/session.pkl')
|
560 |
+
>>> main is globals() # loaded objects don't reference globals
|
561 |
+
False
|
562 |
+
>>> main['alist'] == alist
|
563 |
+
True
|
564 |
+
>>> main['alist'] is alist # was saved by value
|
565 |
+
False
|
566 |
+
>>> main['anum'] == anum # changed after the session was saved
|
567 |
+
False
|
568 |
+
>>> new_var in main # would be True if the option 'update' was set
|
569 |
+
False
|
570 |
+
"""
|
571 |
+
if 'module' in kwds:
|
572 |
+
raise TypeError("'module' is an invalid keyword argument for load_module_asdict()")
|
573 |
+
if hasattr(filename, 'read'):
|
574 |
+
file = filename
|
575 |
+
else:
|
576 |
+
if filename is None:
|
577 |
+
filename = str(TEMPDIR/'session.pkl')
|
578 |
+
file = open(filename, 'rb')
|
579 |
+
try:
|
580 |
+
file = _make_peekable(file)
|
581 |
+
main_name = _identify_module(file)
|
582 |
+
old_main = sys.modules.get(main_name)
|
583 |
+
main = ModuleType(main_name)
|
584 |
+
if update:
|
585 |
+
if old_main is None:
|
586 |
+
old_main = _import_module(main_name)
|
587 |
+
main.__dict__.update(old_main.__dict__)
|
588 |
+
else:
|
589 |
+
main.__builtins__ = __builtin__
|
590 |
+
sys.modules[main_name] = main
|
591 |
+
load_module(file, **kwds)
|
592 |
+
finally:
|
593 |
+
if not hasattr(filename, 'read'): # if newly opened file
|
594 |
+
file.close()
|
595 |
+
try:
|
596 |
+
if old_main is None:
|
597 |
+
del sys.modules[main_name]
|
598 |
+
else:
|
599 |
+
sys.modules[main_name] = old_main
|
600 |
+
except NameError: # failed before setting old_main
|
601 |
+
pass
|
602 |
+
main.__session__ = str(filename)
|
603 |
+
return main.__dict__
|
604 |
+
|
605 |
+
|
606 |
+
# Internal exports for backward compatibility with dill v0.3.5.1
|
607 |
+
# Can't be placed in dill._dill because of circular import problems.
|
608 |
+
for name in (
|
609 |
+
'_lookup_module', '_module_map', '_restore_modules', '_stash_modules',
|
610 |
+
'dump_session', 'load_session' # backward compatibility functions
|
611 |
+
):
|
612 |
+
setattr(_dill, name, globals()[name])
|
613 |
+
del name
|
env-llmeval/lib/python3.10/site-packages/dill/settings.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
"""
|
9 |
+
global settings for Pickler
|
10 |
+
"""
|
11 |
+
|
12 |
+
from pickle import DEFAULT_PROTOCOL
|
13 |
+
|
14 |
+
settings = {
|
15 |
+
#'main' : None,
|
16 |
+
'protocol' : DEFAULT_PROTOCOL,
|
17 |
+
'byref' : False,
|
18 |
+
#'strictio' : False,
|
19 |
+
'fmode' : 0, #HANDLE_FMODE
|
20 |
+
'recurse' : False,
|
21 |
+
'ignore' : False,
|
22 |
+
}
|
23 |
+
|
24 |
+
del DEFAULT_PROTOCOL
|
25 |
+
|
env-llmeval/lib/python3.10/site-packages/dill/source.py
ADDED
@@ -0,0 +1,1017 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
#
|
9 |
+
# inspired by inspect.py from Python-2.7.6
|
10 |
+
# inspect.py author: 'Ka-Ping Yee <[email protected]>'
|
11 |
+
# inspect.py merged into original dill.source by Mike McKerns 4/13/14
|
12 |
+
"""
|
13 |
+
Extensions to python's 'inspect' module, which can be used
|
14 |
+
to retrieve information from live python objects. The methods
|
15 |
+
defined in this module are augmented to facilitate access to
|
16 |
+
source code of interactively defined functions and classes,
|
17 |
+
as well as provide access to source code for objects defined
|
18 |
+
in a file.
|
19 |
+
"""
|
20 |
+
|
21 |
+
__all__ = ['findsource', 'getsourcelines', 'getsource', 'indent', 'outdent', \
|
22 |
+
'_wrap', 'dumpsource', 'getname', '_namespace', 'getimport', \
|
23 |
+
'_importable', 'importable','isdynamic', 'isfrommain']
|
24 |
+
|
25 |
+
import linecache
|
26 |
+
import re
|
27 |
+
from inspect import (getblock, getfile, getmodule, getsourcefile, indentsize,
|
28 |
+
isbuiltin, isclass, iscode, isframe, isfunction, ismethod,
|
29 |
+
ismodule, istraceback)
|
30 |
+
from tokenize import TokenError
|
31 |
+
|
32 |
+
from ._dill import IS_IPYTHON
|
33 |
+
|
34 |
+
|
35 |
+
def isfrommain(obj):
|
36 |
+
"check if object was built in __main__"
|
37 |
+
module = getmodule(obj)
|
38 |
+
if module and module.__name__ == '__main__':
|
39 |
+
return True
|
40 |
+
return False
|
41 |
+
|
42 |
+
|
43 |
+
def isdynamic(obj):
|
44 |
+
"check if object was built in the interpreter"
|
45 |
+
try: file = getfile(obj)
|
46 |
+
except TypeError: file = None
|
47 |
+
if file == '<stdin>' and isfrommain(obj):
|
48 |
+
return True
|
49 |
+
return False
|
50 |
+
|
51 |
+
|
52 |
+
def _matchlambda(func, line):
|
53 |
+
"""check if lambda object 'func' matches raw line of code 'line'"""
|
54 |
+
from .detect import code as getcode
|
55 |
+
from .detect import freevars, globalvars, varnames
|
56 |
+
dummy = lambda : '__this_is_a_big_dummy_function__'
|
57 |
+
# process the line (removing leading whitespace, etc)
|
58 |
+
lhs,rhs = line.split('lambda ',1)[-1].split(":", 1) #FIXME: if !1 inputs
|
59 |
+
try: #FIXME: unsafe
|
60 |
+
_ = eval("lambda %s : %s" % (lhs,rhs), globals(),locals())
|
61 |
+
except Exception: _ = dummy
|
62 |
+
# get code objects, for comparison
|
63 |
+
_, code = getcode(_).co_code, getcode(func).co_code
|
64 |
+
# check if func is in closure
|
65 |
+
_f = [line.count(i) for i in freevars(func).keys()]
|
66 |
+
if not _f: # not in closure
|
67 |
+
# check if code matches
|
68 |
+
if _ == code: return True
|
69 |
+
return False
|
70 |
+
# weak check on freevars
|
71 |
+
if not all(_f): return False #XXX: VERY WEAK
|
72 |
+
# weak check on varnames and globalvars
|
73 |
+
_f = varnames(func)
|
74 |
+
_f = [line.count(i) for i in _f[0]+_f[1]]
|
75 |
+
if _f and not all(_f): return False #XXX: VERY WEAK
|
76 |
+
_f = [line.count(i) for i in globalvars(func).keys()]
|
77 |
+
if _f and not all(_f): return False #XXX: VERY WEAK
|
78 |
+
# check if func is a double lambda
|
79 |
+
if (line.count('lambda ') > 1) and (lhs in freevars(func).keys()):
|
80 |
+
_lhs,_rhs = rhs.split('lambda ',1)[-1].split(":",1) #FIXME: if !1 inputs
|
81 |
+
try: #FIXME: unsafe
|
82 |
+
_f = eval("lambda %s : %s" % (_lhs,_rhs), globals(),locals())
|
83 |
+
except Exception: _f = dummy
|
84 |
+
# get code objects, for comparison
|
85 |
+
_, code = getcode(_f).co_code, getcode(func).co_code
|
86 |
+
if len(_) != len(code): return False
|
87 |
+
#NOTE: should be same code same order, but except for 't' and '\x88'
|
88 |
+
_ = set((i,j) for (i,j) in zip(_,code) if i != j)
|
89 |
+
if len(_) != 1: return False #('t','\x88')
|
90 |
+
return True
|
91 |
+
# check indentsize
|
92 |
+
if not indentsize(line): return False #FIXME: is this a good check???
|
93 |
+
# check if code 'pattern' matches
|
94 |
+
#XXX: or pattern match against dis.dis(code)? (or use uncompyle2?)
|
95 |
+
_ = _.split(_[0]) # 't' #XXX: remove matching values if starts the same?
|
96 |
+
_f = code.split(code[0]) # '\x88'
|
97 |
+
#NOTE: should be same code different order, with different first element
|
98 |
+
_ = dict(re.match(r'([\W\D\S])(.*)', _[i]).groups() for i in range(1,len(_)))
|
99 |
+
_f = dict(re.match(r'([\W\D\S])(.*)', _f[i]).groups() for i in range(1,len(_f)))
|
100 |
+
if (_.keys() == _f.keys()) and (sorted(_.values()) == sorted(_f.values())):
|
101 |
+
return True
|
102 |
+
return False
|
103 |
+
|
104 |
+
|
105 |
+
def findsource(object):
|
106 |
+
"""Return the entire source file and starting line number for an object.
|
107 |
+
For interactively-defined objects, the 'file' is the interpreter's history.
|
108 |
+
|
109 |
+
The argument may be a module, class, method, function, traceback, frame,
|
110 |
+
or code object. The source code is returned as a list of all the lines
|
111 |
+
in the file and the line number indexes a line in that list. An IOError
|
112 |
+
is raised if the source code cannot be retrieved, while a TypeError is
|
113 |
+
raised for objects where the source code is unavailable (e.g. builtins)."""
|
114 |
+
|
115 |
+
module = getmodule(object)
|
116 |
+
try: file = getfile(module)
|
117 |
+
except TypeError: file = None
|
118 |
+
is_module_main = (module and module.__name__ == '__main__' and not file)
|
119 |
+
if IS_IPYTHON and is_module_main:
|
120 |
+
#FIXME: quick fix for functions and classes in IPython interpreter
|
121 |
+
try:
|
122 |
+
file = getfile(object)
|
123 |
+
sourcefile = getsourcefile(object)
|
124 |
+
except TypeError:
|
125 |
+
if isclass(object):
|
126 |
+
for object_method in filter(isfunction, object.__dict__.values()):
|
127 |
+
# look for a method of the class
|
128 |
+
file_candidate = getfile(object_method)
|
129 |
+
if not file_candidate.startswith('<ipython-input-'):
|
130 |
+
continue
|
131 |
+
file = file_candidate
|
132 |
+
sourcefile = getsourcefile(object_method)
|
133 |
+
break
|
134 |
+
if file:
|
135 |
+
lines = linecache.getlines(file)
|
136 |
+
else:
|
137 |
+
# fallback to use history
|
138 |
+
history = '\n'.join(get_ipython().history_manager.input_hist_parsed)
|
139 |
+
lines = [line + '\n' for line in history.splitlines()]
|
140 |
+
# use readline when working in interpreter (i.e. __main__ and not file)
|
141 |
+
elif is_module_main:
|
142 |
+
try:
|
143 |
+
import readline
|
144 |
+
err = ''
|
145 |
+
except ImportError:
|
146 |
+
import sys
|
147 |
+
err = sys.exc_info()[1].args[0]
|
148 |
+
if sys.platform[:3] == 'win':
|
149 |
+
err += ", please install 'pyreadline'"
|
150 |
+
if err:
|
151 |
+
raise IOError(err)
|
152 |
+
lbuf = readline.get_current_history_length()
|
153 |
+
lines = [readline.get_history_item(i)+'\n' for i in range(1,lbuf)]
|
154 |
+
else:
|
155 |
+
try: # special handling for class instances
|
156 |
+
if not isclass(object) and isclass(type(object)): # __class__
|
157 |
+
file = getfile(module)
|
158 |
+
sourcefile = getsourcefile(module)
|
159 |
+
else: # builtins fail with a TypeError
|
160 |
+
file = getfile(object)
|
161 |
+
sourcefile = getsourcefile(object)
|
162 |
+
except (TypeError, AttributeError): # fail with better error
|
163 |
+
file = getfile(object)
|
164 |
+
sourcefile = getsourcefile(object)
|
165 |
+
if not sourcefile and file[:1] + file[-1:] != '<>':
|
166 |
+
raise IOError('source code not available')
|
167 |
+
file = sourcefile if sourcefile else file
|
168 |
+
|
169 |
+
module = getmodule(object, file)
|
170 |
+
if module:
|
171 |
+
lines = linecache.getlines(file, module.__dict__)
|
172 |
+
else:
|
173 |
+
lines = linecache.getlines(file)
|
174 |
+
|
175 |
+
if not lines:
|
176 |
+
raise IOError('could not extract source code')
|
177 |
+
|
178 |
+
#FIXME: all below may fail if exec used (i.e. exec('f = lambda x:x') )
|
179 |
+
if ismodule(object):
|
180 |
+
return lines, 0
|
181 |
+
|
182 |
+
#NOTE: beneficial if search goes from end to start of buffer history
|
183 |
+
name = pat1 = obj = ''
|
184 |
+
pat2 = r'^(\s*@)'
|
185 |
+
# pat1b = r'^(\s*%s\W*=)' % name #FIXME: finds 'f = decorate(f)', not exec
|
186 |
+
if ismethod(object):
|
187 |
+
name = object.__name__
|
188 |
+
if name == '<lambda>': pat1 = r'(.*(?<!\w)lambda(:|\s))'
|
189 |
+
else: pat1 = r'^(\s*def\s)'
|
190 |
+
object = object.__func__
|
191 |
+
if isfunction(object):
|
192 |
+
name = object.__name__
|
193 |
+
if name == '<lambda>':
|
194 |
+
pat1 = r'(.*(?<!\w)lambda(:|\s))'
|
195 |
+
obj = object #XXX: better a copy?
|
196 |
+
else: pat1 = r'^(\s*def\s)'
|
197 |
+
object = object.__code__
|
198 |
+
if istraceback(object):
|
199 |
+
object = object.tb_frame
|
200 |
+
if isframe(object):
|
201 |
+
object = object.f_code
|
202 |
+
if iscode(object):
|
203 |
+
if not hasattr(object, 'co_firstlineno'):
|
204 |
+
raise IOError('could not find function definition')
|
205 |
+
stdin = object.co_filename == '<stdin>'
|
206 |
+
if stdin:
|
207 |
+
lnum = len(lines) - 1 # can't get lnum easily, so leverage pat
|
208 |
+
if not pat1: pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)'
|
209 |
+
else:
|
210 |
+
lnum = object.co_firstlineno - 1
|
211 |
+
pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)'
|
212 |
+
pat1 = re.compile(pat1); pat2 = re.compile(pat2)
|
213 |
+
#XXX: candidate_lnum = [n for n in range(lnum) if pat1.match(lines[n])]
|
214 |
+
while lnum > 0: #XXX: won't find decorators in <stdin> ?
|
215 |
+
line = lines[lnum]
|
216 |
+
if pat1.match(line):
|
217 |
+
if not stdin: break # co_firstlineno does the job
|
218 |
+
if name == '<lambda>': # hackery needed to confirm a match
|
219 |
+
if _matchlambda(obj, line): break
|
220 |
+
else: # not a lambda, just look for the name
|
221 |
+
if name in line: # need to check for decorator...
|
222 |
+
hats = 0
|
223 |
+
for _lnum in range(lnum-1,-1,-1):
|
224 |
+
if pat2.match(lines[_lnum]): hats += 1
|
225 |
+
else: break
|
226 |
+
lnum = lnum - hats
|
227 |
+
break
|
228 |
+
lnum = lnum - 1
|
229 |
+
return lines, lnum
|
230 |
+
|
231 |
+
try: # turn instances into classes
|
232 |
+
if not isclass(object) and isclass(type(object)): # __class__
|
233 |
+
object = object.__class__ #XXX: sometimes type(class) is better?
|
234 |
+
#XXX: we don't find how the instance was built
|
235 |
+
except AttributeError: pass
|
236 |
+
if isclass(object):
|
237 |
+
name = object.__name__
|
238 |
+
pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
|
239 |
+
# make some effort to find the best matching class definition:
|
240 |
+
# use the one with the least indentation, which is the one
|
241 |
+
# that's most probably not inside a function definition.
|
242 |
+
candidates = []
|
243 |
+
for i in range(len(lines)-1,-1,-1):
|
244 |
+
match = pat.match(lines[i])
|
245 |
+
if match:
|
246 |
+
# if it's at toplevel, it's already the best one
|
247 |
+
if lines[i][0] == 'c':
|
248 |
+
return lines, i
|
249 |
+
# else add whitespace to candidate list
|
250 |
+
candidates.append((match.group(1), i))
|
251 |
+
if candidates:
|
252 |
+
# this will sort by whitespace, and by line number,
|
253 |
+
# less whitespace first #XXX: should sort high lnum before low
|
254 |
+
candidates.sort()
|
255 |
+
return lines, candidates[0][1]
|
256 |
+
else:
|
257 |
+
raise IOError('could not find class definition')
|
258 |
+
raise IOError('could not find code object')
|
259 |
+
|
260 |
+
|
261 |
+
def getblocks(object, lstrip=False, enclosing=False, locate=False):
|
262 |
+
"""Return a list of source lines and starting line number for an object.
|
263 |
+
Interactively-defined objects refer to lines in the interpreter's history.
|
264 |
+
|
265 |
+
If enclosing=True, then also return any enclosing code.
|
266 |
+
If lstrip=True, ensure there is no indentation in the first line of code.
|
267 |
+
If locate=True, then also return the line number for the block of code.
|
268 |
+
|
269 |
+
DEPRECATED: use 'getsourcelines' instead
|
270 |
+
"""
|
271 |
+
lines, lnum = findsource(object)
|
272 |
+
|
273 |
+
if ismodule(object):
|
274 |
+
if lstrip: lines = _outdent(lines)
|
275 |
+
return ([lines], [0]) if locate is True else [lines]
|
276 |
+
|
277 |
+
#XXX: 'enclosing' means: closures only? or classes and files?
|
278 |
+
indent = indentsize(lines[lnum])
|
279 |
+
block = getblock(lines[lnum:]) #XXX: catch any TokenError here?
|
280 |
+
|
281 |
+
if not enclosing or not indent:
|
282 |
+
if lstrip: block = _outdent(block)
|
283 |
+
return ([block], [lnum]) if locate is True else [block]
|
284 |
+
|
285 |
+
pat1 = r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))'; pat1 = re.compile(pat1)
|
286 |
+
pat2 = r'^(\s*@)'; pat2 = re.compile(pat2)
|
287 |
+
#pat3 = r'^(\s*class\s)'; pat3 = re.compile(pat3) #XXX: enclosing class?
|
288 |
+
#FIXME: bound methods need enclosing class (and then instantiation)
|
289 |
+
# *or* somehow apply a partial using the instance
|
290 |
+
|
291 |
+
skip = 0
|
292 |
+
line = 0
|
293 |
+
blocks = []; _lnum = []
|
294 |
+
target = ''.join(block)
|
295 |
+
while line <= lnum: #XXX: repeat lnum? or until line < lnum?
|
296 |
+
# see if starts with ('def','lambda') and contains our target block
|
297 |
+
if pat1.match(lines[line]):
|
298 |
+
if not skip:
|
299 |
+
try: code = getblock(lines[line:])
|
300 |
+
except TokenError: code = [lines[line]]
|
301 |
+
if indentsize(lines[line]) > indent: #XXX: should be >= ?
|
302 |
+
line += len(code) - skip
|
303 |
+
elif target in ''.join(code):
|
304 |
+
blocks.append(code) # save code block as the potential winner
|
305 |
+
_lnum.append(line - skip) # save the line number for the match
|
306 |
+
line += len(code) - skip
|
307 |
+
else:
|
308 |
+
line += 1
|
309 |
+
skip = 0
|
310 |
+
# find skip: the number of consecutive decorators
|
311 |
+
elif pat2.match(lines[line]):
|
312 |
+
try: code = getblock(lines[line:])
|
313 |
+
except TokenError: code = [lines[line]]
|
314 |
+
skip = 1
|
315 |
+
for _line in code[1:]: # skip lines that are decorators
|
316 |
+
if not pat2.match(_line): break
|
317 |
+
skip += 1
|
318 |
+
line += skip
|
319 |
+
# no match: reset skip and go to the next line
|
320 |
+
else:
|
321 |
+
line +=1
|
322 |
+
skip = 0
|
323 |
+
|
324 |
+
if not blocks:
|
325 |
+
blocks = [block]
|
326 |
+
_lnum = [lnum]
|
327 |
+
if lstrip: blocks = [_outdent(block) for block in blocks]
|
328 |
+
# return last match
|
329 |
+
return (blocks, _lnum) if locate is True else blocks
|
330 |
+
|
331 |
+
|
332 |
+
def getsourcelines(object, lstrip=False, enclosing=False):
|
333 |
+
"""Return a list of source lines and starting line number for an object.
|
334 |
+
Interactively-defined objects refer to lines in the interpreter's history.
|
335 |
+
|
336 |
+
The argument may be a module, class, method, function, traceback, frame,
|
337 |
+
or code object. The source code is returned as a list of the lines
|
338 |
+
corresponding to the object and the line number indicates where in the
|
339 |
+
original source file the first line of code was found. An IOError is
|
340 |
+
raised if the source code cannot be retrieved, while a TypeError is
|
341 |
+
raised for objects where the source code is unavailable (e.g. builtins).
|
342 |
+
|
343 |
+
If lstrip=True, ensure there is no indentation in the first line of code.
|
344 |
+
If enclosing=True, then also return any enclosing code."""
|
345 |
+
code, n = getblocks(object, lstrip=lstrip, enclosing=enclosing, locate=True)
|
346 |
+
return code[-1], n[-1]
|
347 |
+
|
348 |
+
|
349 |
+
#NOTE: broke backward compatibility 4/16/14 (was lstrip=True, force=True)
|
350 |
+
def getsource(object, alias='', lstrip=False, enclosing=False, \
|
351 |
+
force=False, builtin=False):
|
352 |
+
"""Return the text of the source code for an object. The source code for
|
353 |
+
interactively-defined objects are extracted from the interpreter's history.
|
354 |
+
|
355 |
+
The argument may be a module, class, method, function, traceback, frame,
|
356 |
+
or code object. The source code is returned as a single string. An
|
357 |
+
IOError is raised if the source code cannot be retrieved, while a
|
358 |
+
TypeError is raised for objects where the source code is unavailable
|
359 |
+
(e.g. builtins).
|
360 |
+
|
361 |
+
If alias is provided, then add a line of code that renames the object.
|
362 |
+
If lstrip=True, ensure there is no indentation in the first line of code.
|
363 |
+
If enclosing=True, then also return any enclosing code.
|
364 |
+
If force=True, catch (TypeError,IOError) and try to use import hooks.
|
365 |
+
If builtin=True, force an import for any builtins
|
366 |
+
"""
|
367 |
+
# hascode denotes a callable
|
368 |
+
hascode = _hascode(object)
|
369 |
+
# is a class instance type (and not in builtins)
|
370 |
+
instance = _isinstance(object)
|
371 |
+
|
372 |
+
# get source lines; if fail, try to 'force' an import
|
373 |
+
try: # fails for builtins, and other assorted object types
|
374 |
+
lines, lnum = getsourcelines(object, enclosing=enclosing)
|
375 |
+
except (TypeError, IOError): # failed to get source, resort to import hooks
|
376 |
+
if not force: # don't try to get types that findsource can't get
|
377 |
+
raise
|
378 |
+
if not getmodule(object): # get things like 'None' and '1'
|
379 |
+
if not instance: return getimport(object, alias, builtin=builtin)
|
380 |
+
# special handling (numpy arrays, ...)
|
381 |
+
_import = getimport(object, builtin=builtin)
|
382 |
+
name = getname(object, force=True)
|
383 |
+
_alias = "%s = " % alias if alias else ""
|
384 |
+
if alias == name: _alias = ""
|
385 |
+
return _import+_alias+"%s\n" % name
|
386 |
+
else: #FIXME: could use a good bit of cleanup, since using getimport...
|
387 |
+
if not instance: return getimport(object, alias, builtin=builtin)
|
388 |
+
# now we are dealing with an instance...
|
389 |
+
name = object.__class__.__name__
|
390 |
+
module = object.__module__
|
391 |
+
if module in ['builtins','__builtin__']:
|
392 |
+
return getimport(object, alias, builtin=builtin)
|
393 |
+
else: #FIXME: leverage getimport? use 'from module import name'?
|
394 |
+
lines, lnum = ["%s = __import__('%s', fromlist=['%s']).%s\n" % (name,module,name,name)], 0
|
395 |
+
obj = eval(lines[0].lstrip(name + ' = '))
|
396 |
+
lines, lnum = getsourcelines(obj, enclosing=enclosing)
|
397 |
+
|
398 |
+
# strip leading indent (helps ensure can be imported)
|
399 |
+
if lstrip or alias:
|
400 |
+
lines = _outdent(lines)
|
401 |
+
|
402 |
+
# instantiate, if there's a nice repr #XXX: BAD IDEA???
|
403 |
+
if instance: #and force: #XXX: move into findsource or getsourcelines ?
|
404 |
+
if '(' in repr(object): lines.append('%r\n' % object)
|
405 |
+
#else: #XXX: better to somehow to leverage __reduce__ ?
|
406 |
+
# reconstructor,args = object.__reduce__()
|
407 |
+
# _ = reconstructor(*args)
|
408 |
+
else: # fall back to serialization #XXX: bad idea?
|
409 |
+
#XXX: better not duplicate work? #XXX: better new/enclose=True?
|
410 |
+
lines = dumpsource(object, alias='', new=force, enclose=False)
|
411 |
+
lines, lnum = [line+'\n' for line in lines.split('\n')][:-1], 0
|
412 |
+
#else: object.__code__ # raise AttributeError
|
413 |
+
|
414 |
+
# add an alias to the source code
|
415 |
+
if alias:
|
416 |
+
if hascode:
|
417 |
+
skip = 0
|
418 |
+
for line in lines: # skip lines that are decorators
|
419 |
+
if not line.startswith('@'): break
|
420 |
+
skip += 1
|
421 |
+
#XXX: use regex from findsource / getsourcelines ?
|
422 |
+
if lines[skip].lstrip().startswith('def '): # we have a function
|
423 |
+
if alias != object.__name__:
|
424 |
+
lines.append('\n%s = %s\n' % (alias, object.__name__))
|
425 |
+
elif 'lambda ' in lines[skip]: # we have a lambda
|
426 |
+
if alias != lines[skip].split('=')[0].strip():
|
427 |
+
lines[skip] = '%s = %s' % (alias, lines[skip])
|
428 |
+
else: # ...try to use the object's name
|
429 |
+
if alias != object.__name__:
|
430 |
+
lines.append('\n%s = %s\n' % (alias, object.__name__))
|
431 |
+
else: # class or class instance
|
432 |
+
if instance:
|
433 |
+
if alias != lines[-1].split('=')[0].strip():
|
434 |
+
lines[-1] = ('%s = ' % alias) + lines[-1]
|
435 |
+
else:
|
436 |
+
name = getname(object, force=True) or object.__name__
|
437 |
+
if alias != name:
|
438 |
+
lines.append('\n%s = %s\n' % (alias, name))
|
439 |
+
return ''.join(lines)
|
440 |
+
|
441 |
+
|
442 |
+
def _hascode(object):
|
443 |
+
'''True if object has an attribute that stores it's __code__'''
|
444 |
+
return getattr(object,'__code__',None) or getattr(object,'func_code',None)
|
445 |
+
|
446 |
+
def _isinstance(object):
|
447 |
+
'''True if object is a class instance type (and is not a builtin)'''
|
448 |
+
if _hascode(object) or isclass(object) or ismodule(object):
|
449 |
+
return False
|
450 |
+
if istraceback(object) or isframe(object) or iscode(object):
|
451 |
+
return False
|
452 |
+
# special handling (numpy arrays, ...)
|
453 |
+
if not getmodule(object) and getmodule(type(object)).__name__ in ['numpy']:
|
454 |
+
return True
|
455 |
+
# # check if is instance of a builtin
|
456 |
+
# if not getmodule(object) and getmodule(type(object)).__name__ in ['__builtin__','builtins']:
|
457 |
+
# return False
|
458 |
+
_types = ('<class ',"<type 'instance'>")
|
459 |
+
if not repr(type(object)).startswith(_types): #FIXME: weak hack
|
460 |
+
return False
|
461 |
+
if not getmodule(object) or object.__module__ in ['builtins','__builtin__'] or getname(object, force=True) in ['array']:
|
462 |
+
return False
|
463 |
+
return True # by process of elimination... it's what we want
|
464 |
+
|
465 |
+
|
466 |
+
def _intypes(object):
|
467 |
+
'''check if object is in the 'types' module'''
|
468 |
+
import types
|
469 |
+
# allow user to pass in object or object.__name__
|
470 |
+
if type(object) is not type(''):
|
471 |
+
object = getname(object, force=True)
|
472 |
+
if object == 'ellipsis': object = 'EllipsisType'
|
473 |
+
return True if hasattr(types, object) else False
|
474 |
+
|
475 |
+
|
476 |
+
def _isstring(object): #XXX: isstringlike better?
|
477 |
+
'''check if object is a string-like type'''
|
478 |
+
return isinstance(object, (str, bytes))
|
479 |
+
|
480 |
+
|
481 |
+
def indent(code, spaces=4):
|
482 |
+
'''indent a block of code with whitespace (default is 4 spaces)'''
|
483 |
+
indent = indentsize(code)
|
484 |
+
if type(spaces) is int: spaces = ' '*spaces
|
485 |
+
# if '\t' is provided, will indent with a tab
|
486 |
+
nspaces = indentsize(spaces)
|
487 |
+
# blank lines (etc) need to be ignored
|
488 |
+
lines = code.split('\n')
|
489 |
+
## stq = "'''"; dtq = '"""'
|
490 |
+
## in_stq = in_dtq = False
|
491 |
+
for i in range(len(lines)):
|
492 |
+
#FIXME: works... but shouldn't indent 2nd+ lines of multiline doc
|
493 |
+
_indent = indentsize(lines[i])
|
494 |
+
if indent > _indent: continue
|
495 |
+
lines[i] = spaces+lines[i]
|
496 |
+
## #FIXME: may fail when stq and dtq in same line (depends on ordering)
|
497 |
+
## nstq, ndtq = lines[i].count(stq), lines[i].count(dtq)
|
498 |
+
## if not in_dtq and not in_stq:
|
499 |
+
## lines[i] = spaces+lines[i] # we indent
|
500 |
+
## # entering a comment block
|
501 |
+
## if nstq%2: in_stq = not in_stq
|
502 |
+
## if ndtq%2: in_dtq = not in_dtq
|
503 |
+
## # leaving a comment block
|
504 |
+
## elif in_dtq and ndtq%2: in_dtq = not in_dtq
|
505 |
+
## elif in_stq and nstq%2: in_stq = not in_stq
|
506 |
+
## else: pass
|
507 |
+
if lines[-1].strip() == '': lines[-1] = ''
|
508 |
+
return '\n'.join(lines)
|
509 |
+
|
510 |
+
|
511 |
+
def _outdent(lines, spaces=None, all=True):
|
512 |
+
'''outdent lines of code, accounting for docs and line continuations'''
|
513 |
+
indent = indentsize(lines[0])
|
514 |
+
if spaces is None or spaces > indent or spaces < 0: spaces = indent
|
515 |
+
for i in range(len(lines) if all else 1):
|
516 |
+
#FIXME: works... but shouldn't outdent 2nd+ lines of multiline doc
|
517 |
+
_indent = indentsize(lines[i])
|
518 |
+
if spaces > _indent: _spaces = _indent
|
519 |
+
else: _spaces = spaces
|
520 |
+
lines[i] = lines[i][_spaces:]
|
521 |
+
return lines
|
522 |
+
|
523 |
+
def outdent(code, spaces=None, all=True):
|
524 |
+
'''outdent a block of code (default is to strip all leading whitespace)'''
|
525 |
+
indent = indentsize(code)
|
526 |
+
if spaces is None or spaces > indent or spaces < 0: spaces = indent
|
527 |
+
#XXX: will this delete '\n' in some cases?
|
528 |
+
if not all: return code[spaces:]
|
529 |
+
return '\n'.join(_outdent(code.split('\n'), spaces=spaces, all=all))
|
530 |
+
|
531 |
+
|
532 |
+
#XXX: not sure what the point of _wrap is...
|
533 |
+
__globals__ = globals()
|
534 |
+
__locals__ = locals()
|
535 |
+
def _wrap(f):
|
536 |
+
""" encapsulate a function and it's __import__ """
|
537 |
+
def func(*args, **kwds):
|
538 |
+
try:
|
539 |
+
# _ = eval(getsource(f, force=True)) #XXX: safer but less robust
|
540 |
+
exec(getimportable(f, alias='_'), __globals__, __locals__)
|
541 |
+
except Exception:
|
542 |
+
raise ImportError('cannot import name ' + f.__name__)
|
543 |
+
return _(*args, **kwds)
|
544 |
+
func.__name__ = f.__name__
|
545 |
+
func.__doc__ = f.__doc__
|
546 |
+
return func
|
547 |
+
|
548 |
+
|
549 |
+
def _enclose(object, alias=''): #FIXME: needs alias to hold returned object
|
550 |
+
"""create a function enclosure around the source of some object"""
|
551 |
+
#XXX: dummy and stub should append a random string
|
552 |
+
dummy = '__this_is_a_big_dummy_enclosing_function__'
|
553 |
+
stub = '__this_is_a_stub_variable__'
|
554 |
+
code = 'def %s():\n' % dummy
|
555 |
+
code += indent(getsource(object, alias=stub, lstrip=True, force=True))
|
556 |
+
code += indent('return %s\n' % stub)
|
557 |
+
if alias: code += '%s = ' % alias
|
558 |
+
code += '%s(); del %s\n' % (dummy, dummy)
|
559 |
+
#code += "globals().pop('%s',lambda :None)()\n" % dummy
|
560 |
+
return code
|
561 |
+
|
562 |
+
|
563 |
+
def dumpsource(object, alias='', new=False, enclose=True):
|
564 |
+
"""'dump to source', where the code includes a pickled object.
|
565 |
+
|
566 |
+
If new=True and object is a class instance, then create a new
|
567 |
+
instance using the unpacked class source code. If enclose, then
|
568 |
+
create the object inside a function enclosure (thus minimizing
|
569 |
+
any global namespace pollution).
|
570 |
+
"""
|
571 |
+
from dill import dumps
|
572 |
+
pik = repr(dumps(object))
|
573 |
+
code = 'import dill\n'
|
574 |
+
if enclose:
|
575 |
+
stub = '__this_is_a_stub_variable__' #XXX: *must* be same _enclose.stub
|
576 |
+
pre = '%s = ' % stub
|
577 |
+
new = False #FIXME: new=True doesn't work with enclose=True
|
578 |
+
else:
|
579 |
+
stub = alias
|
580 |
+
pre = '%s = ' % stub if alias else alias
|
581 |
+
|
582 |
+
# if a 'new' instance is not needed, then just dump and load
|
583 |
+
if not new or not _isinstance(object):
|
584 |
+
code += pre + 'dill.loads(%s)\n' % pik
|
585 |
+
else: #XXX: other cases where source code is needed???
|
586 |
+
code += getsource(object.__class__, alias='', lstrip=True, force=True)
|
587 |
+
mod = repr(object.__module__) # should have a module (no builtins here)
|
588 |
+
code += pre + 'dill.loads(%s.replace(b%s,bytes(__name__,"UTF-8")))\n' % (pik,mod)
|
589 |
+
#code += 'del %s' % object.__class__.__name__ #NOTE: kills any existing!
|
590 |
+
|
591 |
+
if enclose:
|
592 |
+
# generation of the 'enclosure'
|
593 |
+
dummy = '__this_is_a_big_dummy_object__'
|
594 |
+
dummy = _enclose(dummy, alias=alias)
|
595 |
+
# hack to replace the 'dummy' with the 'real' code
|
596 |
+
dummy = dummy.split('\n')
|
597 |
+
code = dummy[0]+'\n' + indent(code) + '\n'.join(dummy[-3:])
|
598 |
+
|
599 |
+
return code #XXX: better 'dumpsourcelines', returning list of lines?
|
600 |
+
|
601 |
+
|
602 |
+
def getname(obj, force=False, fqn=False): #XXX: throw(?) to raise error on fail?
|
603 |
+
"""get the name of the object. for lambdas, get the name of the pointer """
|
604 |
+
if fqn: return '.'.join(_namespace(obj))
|
605 |
+
module = getmodule(obj)
|
606 |
+
if not module: # things like "None" and "1"
|
607 |
+
if not force: return None
|
608 |
+
return repr(obj)
|
609 |
+
try:
|
610 |
+
#XXX: 'wrong' for decorators and curried functions ?
|
611 |
+
# if obj.func_closure: ...use logic from getimportable, etc ?
|
612 |
+
name = obj.__name__
|
613 |
+
if name == '<lambda>':
|
614 |
+
return getsource(obj).split('=',1)[0].strip()
|
615 |
+
# handle some special cases
|
616 |
+
if module.__name__ in ['builtins','__builtin__']:
|
617 |
+
if name == 'ellipsis': name = 'EllipsisType'
|
618 |
+
return name
|
619 |
+
except AttributeError: #XXX: better to just throw AttributeError ?
|
620 |
+
if not force: return None
|
621 |
+
name = repr(obj)
|
622 |
+
if name.startswith('<'): # or name.split('('):
|
623 |
+
return None
|
624 |
+
return name
|
625 |
+
|
626 |
+
|
627 |
+
def _namespace(obj):
|
628 |
+
"""_namespace(obj); return namespace hierarchy (as a list of names)
|
629 |
+
for the given object. For an instance, find the class hierarchy.
|
630 |
+
|
631 |
+
For example:
|
632 |
+
|
633 |
+
>>> from functools import partial
|
634 |
+
>>> p = partial(int, base=2)
|
635 |
+
>>> _namespace(p)
|
636 |
+
[\'functools\', \'partial\']
|
637 |
+
"""
|
638 |
+
# mostly for functions and modules and such
|
639 |
+
#FIXME: 'wrong' for decorators and curried functions
|
640 |
+
try: #XXX: needs some work and testing on different types
|
641 |
+
module = qual = str(getmodule(obj)).split()[1].strip('>').strip('"').strip("'")
|
642 |
+
qual = qual.split('.')
|
643 |
+
if ismodule(obj):
|
644 |
+
return qual
|
645 |
+
# get name of a lambda, function, etc
|
646 |
+
name = getname(obj) or obj.__name__ # failing, raise AttributeError
|
647 |
+
# check special cases (NoneType, ...)
|
648 |
+
if module in ['builtins','__builtin__']: # BuiltinFunctionType
|
649 |
+
if _intypes(name): return ['types'] + [name]
|
650 |
+
return qual + [name] #XXX: can be wrong for some aliased objects
|
651 |
+
except Exception: pass
|
652 |
+
# special case: numpy.inf and numpy.nan (we don't want them as floats)
|
653 |
+
if str(obj) in ['inf','nan','Inf','NaN']: # is more, but are they needed?
|
654 |
+
return ['numpy'] + [str(obj)]
|
655 |
+
# mostly for classes and class instances and such
|
656 |
+
module = getattr(obj.__class__, '__module__', None)
|
657 |
+
qual = str(obj.__class__)
|
658 |
+
try: qual = qual[qual.index("'")+1:-2]
|
659 |
+
except ValueError: pass # str(obj.__class__) made the 'try' unnecessary
|
660 |
+
qual = qual.split(".")
|
661 |
+
if module in ['builtins','__builtin__']:
|
662 |
+
# check special cases (NoneType, Ellipsis, ...)
|
663 |
+
if qual[-1] == 'ellipsis': qual[-1] = 'EllipsisType'
|
664 |
+
if _intypes(qual[-1]): module = 'types' #XXX: BuiltinFunctionType
|
665 |
+
qual = [module] + qual
|
666 |
+
return qual
|
667 |
+
|
668 |
+
|
669 |
+
#NOTE: 05/25/14 broke backward compatibility: added 'alias' as 3rd argument
|
670 |
+
def _getimport(head, tail, alias='', verify=True, builtin=False):
|
671 |
+
"""helper to build a likely import string from head and tail of namespace.
|
672 |
+
('head','tail') are used in the following context: "from head import tail"
|
673 |
+
|
674 |
+
If verify=True, then test the import string before returning it.
|
675 |
+
If builtin=True, then force an import for builtins where possible.
|
676 |
+
If alias is provided, then rename the object on import.
|
677 |
+
"""
|
678 |
+
# special handling for a few common types
|
679 |
+
if tail in ['Ellipsis', 'NotImplemented'] and head in ['types']:
|
680 |
+
head = len.__module__
|
681 |
+
elif tail in ['None'] and head in ['types']:
|
682 |
+
_alias = '%s = ' % alias if alias else ''
|
683 |
+
if alias == tail: _alias = ''
|
684 |
+
return _alias+'%s\n' % tail
|
685 |
+
# we don't need to import from builtins, so return ''
|
686 |
+
# elif tail in ['NoneType','int','float','long','complex']: return '' #XXX: ?
|
687 |
+
if head in ['builtins','__builtin__']:
|
688 |
+
# special cases (NoneType, Ellipsis, ...) #XXX: BuiltinFunctionType
|
689 |
+
if tail == 'ellipsis': tail = 'EllipsisType'
|
690 |
+
if _intypes(tail): head = 'types'
|
691 |
+
elif not builtin:
|
692 |
+
_alias = '%s = ' % alias if alias else ''
|
693 |
+
if alias == tail: _alias = ''
|
694 |
+
return _alias+'%s\n' % tail
|
695 |
+
else: pass # handle builtins below
|
696 |
+
# get likely import string
|
697 |
+
if not head: _str = "import %s" % tail
|
698 |
+
else: _str = "from %s import %s" % (head, tail)
|
699 |
+
_alias = " as %s\n" % alias if alias else "\n"
|
700 |
+
if alias == tail: _alias = "\n"
|
701 |
+
_str += _alias
|
702 |
+
# FIXME: fails on most decorators, currying, and such...
|
703 |
+
# (could look for magic __wrapped__ or __func__ attr)
|
704 |
+
# (could fix in 'namespace' to check obj for closure)
|
705 |
+
if verify and not head.startswith('dill.'):# weird behavior for dill
|
706 |
+
#print(_str)
|
707 |
+
try: exec(_str) #XXX: check if == obj? (name collision)
|
708 |
+
except ImportError: #XXX: better top-down or bottom-up recursion?
|
709 |
+
_head = head.rsplit(".",1)[0] #(or get all, then compare == obj?)
|
710 |
+
if not _head: raise
|
711 |
+
if _head != head:
|
712 |
+
_str = _getimport(_head, tail, alias, verify)
|
713 |
+
return _str
|
714 |
+
|
715 |
+
|
716 |
+
#XXX: rename builtin to force? vice versa? verify to force? (as in getsource)
|
717 |
+
#NOTE: 05/25/14 broke backward compatibility: added 'alias' as 2nd argument
|
718 |
+
def getimport(obj, alias='', verify=True, builtin=False, enclosing=False):
|
719 |
+
"""get the likely import string for the given object
|
720 |
+
|
721 |
+
obj is the object to inspect
|
722 |
+
If verify=True, then test the import string before returning it.
|
723 |
+
If builtin=True, then force an import for builtins where possible.
|
724 |
+
If enclosing=True, get the import for the outermost enclosing callable.
|
725 |
+
If alias is provided, then rename the object on import.
|
726 |
+
"""
|
727 |
+
if enclosing:
|
728 |
+
from .detect import outermost
|
729 |
+
_obj = outermost(obj)
|
730 |
+
obj = _obj if _obj else obj
|
731 |
+
# get the namespace
|
732 |
+
qual = _namespace(obj)
|
733 |
+
head = '.'.join(qual[:-1])
|
734 |
+
tail = qual[-1]
|
735 |
+
# for named things... with a nice repr #XXX: move into _namespace?
|
736 |
+
try: # look for '<...>' and be mindful it might be in lists, dicts, etc...
|
737 |
+
name = repr(obj).split('<',1)[1].split('>',1)[1]
|
738 |
+
name = None # we have a 'object'-style repr
|
739 |
+
except Exception: # it's probably something 'importable'
|
740 |
+
if head in ['builtins','__builtin__']:
|
741 |
+
name = repr(obj) #XXX: catch [1,2], (1,2), set([1,2])... others?
|
742 |
+
else:
|
743 |
+
name = repr(obj).split('(')[0]
|
744 |
+
#if not repr(obj).startswith('<'): name = repr(obj).split('(')[0]
|
745 |
+
#else: name = None
|
746 |
+
if name: # try using name instead of tail
|
747 |
+
try: return _getimport(head, name, alias, verify, builtin)
|
748 |
+
except ImportError: pass
|
749 |
+
except SyntaxError:
|
750 |
+
if head in ['builtins','__builtin__']:
|
751 |
+
_alias = '%s = ' % alias if alias else ''
|
752 |
+
if alias == name: _alias = ''
|
753 |
+
return _alias+'%s\n' % name
|
754 |
+
else: pass
|
755 |
+
try:
|
756 |
+
#if type(obj) is type(abs): _builtin = builtin # BuiltinFunctionType
|
757 |
+
#else: _builtin = False
|
758 |
+
return _getimport(head, tail, alias, verify, builtin)
|
759 |
+
except ImportError:
|
760 |
+
raise # could do some checking against obj
|
761 |
+
except SyntaxError:
|
762 |
+
if head in ['builtins','__builtin__']:
|
763 |
+
_alias = '%s = ' % alias if alias else ''
|
764 |
+
if alias == tail: _alias = ''
|
765 |
+
return _alias+'%s\n' % tail
|
766 |
+
raise # could do some checking against obj
|
767 |
+
|
768 |
+
|
769 |
+
def _importable(obj, alias='', source=None, enclosing=False, force=True, \
|
770 |
+
builtin=True, lstrip=True):
|
771 |
+
"""get an import string (or the source code) for the given object
|
772 |
+
|
773 |
+
This function will attempt to discover the name of the object, or the repr
|
774 |
+
of the object, or the source code for the object. To attempt to force
|
775 |
+
discovery of the source code, use source=True, to attempt to force the
|
776 |
+
use of an import, use source=False; otherwise an import will be sought
|
777 |
+
for objects not defined in __main__. The intent is to build a string
|
778 |
+
that can be imported from a python file. obj is the object to inspect.
|
779 |
+
If alias is provided, then rename the object with the given alias.
|
780 |
+
|
781 |
+
If source=True, use these options:
|
782 |
+
If enclosing=True, then also return any enclosing code.
|
783 |
+
If force=True, catch (TypeError,IOError) and try to use import hooks.
|
784 |
+
If lstrip=True, ensure there is no indentation in the first line of code.
|
785 |
+
|
786 |
+
If source=False, use these options:
|
787 |
+
If enclosing=True, get the import for the outermost enclosing callable.
|
788 |
+
If force=True, then don't test the import string before returning it.
|
789 |
+
If builtin=True, then force an import for builtins where possible.
|
790 |
+
"""
|
791 |
+
if source is None:
|
792 |
+
source = True if isfrommain(obj) else False
|
793 |
+
if source: # first try to get the source
|
794 |
+
try:
|
795 |
+
return getsource(obj, alias, enclosing=enclosing, \
|
796 |
+
force=force, lstrip=lstrip, builtin=builtin)
|
797 |
+
except Exception: pass
|
798 |
+
try:
|
799 |
+
if not _isinstance(obj):
|
800 |
+
return getimport(obj, alias, enclosing=enclosing, \
|
801 |
+
verify=(not force), builtin=builtin)
|
802 |
+
# first 'get the import', then 'get the instance'
|
803 |
+
_import = getimport(obj, enclosing=enclosing, \
|
804 |
+
verify=(not force), builtin=builtin)
|
805 |
+
name = getname(obj, force=True)
|
806 |
+
if not name:
|
807 |
+
raise AttributeError("object has no atribute '__name__'")
|
808 |
+
_alias = "%s = " % alias if alias else ""
|
809 |
+
if alias == name: _alias = ""
|
810 |
+
return _import+_alias+"%s\n" % name
|
811 |
+
|
812 |
+
except Exception: pass
|
813 |
+
if not source: # try getsource, only if it hasn't been tried yet
|
814 |
+
try:
|
815 |
+
return getsource(obj, alias, enclosing=enclosing, \
|
816 |
+
force=force, lstrip=lstrip, builtin=builtin)
|
817 |
+
except Exception: pass
|
818 |
+
# get the name (of functions, lambdas, and classes)
|
819 |
+
# or hope that obj can be built from the __repr__
|
820 |
+
#XXX: what to do about class instances and such?
|
821 |
+
obj = getname(obj, force=force)
|
822 |
+
# we either have __repr__ or __name__ (or None)
|
823 |
+
if not obj or obj.startswith('<'):
|
824 |
+
raise AttributeError("object has no atribute '__name__'")
|
825 |
+
_alias = '%s = ' % alias if alias else ''
|
826 |
+
if alias == obj: _alias = ''
|
827 |
+
return _alias+'%s\n' % obj
|
828 |
+
#XXX: possible failsafe... (for example, for instances when source=False)
|
829 |
+
# "import dill; result = dill.loads(<pickled_object>); # repr(<object>)"
|
830 |
+
|
831 |
+
def _closuredimport(func, alias='', builtin=False):
|
832 |
+
"""get import for closured objects; return a dict of 'name' and 'import'"""
|
833 |
+
import re
|
834 |
+
from .detect import freevars, outermost
|
835 |
+
free_vars = freevars(func)
|
836 |
+
func_vars = {}
|
837 |
+
# split into 'funcs' and 'non-funcs'
|
838 |
+
for name,obj in list(free_vars.items()):
|
839 |
+
if not isfunction(obj): continue
|
840 |
+
# get import for 'funcs'
|
841 |
+
fobj = free_vars.pop(name)
|
842 |
+
src = getsource(fobj)
|
843 |
+
if src.lstrip().startswith('@'): # we have a decorator
|
844 |
+
src = getimport(fobj, alias=alias, builtin=builtin)
|
845 |
+
else: # we have to "hack" a bit... and maybe be lucky
|
846 |
+
encl = outermost(func)
|
847 |
+
# pattern: 'func = enclosing(fobj'
|
848 |
+
pat = r'.*[\w\s]=\s*'+getname(encl)+r'\('+getname(fobj)
|
849 |
+
mod = getname(getmodule(encl))
|
850 |
+
#HACK: get file containing 'outer' function; is func there?
|
851 |
+
lines,_ = findsource(encl)
|
852 |
+
candidate = [line for line in lines if getname(encl) in line and \
|
853 |
+
re.match(pat, line)]
|
854 |
+
if not candidate:
|
855 |
+
mod = getname(getmodule(fobj))
|
856 |
+
#HACK: get file containing 'inner' function; is func there?
|
857 |
+
lines,_ = findsource(fobj)
|
858 |
+
candidate = [line for line in lines \
|
859 |
+
if getname(fobj) in line and re.match(pat, line)]
|
860 |
+
if not len(candidate): raise TypeError('import could not be found')
|
861 |
+
candidate = candidate[-1]
|
862 |
+
name = candidate.split('=',1)[0].split()[-1].strip()
|
863 |
+
src = _getimport(mod, name, alias=alias, builtin=builtin)
|
864 |
+
func_vars[name] = src
|
865 |
+
if not func_vars:
|
866 |
+
name = outermost(func)
|
867 |
+
mod = getname(getmodule(name))
|
868 |
+
if not mod or name is func: # then it can be handled by getimport
|
869 |
+
name = getname(func, force=True) #XXX: better key?
|
870 |
+
src = getimport(func, alias=alias, builtin=builtin)
|
871 |
+
else:
|
872 |
+
lines,_ = findsource(name)
|
873 |
+
# pattern: 'func = enclosing('
|
874 |
+
candidate = [line for line in lines if getname(name) in line and \
|
875 |
+
                         re.match(r'.*[\w\s]=\s*'+getname(name)+r'\(', line)]
            if not len(candidate): raise TypeError('import could not be found')
            candidate = candidate[-1]
            name = candidate.split('=',1)[0].split()[-1].strip()
            src = _getimport(mod, name, alias=alias, builtin=builtin)
            func_vars[name] = src
    return func_vars

#XXX: should be able to use __qualname__
def _closuredsource(func, alias=''):
    """get source code for closured objects; return a dict of 'name'
    and 'code blocks'"""
    #FIXME: this entire function is a messy messy HACK
    #      - pollutes global namespace
    #      - fails if name of freevars are reused
    #      - can unnecessarily duplicate function code
    from .detect import freevars
    free_vars = freevars(func)
    func_vars = {}
    # split into 'funcs' and 'non-funcs'
    for name,obj in list(free_vars.items()):
        if not isfunction(obj):
            # get source for 'non-funcs'
            free_vars[name] = getsource(obj, force=True, alias=name)
            continue
        # get source for 'funcs'
        fobj = free_vars.pop(name)
        src = getsource(fobj, alias) # DO NOT include dependencies
        # if source doesn't start with '@', use name as the alias
        if not src.lstrip().startswith('@'): #FIXME: 'enclose' in dummy;
            src = importable(fobj,alias=name)# wrong ref 'name'
            org = getsource(func, alias, enclosing=False, lstrip=True)
            src = (src, org) # undecorated first, then target
        else: #NOTE: reproduces the code!
            org = getsource(func, enclosing=True, lstrip=False)
            src = importable(fobj, alias, source=True) # include dependencies
            src = (org, src) # target first, then decorated
        func_vars[name] = src
    src = ''.join(free_vars.values())
    if not func_vars: #FIXME: 'enclose' in dummy; wrong ref 'name'
        org = getsource(func, alias, force=True, enclosing=False, lstrip=True)
        src = (src, org) # variables first, then target
    else:
        src = (src, None) # just variables (better '' instead of None?)
    func_vars[None] = src
    # FIXME: remove duplicates (however, order is important...)
    return func_vars

def importable(obj, alias='', source=None, builtin=True):
    """get an importable string (i.e. source code or the import string)
    for the given object, including any required objects from the enclosing
    and global scope

    This function will attempt to discover the name of the object, or the repr
    of the object, or the source code for the object. To attempt to force
    discovery of the source code, use source=True, to attempt to force the
    use of an import, use source=False; otherwise an import will be sought
    for objects not defined in __main__. The intent is to build a string
    that can be imported from a python file.

    obj is the object to inspect. If alias is provided, then rename the
    object with the given alias. If builtin=True, then force an import for
    builtins where possible.
    """
    #NOTE: we always 'force', and 'lstrip' as necessary
    #NOTE: for 'enclosing', use importable(outermost(obj))
    if source is None:
        source = True if isfrommain(obj) else False
    elif builtin and isbuiltin(obj):
        source = False
    tried_source = tried_import = False
    while True:
        if not source: # we want an import
            try:
                if _isinstance(obj): # for instances, punt to _importable
                    return _importable(obj, alias, source=False, builtin=builtin)
                src = _closuredimport(obj, alias=alias, builtin=builtin)
                if len(src) == 0:
                    raise NotImplementedError('not implemented')
                if len(src) > 1:
                    raise NotImplementedError('not implemented')
                return list(src.values())[0]
            except Exception:
                if tried_source: raise
                tried_import = True
        # we want the source
        try:
            src = _closuredsource(obj, alias=alias)
            if len(src) == 0:
                raise NotImplementedError('not implemented')
            # groan... an inline code stitcher
            def _code_stitcher(block):
                "stitch together the strings in tuple 'block'"
                if block[0] and block[-1]: block = '\n'.join(block)
                elif block[0]: block = block[0]
                elif block[-1]: block = block[-1]
                else: block = ''
                return block
            # get free_vars first
            _src = _code_stitcher(src.pop(None))
            _src = [_src] if _src else []
            # get func_vars
            for xxx in src.values():
                xxx = _code_stitcher(xxx)
                if xxx: _src.append(xxx)
            # make a single source string
            if not len(_src):
                src = ''
            elif len(_src) == 1:
                src = _src[0]
            else:
                src = '\n'.join(_src)
            # get source code of objects referred to by obj in global scope
            from .detect import globalvars
            obj = globalvars(obj) #XXX: don't worry about alias? recurse? etc?
            obj = list(getsource(_obj,name,force=True) for (name,_obj) in obj.items() if not isbuiltin(_obj))
            obj = '\n'.join(obj) if obj else ''
            # combine all referred-to source (global then enclosing)
            if not obj: return src
            if not src: return obj
            return obj + src
        except Exception:
            if tried_import: raise
            tried_source = True
            source = not source
    # should never get here
    return


# backward compatibility
def getimportable(obj, alias='', byname=True, explicit=False):
    return importable(obj,alias,source=(not byname),builtin=explicit)
    #return outdent(_importable(obj,alias,source=(not byname),builtin=explicit))
def likely_import(obj, passive=False, explicit=False):
    return getimport(obj, verify=(not passive), builtin=explicit)
def _likely_import(first, last, passive=False, explicit=True):
    return _getimport(first, last, verify=(not passive), builtin=explicit)
_get_name = getname
getblocks_from_history = getblocks



# EOF
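For orientation, here is a hedged usage sketch of the public entry point above, dill.source.importable; the exact returned strings depend on where the object is defined, so the comments show indicative output only.

# Hedged usage sketch of dill.source.importable (assumes dill is installed).
from math import sqrt
from dill.source import importable

def add(x, y):
    return x + y

# An object defined in __main__ comes back as its own source code...
print(importable(add))    # e.g. "def add(x, y):\n    return x + y\n"
# ...while a library object comes back as an import statement.
print(importable(sqrt))   # e.g. "from math import sqrt\n"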
env-llmeval/lib/python3.10/site-packages/dill/temp.py
ADDED
@@ -0,0 +1,252 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
"""
|
9 |
+
Methods for serialized objects (or source code) stored in temporary files
|
10 |
+
and file-like objects.
|
11 |
+
"""
|
12 |
+
#XXX: better instead to have functions write to any given file-like object ?
|
13 |
+
#XXX: currently, all file-like objects are created by the function...
|
14 |
+
|
15 |
+
__all__ = ['dump_source', 'dump', 'dumpIO_source', 'dumpIO',\
|
16 |
+
'load_source', 'load', 'loadIO_source', 'loadIO',\
|
17 |
+
'capture']
|
18 |
+
|
19 |
+
import contextlib
|
20 |
+
|
21 |
+
|
22 |
+
@contextlib.contextmanager
|
23 |
+
def capture(stream='stdout'):
|
24 |
+
"""builds a context that temporarily replaces the given stream name
|
25 |
+
|
26 |
+
>>> with capture('stdout') as out:
|
27 |
+
... print ("foo!")
|
28 |
+
...
|
29 |
+
>>> print (out.getvalue())
|
30 |
+
foo!
|
31 |
+
|
32 |
+
"""
|
33 |
+
import sys
|
34 |
+
from io import StringIO
|
35 |
+
orig = getattr(sys, stream)
|
36 |
+
setattr(sys, stream, StringIO())
|
37 |
+
try:
|
38 |
+
yield getattr(sys, stream)
|
39 |
+
finally:
|
40 |
+
setattr(sys, stream, orig)
|
41 |
+
|
42 |
+
|
43 |
+
def b(x): # deal with b'foo' versus 'foo'
|
44 |
+
import codecs
|
45 |
+
return codecs.latin_1_encode(x)[0]
|
46 |
+
|
47 |
+
def load_source(file, **kwds):
|
48 |
+
"""load an object that was stored with dill.temp.dump_source
|
49 |
+
|
50 |
+
file: filehandle
|
51 |
+
alias: string name of stored object
|
52 |
+
mode: mode to open the file, one of: {'r', 'rb'}
|
53 |
+
|
54 |
+
>>> f = lambda x: x**2
|
55 |
+
>>> pyfile = dill.temp.dump_source(f, alias='_f')
|
56 |
+
>>> _f = dill.temp.load_source(pyfile)
|
57 |
+
>>> _f(4)
|
58 |
+
16
|
59 |
+
"""
|
60 |
+
alias = kwds.pop('alias', None)
|
61 |
+
mode = kwds.pop('mode', 'r')
|
62 |
+
fname = getattr(file, 'name', file) # fname=file.name or fname=file (if str)
|
63 |
+
source = open(fname, mode=mode, **kwds).read()
|
64 |
+
if not alias:
|
65 |
+
tag = source.strip().splitlines()[-1].split()
|
66 |
+
if tag[0] != '#NAME:':
|
67 |
+
stub = source.splitlines()[0]
|
68 |
+
raise IOError("unknown name for code: %s" % stub)
|
69 |
+
alias = tag[-1]
|
70 |
+
local = {}
|
71 |
+
exec(source, local)
|
72 |
+
_ = eval("%s" % alias, local)
|
73 |
+
return _
|
74 |
+
|
75 |
+
def dump_source(object, **kwds):
|
76 |
+
"""write object source to a NamedTemporaryFile (instead of dill.dump)
|
77 |
+
Loads with "import" or "dill.temp.load_source". Returns the filehandle.
|
78 |
+
|
79 |
+
>>> f = lambda x: x**2
|
80 |
+
>>> pyfile = dill.temp.dump_source(f, alias='_f')
|
81 |
+
>>> _f = dill.temp.load_source(pyfile)
|
82 |
+
>>> _f(4)
|
83 |
+
16
|
84 |
+
|
85 |
+
>>> f = lambda x: x**2
|
86 |
+
>>> pyfile = dill.temp.dump_source(f, dir='.')
|
87 |
+
>>> modulename = os.path.basename(pyfile.name).split('.py')[0]
|
88 |
+
>>> exec('from %s import f as _f' % modulename)
|
89 |
+
>>> _f(4)
|
90 |
+
16
|
91 |
+
|
92 |
+
Optional kwds:
|
93 |
+
If 'alias' is specified, the object will be renamed to the given string.
|
94 |
+
|
95 |
+
If 'prefix' is specified, the file name will begin with that prefix,
|
96 |
+
otherwise a default prefix is used.
|
97 |
+
|
98 |
+
If 'dir' is specified, the file will be created in that directory,
|
99 |
+
otherwise a default directory is used.
|
100 |
+
|
101 |
+
If 'text' is specified and true, the file is opened in text
|
102 |
+
mode. Else (the default) the file is opened in binary mode. On
|
103 |
+
some operating systems, this makes no difference.
|
104 |
+
|
105 |
+
NOTE: Keep the return value for as long as you want your file to exist !
|
106 |
+
""" #XXX: write a "load_source"?
|
107 |
+
from .source import importable, getname
|
108 |
+
import tempfile
|
109 |
+
kwds.setdefault('delete', True)
|
110 |
+
kwds.pop('suffix', '') # this is *always* '.py'
|
111 |
+
alias = kwds.pop('alias', '') #XXX: include an alias so a name is known
|
112 |
+
name = str(alias) or getname(object)
|
113 |
+
name = "\n#NAME: %s\n" % name
|
114 |
+
#XXX: assumes kwds['dir'] is writable and on $PYTHONPATH
|
115 |
+
file = tempfile.NamedTemporaryFile(suffix='.py', **kwds)
|
116 |
+
file.write(b(''.join([importable(object, alias=alias),name])))
|
117 |
+
file.flush()
|
118 |
+
return file
|
119 |
+
|
120 |
+
def load(file, **kwds):
|
121 |
+
"""load an object that was stored with dill.temp.dump
|
122 |
+
|
123 |
+
file: filehandle
|
124 |
+
mode: mode to open the file, one of: {'r', 'rb'}
|
125 |
+
|
126 |
+
>>> dumpfile = dill.temp.dump([1, 2, 3, 4, 5])
|
127 |
+
>>> dill.temp.load(dumpfile)
|
128 |
+
[1, 2, 3, 4, 5]
|
129 |
+
"""
|
130 |
+
import dill as pickle
|
131 |
+
mode = kwds.pop('mode', 'rb')
|
132 |
+
name = getattr(file, 'name', file) # name=file.name or name=file (if str)
|
133 |
+
return pickle.load(open(name, mode=mode, **kwds))
|
134 |
+
|
135 |
+
def dump(object, **kwds):
|
136 |
+
"""dill.dump of object to a NamedTemporaryFile.
|
137 |
+
Loads with "dill.temp.load". Returns the filehandle.
|
138 |
+
|
139 |
+
>>> dumpfile = dill.temp.dump([1, 2, 3, 4, 5])
|
140 |
+
>>> dill.temp.load(dumpfile)
|
141 |
+
[1, 2, 3, 4, 5]
|
142 |
+
|
143 |
+
Optional kwds:
|
144 |
+
If 'suffix' is specified, the file name will end with that suffix,
|
145 |
+
otherwise there will be no suffix.
|
146 |
+
|
147 |
+
If 'prefix' is specified, the file name will begin with that prefix,
|
148 |
+
otherwise a default prefix is used.
|
149 |
+
|
150 |
+
If 'dir' is specified, the file will be created in that directory,
|
151 |
+
otherwise a default directory is used.
|
152 |
+
|
153 |
+
If 'text' is specified and true, the file is opened in text
|
154 |
+
mode. Else (the default) the file is opened in binary mode. On
|
155 |
+
some operating systems, this makes no difference.
|
156 |
+
|
157 |
+
NOTE: Keep the return value for as long as you want your file to exist !
|
158 |
+
"""
|
159 |
+
import dill as pickle
|
160 |
+
import tempfile
|
161 |
+
kwds.setdefault('delete', True)
|
162 |
+
file = tempfile.NamedTemporaryFile(**kwds)
|
163 |
+
pickle.dump(object, file)
|
164 |
+
file.flush()
|
165 |
+
return file
|
166 |
+
|
167 |
+
def loadIO(buffer, **kwds):
|
168 |
+
"""load an object that was stored with dill.temp.dumpIO
|
169 |
+
|
170 |
+
buffer: buffer object
|
171 |
+
|
172 |
+
>>> dumpfile = dill.temp.dumpIO([1, 2, 3, 4, 5])
|
173 |
+
>>> dill.temp.loadIO(dumpfile)
|
174 |
+
[1, 2, 3, 4, 5]
|
175 |
+
"""
|
176 |
+
import dill as pickle
|
177 |
+
from io import BytesIO as StringIO
|
178 |
+
value = getattr(buffer, 'getvalue', buffer) # value or buffer.getvalue
|
179 |
+
if value != buffer: value = value() # buffer.getvalue()
|
180 |
+
return pickle.load(StringIO(value))
|
181 |
+
|
182 |
+
def dumpIO(object, **kwds):
|
183 |
+
"""dill.dump of object to a buffer.
|
184 |
+
Loads with "dill.temp.loadIO". Returns the buffer object.
|
185 |
+
|
186 |
+
>>> dumpfile = dill.temp.dumpIO([1, 2, 3, 4, 5])
|
187 |
+
>>> dill.temp.loadIO(dumpfile)
|
188 |
+
[1, 2, 3, 4, 5]
|
189 |
+
"""
|
190 |
+
import dill as pickle
|
191 |
+
from io import BytesIO as StringIO
|
192 |
+
file = StringIO()
|
193 |
+
pickle.dump(object, file)
|
194 |
+
file.flush()
|
195 |
+
return file
|
196 |
+
|
197 |
+
def loadIO_source(buffer, **kwds):
|
198 |
+
"""load an object that was stored with dill.temp.dumpIO_source
|
199 |
+
|
200 |
+
buffer: buffer object
|
201 |
+
alias: string name of stored object
|
202 |
+
|
203 |
+
>>> f = lambda x:x**2
|
204 |
+
>>> pyfile = dill.temp.dumpIO_source(f, alias='_f')
|
205 |
+
>>> _f = dill.temp.loadIO_source(pyfile)
|
206 |
+
>>> _f(4)
|
207 |
+
16
|
208 |
+
"""
|
209 |
+
alias = kwds.pop('alias', None)
|
210 |
+
source = getattr(buffer, 'getvalue', buffer) # source or buffer.getvalue
|
211 |
+
if source != buffer: source = source() # buffer.getvalue()
|
212 |
+
source = source.decode() # buffer to string
|
213 |
+
if not alias:
|
214 |
+
tag = source.strip().splitlines()[-1].split()
|
215 |
+
if tag[0] != '#NAME:':
|
216 |
+
stub = source.splitlines()[0]
|
217 |
+
raise IOError("unknown name for code: %s" % stub)
|
218 |
+
alias = tag[-1]
|
219 |
+
local = {}
|
220 |
+
exec(source, local)
|
221 |
+
_ = eval("%s" % alias, local)
|
222 |
+
return _
|
223 |
+
|
224 |
+
def dumpIO_source(object, **kwds):
|
225 |
+
"""write object source to a buffer (instead of dill.dump)
|
226 |
+
Loads with dill.temp.loadIO_source. Returns the buffer object.
|
227 |
+
|
228 |
+
>>> f = lambda x:x**2
|
229 |
+
>>> pyfile = dill.temp.dumpIO_source(f, alias='_f')
|
230 |
+
>>> _f = dill.temp.loadIO_source(pyfile)
|
231 |
+
>>> _f(4)
|
232 |
+
16
|
233 |
+
|
234 |
+
Optional kwds:
|
235 |
+
If 'alias' is specified, the object will be renamed to the given string.
|
236 |
+
"""
|
237 |
+
from .source import importable, getname
|
238 |
+
from io import BytesIO as StringIO
|
239 |
+
alias = kwds.pop('alias', '') #XXX: include an alias so a name is known
|
240 |
+
name = str(alias) or getname(object)
|
241 |
+
name = "\n#NAME: %s\n" % name
|
242 |
+
#XXX: assumes kwds['dir'] is writable and on $PYTHONPATH
|
243 |
+
file = StringIO()
|
244 |
+
file.write(b(''.join([importable(object, alias=alias),name])))
|
245 |
+
file.flush()
|
246 |
+
return file
|
247 |
+
|
248 |
+
|
249 |
+
del contextlib
|
250 |
+
|
251 |
+
|
252 |
+
# EOF
|
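A compact, hedged round-trip sketch tying together the file-based and buffer-based pairs defined above (assumes dill is installed):

# File-based pair: dump() writes to a NamedTemporaryFile and returns the handle;
# keep the handle alive, or the temporary file is deleted.
import dill.temp as temp

data = [1, 2, 3, 4, 5]
f = temp.dump(data)
assert temp.load(f) == data

# Buffer-based pair: dumpIO()/loadIO() use an in-memory BytesIO instead of a file.
buf = temp.dumpIO(data)
assert temp.loadIO(buf) == data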
env-llmeval/lib/python3.10/site-packages/dill/tests/__main__.py
ADDED
@@ -0,0 +1,35 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2018-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/dill/blob/master/LICENSE

import glob
import os
import sys
import subprocess as sp
python = sys.executable
try:
    import pox
    python = pox.which_python(version=True) or python
except ImportError:
    pass
shell = sys.platform[:3] == 'win'

suite = os.path.dirname(__file__) or os.path.curdir
tests = glob.glob(suite + os.path.sep + 'test_*.py')


if __name__ == '__main__':

    failed = 0
    for test in tests:
        p = sp.Popen([python, test], shell=shell).wait()
        if p:
            print('F', end='', flush=True)
            failed = 1
        else:
            print('.', end='', flush=True)
    print('')
    exit(failed)
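As a hedged aside (an inference from the runner above, not documented usage), the whole suite can presumably be launched with `python -m dill.tests`, or programmatically as sketched below.

# Run the dill test suite the way `python -m dill.tests` would (assumes dill and
# its tests subpackage are installed and importable).
import runpy
# The runner prints '.' per passing test file and 'F' per failure, then calls
# exit(), so expect a SystemExit when it finishes.
runpy.run_module('dill.tests', run_name='__main__')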
env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (378 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_abc.cpython-310.pyc
ADDED
Binary file (5.25 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc
ADDED
Binary file (1.04 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_extendpickle.cpython-310.pyc
ADDED
Binary file (1.28 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_module.cpython-310.pyc
ADDED
Binary file (1.74 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_moduledict.cpython-310.pyc
ADDED
Binary file (1.73 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/dill/tests/__pycache__/test_selected.cpython-310.pyc
ADDED
Binary file (3.22 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/dill/tests/test_abc.py
ADDED
@@ -0,0 +1,169 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2023-2024 The Uncertainty Quantification Foundation.
|
5 |
+
# License: 3-clause BSD. The full license text is available at:
|
6 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
7 |
+
"""
|
8 |
+
test dill's ability to pickle abstract base class objects
|
9 |
+
"""
|
10 |
+
import dill
|
11 |
+
import abc
|
12 |
+
from abc import ABC
|
13 |
+
import warnings
|
14 |
+
|
15 |
+
from types import FunctionType
|
16 |
+
|
17 |
+
dill.settings['recurse'] = True
|
18 |
+
|
19 |
+
class OneTwoThree(ABC):
|
20 |
+
@abc.abstractmethod
|
21 |
+
def foo(self):
|
22 |
+
"""A method"""
|
23 |
+
pass
|
24 |
+
|
25 |
+
@property
|
26 |
+
@abc.abstractmethod
|
27 |
+
def bar(self):
|
28 |
+
"""Property getter"""
|
29 |
+
pass
|
30 |
+
|
31 |
+
@bar.setter
|
32 |
+
@abc.abstractmethod
|
33 |
+
def bar(self, value):
|
34 |
+
"""Property setter"""
|
35 |
+
pass
|
36 |
+
|
37 |
+
@classmethod
|
38 |
+
@abc.abstractmethod
|
39 |
+
def cfoo(cls):
|
40 |
+
"""Class method"""
|
41 |
+
pass
|
42 |
+
|
43 |
+
@staticmethod
|
44 |
+
@abc.abstractmethod
|
45 |
+
def sfoo():
|
46 |
+
"""Static method"""
|
47 |
+
pass
|
48 |
+
|
49 |
+
class EasyAsAbc(OneTwoThree):
|
50 |
+
def __init__(self):
|
51 |
+
self._bar = None
|
52 |
+
|
53 |
+
def foo(self):
|
54 |
+
return "Instance Method FOO"
|
55 |
+
|
56 |
+
@property
|
57 |
+
def bar(self):
|
58 |
+
return self._bar
|
59 |
+
|
60 |
+
@bar.setter
|
61 |
+
def bar(self, value):
|
62 |
+
self._bar = value
|
63 |
+
|
64 |
+
@classmethod
|
65 |
+
def cfoo(cls):
|
66 |
+
return "Class Method CFOO"
|
67 |
+
|
68 |
+
@staticmethod
|
69 |
+
def sfoo():
|
70 |
+
return "Static Method SFOO"
|
71 |
+
|
72 |
+
def test_abc_non_local():
|
73 |
+
assert dill.copy(OneTwoThree) is not OneTwoThree
|
74 |
+
assert dill.copy(EasyAsAbc) is not EasyAsAbc
|
75 |
+
|
76 |
+
with warnings.catch_warnings():
|
77 |
+
warnings.simplefilter("ignore", dill.PicklingWarning)
|
78 |
+
assert dill.copy(OneTwoThree, byref=True) is OneTwoThree
|
79 |
+
assert dill.copy(EasyAsAbc, byref=True) is EasyAsAbc
|
80 |
+
|
81 |
+
instance = EasyAsAbc()
|
82 |
+
# Set a property that StockPickle can't preserve
|
83 |
+
instance.bar = lambda x: x**2
|
84 |
+
depickled = dill.copy(instance)
|
85 |
+
assert type(depickled) is type(instance) #NOTE: issue #612, test_abc_local
|
86 |
+
#NOTE: dill.copy of local (or non-local) classes should (not) be the same?
|
87 |
+
assert type(depickled.bar) is FunctionType
|
88 |
+
assert depickled.bar(3) == 9
|
89 |
+
assert depickled.sfoo() == "Static Method SFOO"
|
90 |
+
assert depickled.cfoo() == "Class Method CFOO"
|
91 |
+
assert depickled.foo() == "Instance Method FOO"
|
92 |
+
|
93 |
+
def test_abc_local():
|
94 |
+
"""
|
95 |
+
Test using locally scoped ABC class
|
96 |
+
"""
|
97 |
+
class LocalABC(ABC):
|
98 |
+
@abc.abstractmethod
|
99 |
+
def foo(self):
|
100 |
+
pass
|
101 |
+
|
102 |
+
def baz(self):
|
103 |
+
return repr(self)
|
104 |
+
|
105 |
+
labc = dill.copy(LocalABC)
|
106 |
+
assert labc is not LocalABC
|
107 |
+
assert type(labc) is type(LocalABC)
|
108 |
+
#NOTE: dill.copy of local (or non-local) classes should (not) be the same?
|
109 |
+
# <class '__main__.LocalABC'>
|
110 |
+
# <class '__main__.test_abc_local.<locals>.LocalABC'>
|
111 |
+
|
112 |
+
class Real(labc):
|
113 |
+
def foo(self):
|
114 |
+
return "True!"
|
115 |
+
|
116 |
+
def baz(self):
|
117 |
+
return "My " + super(Real, self).baz()
|
118 |
+
|
119 |
+
real = Real()
|
120 |
+
assert real.foo() == "True!"
|
121 |
+
|
122 |
+
try:
|
123 |
+
labc()
|
124 |
+
except TypeError as e:
|
125 |
+
# Expected error
|
126 |
+
pass
|
127 |
+
else:
|
128 |
+
print('Failed to raise type error')
|
129 |
+
assert False
|
130 |
+
|
131 |
+
labc2, pik = dill.copy((labc, Real()))
|
132 |
+
assert 'Real' == type(pik).__name__
|
133 |
+
assert '.Real' in type(pik).__qualname__
|
134 |
+
assert type(pik) is not Real
|
135 |
+
assert labc2 is not LocalABC
|
136 |
+
assert labc2 is not labc
|
137 |
+
assert isinstance(pik, labc2)
|
138 |
+
assert not isinstance(pik, labc)
|
139 |
+
assert not isinstance(pik, LocalABC)
|
140 |
+
assert pik.baz() == "My " + repr(pik)
|
141 |
+
|
142 |
+
def test_meta_local_no_cache():
|
143 |
+
"""
|
144 |
+
Test calling metaclass and cache registration
|
145 |
+
"""
|
146 |
+
LocalMetaABC = abc.ABCMeta('LocalMetaABC', (), {})
|
147 |
+
|
148 |
+
class ClassyClass:
|
149 |
+
pass
|
150 |
+
|
151 |
+
class KlassyClass:
|
152 |
+
pass
|
153 |
+
|
154 |
+
LocalMetaABC.register(ClassyClass)
|
155 |
+
|
156 |
+
assert not issubclass(KlassyClass, LocalMetaABC)
|
157 |
+
assert issubclass(ClassyClass, LocalMetaABC)
|
158 |
+
|
159 |
+
res = dill.dumps((LocalMetaABC, ClassyClass, KlassyClass))
|
160 |
+
|
161 |
+
lmabc, cc, kc = dill.loads(res)
|
162 |
+
assert type(lmabc) == type(LocalMetaABC)
|
163 |
+
assert not issubclass(kc, lmabc)
|
164 |
+
assert issubclass(cc, lmabc)
|
165 |
+
|
166 |
+
if __name__ == '__main__':
|
167 |
+
test_abc_non_local()
|
168 |
+
test_abc_local()
|
169 |
+
test_meta_local_no_cache()
|
env-llmeval/lib/python3.10/site-packages/dill/tests/test_check.py
ADDED
@@ -0,0 +1,62 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
|
9 |
+
from dill import check
|
10 |
+
import sys
|
11 |
+
|
12 |
+
from dill.temp import capture
|
13 |
+
|
14 |
+
|
15 |
+
#FIXME: this doesn't catch output... it's from the internal call
|
16 |
+
def raise_check(func, **kwds):
|
17 |
+
try:
|
18 |
+
with capture('stdout') as out:
|
19 |
+
check(func, **kwds)
|
20 |
+
except Exception:
|
21 |
+
e = sys.exc_info()[1]
|
22 |
+
raise AssertionError(str(e))
|
23 |
+
else:
|
24 |
+
assert 'Traceback' not in out.getvalue()
|
25 |
+
finally:
|
26 |
+
out.close()
|
27 |
+
|
28 |
+
|
29 |
+
f = lambda x:x**2
|
30 |
+
|
31 |
+
|
32 |
+
def test_simple(verbose=None):
|
33 |
+
raise_check(f, verbose=verbose)
|
34 |
+
|
35 |
+
|
36 |
+
def test_recurse(verbose=None):
|
37 |
+
raise_check(f, recurse=True, verbose=verbose)
|
38 |
+
|
39 |
+
|
40 |
+
def test_byref(verbose=None):
|
41 |
+
raise_check(f, byref=True, verbose=verbose)
|
42 |
+
|
43 |
+
|
44 |
+
def test_protocol(verbose=None):
|
45 |
+
raise_check(f, protocol=True, verbose=verbose)
|
46 |
+
|
47 |
+
|
48 |
+
def test_python(verbose=None):
|
49 |
+
raise_check(f, python=None, verbose=verbose)
|
50 |
+
|
51 |
+
|
52 |
+
#TODO: test incompatible versions
|
53 |
+
#TODO: test dump failure
|
54 |
+
#TODO: test load failure
|
55 |
+
|
56 |
+
|
57 |
+
if __name__ == '__main__':
|
58 |
+
test_simple()
|
59 |
+
test_recurse()
|
60 |
+
test_byref()
|
61 |
+
test_protocol()
|
62 |
+
test_python()
|
env-llmeval/lib/python3.10/site-packages/dill/tests/test_classdef.py
ADDED
@@ -0,0 +1,340 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
|
9 |
+
import dill
|
10 |
+
from enum import EnumMeta
|
11 |
+
import sys
|
12 |
+
dill.settings['recurse'] = True
|
13 |
+
|
14 |
+
# test classdefs
|
15 |
+
class _class:
|
16 |
+
def _method(self):
|
17 |
+
pass
|
18 |
+
def ok(self):
|
19 |
+
return True
|
20 |
+
|
21 |
+
class _class2:
|
22 |
+
def __call__(self):
|
23 |
+
pass
|
24 |
+
def ok(self):
|
25 |
+
return True
|
26 |
+
|
27 |
+
class _newclass(object):
|
28 |
+
def _method(self):
|
29 |
+
pass
|
30 |
+
def ok(self):
|
31 |
+
return True
|
32 |
+
|
33 |
+
class _newclass2(object):
|
34 |
+
def __call__(self):
|
35 |
+
pass
|
36 |
+
def ok(self):
|
37 |
+
return True
|
38 |
+
|
39 |
+
class _meta(type):
|
40 |
+
pass
|
41 |
+
|
42 |
+
def __call__(self):
|
43 |
+
pass
|
44 |
+
def ok(self):
|
45 |
+
return True
|
46 |
+
|
47 |
+
_mclass = _meta("_mclass", (object,), {"__call__": __call__, "ok": ok})
|
48 |
+
|
49 |
+
del __call__
|
50 |
+
del ok
|
51 |
+
|
52 |
+
o = _class()
|
53 |
+
oc = _class2()
|
54 |
+
n = _newclass()
|
55 |
+
nc = _newclass2()
|
56 |
+
m = _mclass()
|
57 |
+
|
58 |
+
if sys.hexversion < 0x03090000:
|
59 |
+
import typing
|
60 |
+
class customIntList(typing.List[int]):
|
61 |
+
pass
|
62 |
+
else:
|
63 |
+
class customIntList(list[int]):
|
64 |
+
pass
|
65 |
+
|
66 |
+
# test pickles for class instances
|
67 |
+
def test_class_instances():
|
68 |
+
assert dill.pickles(o)
|
69 |
+
assert dill.pickles(oc)
|
70 |
+
assert dill.pickles(n)
|
71 |
+
assert dill.pickles(nc)
|
72 |
+
assert dill.pickles(m)
|
73 |
+
|
74 |
+
def test_class_objects():
|
75 |
+
clslist = [_class,_class2,_newclass,_newclass2,_mclass]
|
76 |
+
objlist = [o,oc,n,nc,m]
|
77 |
+
_clslist = [dill.dumps(obj) for obj in clslist]
|
78 |
+
_objlist = [dill.dumps(obj) for obj in objlist]
|
79 |
+
|
80 |
+
for obj in clslist:
|
81 |
+
globals().pop(obj.__name__)
|
82 |
+
del clslist
|
83 |
+
for obj in ['o','oc','n','nc']:
|
84 |
+
globals().pop(obj)
|
85 |
+
del objlist
|
86 |
+
del obj
|
87 |
+
|
88 |
+
for obj,cls in zip(_objlist,_clslist):
|
89 |
+
_cls = dill.loads(cls)
|
90 |
+
_obj = dill.loads(obj)
|
91 |
+
assert _obj.ok()
|
92 |
+
assert _cls.ok(_cls())
|
93 |
+
if _cls.__name__ == "_mclass":
|
94 |
+
assert type(_cls).__name__ == "_meta"
|
95 |
+
|
96 |
+
# test NoneType
|
97 |
+
def test_specialtypes():
|
98 |
+
assert dill.pickles(type(None))
|
99 |
+
assert dill.pickles(type(NotImplemented))
|
100 |
+
assert dill.pickles(type(Ellipsis))
|
101 |
+
assert dill.pickles(type(EnumMeta))
|
102 |
+
|
103 |
+
from collections import namedtuple
|
104 |
+
Z = namedtuple("Z", ['a','b'])
|
105 |
+
Zi = Z(0,1)
|
106 |
+
X = namedtuple("Y", ['a','b'])
|
107 |
+
X.__name__ = "X"
|
108 |
+
X.__qualname__ = "X" #XXX: name must 'match' or fails to pickle
|
109 |
+
Xi = X(0,1)
|
110 |
+
Bad = namedtuple("FakeName", ['a','b'])
|
111 |
+
Badi = Bad(0,1)
|
112 |
+
Defaults = namedtuple('Defaults', ['x', 'y'], defaults=[1])
|
113 |
+
Defaultsi = Defaults(2)
|
114 |
+
|
115 |
+
# test namedtuple
|
116 |
+
def test_namedtuple():
|
117 |
+
assert Z is dill.loads(dill.dumps(Z))
|
118 |
+
assert Zi == dill.loads(dill.dumps(Zi))
|
119 |
+
assert X is dill.loads(dill.dumps(X))
|
120 |
+
assert Xi == dill.loads(dill.dumps(Xi))
|
121 |
+
assert Defaults is dill.loads(dill.dumps(Defaults))
|
122 |
+
assert Defaultsi == dill.loads(dill.dumps(Defaultsi))
|
123 |
+
assert Bad is not dill.loads(dill.dumps(Bad))
|
124 |
+
assert Bad._fields == dill.loads(dill.dumps(Bad))._fields
|
125 |
+
assert tuple(Badi) == tuple(dill.loads(dill.dumps(Badi)))
|
126 |
+
|
127 |
+
class A:
|
128 |
+
class B(namedtuple("C", ["one", "two"])):
|
129 |
+
'''docstring'''
|
130 |
+
B.__module__ = 'testing'
|
131 |
+
|
132 |
+
a = A()
|
133 |
+
assert dill.copy(a)
|
134 |
+
|
135 |
+
assert dill.copy(A.B).__name__ == 'B'
|
136 |
+
assert dill.copy(A.B).__qualname__.endswith('.<locals>.A.B')
|
137 |
+
assert dill.copy(A.B).__doc__ == 'docstring'
|
138 |
+
assert dill.copy(A.B).__module__ == 'testing'
|
139 |
+
|
140 |
+
from typing import NamedTuple
|
141 |
+
|
142 |
+
def A():
|
143 |
+
class B(NamedTuple):
|
144 |
+
x: int
|
145 |
+
return B
|
146 |
+
|
147 |
+
assert type(dill.copy(A()(8))).__qualname__ == type(A()(8)).__qualname__
|
148 |
+
|
149 |
+
def test_dtype():
|
150 |
+
try:
|
151 |
+
import numpy as np
|
152 |
+
|
153 |
+
dti = np.dtype('int')
|
154 |
+
assert np.dtype == dill.copy(np.dtype)
|
155 |
+
assert dti == dill.copy(dti)
|
156 |
+
except ImportError: pass
|
157 |
+
|
158 |
+
|
159 |
+
def test_array_nested():
|
160 |
+
try:
|
161 |
+
import numpy as np
|
162 |
+
|
163 |
+
x = np.array([1])
|
164 |
+
y = (x,)
|
165 |
+
assert y == dill.copy(y)
|
166 |
+
|
167 |
+
except ImportError: pass
|
168 |
+
|
169 |
+
|
170 |
+
def test_array_subclass():
|
171 |
+
try:
|
172 |
+
import numpy as np
|
173 |
+
|
174 |
+
class TestArray(np.ndarray):
|
175 |
+
def __new__(cls, input_array, color):
|
176 |
+
obj = np.asarray(input_array).view(cls)
|
177 |
+
obj.color = color
|
178 |
+
return obj
|
179 |
+
def __array_finalize__(self, obj):
|
180 |
+
if obj is None:
|
181 |
+
return
|
182 |
+
if isinstance(obj, type(self)):
|
183 |
+
self.color = obj.color
|
184 |
+
def __getnewargs__(self):
|
185 |
+
return np.asarray(self), self.color
|
186 |
+
|
187 |
+
a1 = TestArray(np.zeros(100), color='green')
|
188 |
+
if not dill._dill.IS_PYPY:
|
189 |
+
assert dill.pickles(a1)
|
190 |
+
assert a1.__dict__ == dill.copy(a1).__dict__
|
191 |
+
|
192 |
+
a2 = a1[0:9]
|
193 |
+
if not dill._dill.IS_PYPY:
|
194 |
+
assert dill.pickles(a2)
|
195 |
+
assert a2.__dict__ == dill.copy(a2).__dict__
|
196 |
+
|
197 |
+
class TestArray2(np.ndarray):
|
198 |
+
color = 'blue'
|
199 |
+
|
200 |
+
a3 = TestArray2([1,2,3,4,5])
|
201 |
+
a3.color = 'green'
|
202 |
+
if not dill._dill.IS_PYPY:
|
203 |
+
assert dill.pickles(a3)
|
204 |
+
assert a3.__dict__ == dill.copy(a3).__dict__
|
205 |
+
|
206 |
+
except ImportError: pass
|
207 |
+
|
208 |
+
|
209 |
+
def test_method_decorator():
|
210 |
+
class A(object):
|
211 |
+
@classmethod
|
212 |
+
def test(cls):
|
213 |
+
pass
|
214 |
+
|
215 |
+
a = A()
|
216 |
+
|
217 |
+
res = dill.dumps(a)
|
218 |
+
new_obj = dill.loads(res)
|
219 |
+
new_obj.__class__.test()
|
220 |
+
|
221 |
+
# test slots
|
222 |
+
class Y(object):
|
223 |
+
__slots__ = ('y', '__weakref__')
|
224 |
+
def __init__(self, y):
|
225 |
+
self.y = y
|
226 |
+
|
227 |
+
value = 123
|
228 |
+
y = Y(value)
|
229 |
+
|
230 |
+
class Y2(object):
|
231 |
+
__slots__ = 'y'
|
232 |
+
def __init__(self, y):
|
233 |
+
self.y = y
|
234 |
+
|
235 |
+
def test_slots():
|
236 |
+
assert dill.pickles(Y)
|
237 |
+
assert dill.pickles(y)
|
238 |
+
assert dill.pickles(Y.y)
|
239 |
+
assert dill.copy(y).y == value
|
240 |
+
assert dill.copy(Y2(value)).y == value
|
241 |
+
|
242 |
+
def test_origbases():
|
243 |
+
assert dill.copy(customIntList).__orig_bases__ == customIntList.__orig_bases__
|
244 |
+
|
245 |
+
def test_attr():
|
246 |
+
import attr
|
247 |
+
@attr.s
|
248 |
+
class A:
|
249 |
+
a = attr.ib()
|
250 |
+
|
251 |
+
v = A(1)
|
252 |
+
assert dill.copy(v) == v
|
253 |
+
|
254 |
+
def test_metaclass():
|
255 |
+
class metaclass_with_new(type):
|
256 |
+
def __new__(mcls, name, bases, ns, **kwds):
|
257 |
+
cls = super().__new__(mcls, name, bases, ns, **kwds)
|
258 |
+
assert mcls is not None
|
259 |
+
assert cls.method(mcls)
|
260 |
+
return cls
|
261 |
+
def method(cls, mcls):
|
262 |
+
return isinstance(cls, mcls)
|
263 |
+
|
264 |
+
l = locals()
|
265 |
+
exec("""class subclass_with_new(metaclass=metaclass_with_new):
|
266 |
+
def __new__(cls):
|
267 |
+
self = super().__new__(cls)
|
268 |
+
return self""", None, l)
|
269 |
+
subclass_with_new = l['subclass_with_new']
|
270 |
+
|
271 |
+
assert dill.copy(subclass_with_new())
|
272 |
+
|
273 |
+
def test_enummeta():
|
274 |
+
from http import HTTPStatus
|
275 |
+
import enum
|
276 |
+
assert dill.copy(HTTPStatus.OK) is HTTPStatus.OK
|
277 |
+
assert dill.copy(enum.EnumMeta) is enum.EnumMeta
|
278 |
+
|
279 |
+
def test_inherit(): #NOTE: see issue #612
|
280 |
+
class Foo:
|
281 |
+
w = 0
|
282 |
+
x = 1
|
283 |
+
y = 1.1
|
284 |
+
a = ()
|
285 |
+
b = (1,)
|
286 |
+
n = None
|
287 |
+
|
288 |
+
class Bar(Foo):
|
289 |
+
w = 2
|
290 |
+
x = 1
|
291 |
+
y = 1.1
|
292 |
+
z = 0.2
|
293 |
+
a = ()
|
294 |
+
b = (1,)
|
295 |
+
c = (2,)
|
296 |
+
n = None
|
297 |
+
|
298 |
+
Baz = dill.copy(Bar)
|
299 |
+
|
300 |
+
import platform
|
301 |
+
is_pypy = platform.python_implementation() == 'PyPy'
|
302 |
+
assert Bar.__dict__ == Baz.__dict__
|
303 |
+
# ints
|
304 |
+
assert 'w' in Bar.__dict__ and 'w' in Baz.__dict__
|
305 |
+
assert Bar.__dict__['w'] is Baz.__dict__['w']
|
306 |
+
assert 'x' in Bar.__dict__ and 'x' in Baz.__dict__
|
307 |
+
assert Bar.__dict__['x'] is Baz.__dict__['x']
|
308 |
+
# floats
|
309 |
+
assert 'y' in Bar.__dict__ and 'y' in Baz.__dict__
|
310 |
+
same = Bar.__dict__['y'] is Baz.__dict__['y']
|
311 |
+
assert same if is_pypy else not same
|
312 |
+
assert 'z' in Bar.__dict__ and 'z' in Baz.__dict__
|
313 |
+
same = Bar.__dict__['z'] is Baz.__dict__['z']
|
314 |
+
assert same if is_pypy else not same
|
315 |
+
# tuples
|
316 |
+
assert 'a' in Bar.__dict__ and 'a' in Baz.__dict__
|
317 |
+
assert Bar.__dict__['a'] is Baz.__dict__['a']
|
318 |
+
assert 'b' in Bar.__dict__ and 'b' in Baz.__dict__
|
319 |
+
assert Bar.__dict__['b'] is not Baz.__dict__['b']
|
320 |
+
assert 'c' in Bar.__dict__ and 'c' in Baz.__dict__
|
321 |
+
assert Bar.__dict__['c'] is not Baz.__dict__['c']
|
322 |
+
# None
|
323 |
+
assert 'n' in Bar.__dict__ and 'n' in Baz.__dict__
|
324 |
+
assert Bar.__dict__['n'] is Baz.__dict__['n']
|
325 |
+
|
326 |
+
|
327 |
+
if __name__ == '__main__':
|
328 |
+
test_class_instances()
|
329 |
+
test_class_objects()
|
330 |
+
test_specialtypes()
|
331 |
+
test_namedtuple()
|
332 |
+
test_dtype()
|
333 |
+
test_array_nested()
|
334 |
+
test_array_subclass()
|
335 |
+
test_method_decorator()
|
336 |
+
test_slots()
|
337 |
+
test_origbases()
|
338 |
+
test_metaclass()
|
339 |
+
test_enummeta()
|
340 |
+
test_inherit()
|
env-llmeval/lib/python3.10/site-packages/dill/tests/test_detect.py
ADDED
@@ -0,0 +1,159 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
|
9 |
+
from dill.detect import baditems, badobjects, badtypes, errors, parent, at, globalvars
|
10 |
+
from dill import settings
|
11 |
+
from dill._dill import IS_PYPY
|
12 |
+
from pickle import PicklingError
|
13 |
+
|
14 |
+
import inspect
|
15 |
+
import sys
|
16 |
+
import os
|
17 |
+
|
18 |
+
def test_bad_things():
|
19 |
+
f = inspect.currentframe()
|
20 |
+
assert baditems(f) == [f]
|
21 |
+
#assert baditems(globals()) == [f] #XXX
|
22 |
+
assert badobjects(f) is f
|
23 |
+
assert badtypes(f) == type(f)
|
24 |
+
assert type(errors(f)) is TypeError
|
25 |
+
d = badtypes(f, 1)
|
26 |
+
assert isinstance(d, dict)
|
27 |
+
assert list(badobjects(f, 1).keys()) == list(d.keys())
|
28 |
+
assert list(errors(f, 1).keys()) == list(d.keys())
|
29 |
+
s = set([(err.__class__.__name__,err.args[0]) for err in list(errors(f, 1).values())])
|
30 |
+
a = dict(s)
|
31 |
+
if not os.environ.get('COVERAGE'): #XXX: travis-ci
|
32 |
+
assert len(s) is len(a) # TypeError (and possibly PicklingError)
|
33 |
+
n = 2
|
34 |
+
assert len(a) is n if 'PicklingError' in a.keys() else n-1
|
35 |
+
|
36 |
+
def test_parent():
|
37 |
+
x = [4,5,6,7]
|
38 |
+
listiter = iter(x)
|
39 |
+
obj = parent(listiter, list)
|
40 |
+
assert obj is x
|
41 |
+
|
42 |
+
if IS_PYPY: assert parent(obj, int) is None
|
43 |
+
else: assert parent(obj, int) is x[-1] # python oddly? finds last int
|
44 |
+
assert at(id(at)) is at
|
45 |
+
|
46 |
+
a, b, c = 1, 2, 3
|
47 |
+
|
48 |
+
def squared(x):
|
49 |
+
return a+x**2
|
50 |
+
|
51 |
+
def foo(x):
|
52 |
+
def bar(y):
|
53 |
+
return squared(x)+y
|
54 |
+
return bar
|
55 |
+
|
56 |
+
class _class:
|
57 |
+
def _method(self):
|
58 |
+
pass
|
59 |
+
def ok(self):
|
60 |
+
return True
|
61 |
+
|
62 |
+
def test_globals():
|
63 |
+
def f():
|
64 |
+
a
|
65 |
+
def g():
|
66 |
+
b
|
67 |
+
def h():
|
68 |
+
c
|
69 |
+
assert globalvars(f) == dict(a=1, b=2, c=3)
|
70 |
+
|
71 |
+
res = globalvars(foo, recurse=True)
|
72 |
+
assert set(res) == set(['squared', 'a'])
|
73 |
+
res = globalvars(foo, recurse=False)
|
74 |
+
assert res == {}
|
75 |
+
zap = foo(2)
|
76 |
+
res = globalvars(zap, recurse=True)
|
77 |
+
assert set(res) == set(['squared', 'a'])
|
78 |
+
res = globalvars(zap, recurse=False)
|
79 |
+
assert set(res) == set(['squared'])
|
80 |
+
del zap
|
81 |
+
res = globalvars(squared)
|
82 |
+
assert set(res) == set(['a'])
|
83 |
+
# FIXME: should find referenced __builtins__
|
84 |
+
#res = globalvars(_class, recurse=True)
|
85 |
+
#assert set(res) == set(['True'])
|
86 |
+
#res = globalvars(_class, recurse=False)
|
87 |
+
#assert res == {}
|
88 |
+
#res = globalvars(_class.ok, recurse=True)
|
89 |
+
#assert set(res) == set(['True'])
|
90 |
+
#res = globalvars(_class.ok, recurse=False)
|
91 |
+
#assert set(res) == set(['True'])
|
92 |
+
|
93 |
+
|
94 |
+
#98 dill ignores __getstate__ in interactive lambdas
|
95 |
+
bar = [0]
|
96 |
+
|
97 |
+
class Foo(object):
|
98 |
+
def __init__(self):
|
99 |
+
pass
|
100 |
+
def __getstate__(self):
|
101 |
+
bar[0] = bar[0]+1
|
102 |
+
return {}
|
103 |
+
def __setstate__(self, data):
|
104 |
+
pass
|
105 |
+
|
106 |
+
f = Foo()
|
107 |
+
|
108 |
+
def test_getstate():
|
109 |
+
from dill import dumps, loads
|
110 |
+
dumps(f)
|
111 |
+
b = bar[0]
|
112 |
+
dumps(lambda: f, recurse=False) # doesn't call __getstate__
|
113 |
+
assert bar[0] == b
|
114 |
+
dumps(lambda: f, recurse=True) # calls __getstate__
|
115 |
+
assert bar[0] == b + 1
|
116 |
+
|
117 |
+
#97 serialize lambdas in test files
|
118 |
+
def test_deleted():
|
119 |
+
global sin
|
120 |
+
from dill import dumps, loads
|
121 |
+
from math import sin, pi
|
122 |
+
|
123 |
+
def sinc(x):
|
124 |
+
return sin(x)/x
|
125 |
+
|
126 |
+
settings['recurse'] = True
|
127 |
+
_sinc = dumps(sinc)
|
128 |
+
sin = globals().pop('sin')
|
129 |
+
sin = 1
|
130 |
+
del sin
|
131 |
+
sinc_ = loads(_sinc) # no NameError... pickling preserves 'sin'
|
132 |
+
res = sinc_(1)
|
133 |
+
from math import sin
|
134 |
+
assert sinc(1) == res
|
135 |
+
|
136 |
+
|
137 |
+
def test_lambdify():
|
138 |
+
try:
|
139 |
+
from sympy import symbols, lambdify
|
140 |
+
except ImportError:
|
141 |
+
return
|
142 |
+
settings['recurse'] = True
|
143 |
+
x = symbols("x")
|
144 |
+
y = x**2
|
145 |
+
f = lambdify([x], y)
|
146 |
+
z = min
|
147 |
+
d = globals()
|
148 |
+
globalvars(f, recurse=True, builtin=True)
|
149 |
+
assert z is min
|
150 |
+
assert d is globals()
|
151 |
+
|
152 |
+
|
153 |
+
if __name__ == '__main__':
|
154 |
+
test_bad_things()
|
155 |
+
test_parent()
|
156 |
+
test_globals()
|
157 |
+
test_getstate()
|
158 |
+
test_deleted()
|
159 |
+
test_lambdify()
|
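A small, hedged sketch of dill.detect.globalvars, the helper these tests lean on (assumes dill is installed); it reports which global names a function actually references.

from dill.detect import globalvars

a = 10
def scale(x):
    return a * x   # refers to the global 'a'

assert set(globalvars(scale)) == {'a'}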
env-llmeval/lib/python3.10/site-packages/dill/tests/test_dictviews.py
ADDED
@@ -0,0 +1,39 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Author: Anirudh Vegesana ([email protected])
# Copyright (c) 2021-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/dill/blob/master/LICENSE

import dill
from dill._dill import OLD310, MAPPING_PROXY_TRICK, DictProxyType

def test_dictproxy():
    assert dill.copy(DictProxyType({'a': 2}))

def test_dictviews():
    x = {'a': 1}
    assert dill.copy(x.keys())
    assert dill.copy(x.values())
    assert dill.copy(x.items())

def test_dictproxy_trick():
    if not OLD310 and MAPPING_PROXY_TRICK:
        x = {'a': 1}
        all_views = (x.values(), x.items(), x.keys(), x)
        seperate_views = dill.copy(all_views)
        new_x = seperate_views[-1]
        new_x['b'] = 2
        new_x['c'] = 1
        assert len(new_x) == 3 and len(x) == 1
        assert len(seperate_views[0]) == 3 and len(all_views[0]) == 1
        assert len(seperate_views[1]) == 3 and len(all_views[1]) == 1
        assert len(seperate_views[2]) == 3 and len(all_views[2]) == 1
        assert dict(all_views[1]) == x
        assert dict(seperate_views[1]) == new_x

if __name__ == '__main__':
    test_dictproxy()
    test_dictviews()
    test_dictproxy_trick()
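For contrast with the assertions above, a hedged sketch of why this test exists: the stdlib pickler rejects dict views, while dill.copy round-trips them (assumes dill is installed).

import pickle
import dill

d = {'a': 1}
try:
    pickle.dumps(d.keys())   # stock pickle cannot serialize dict views
except TypeError:
    pass
assert list(dill.copy(d.keys())) == ['a']   # dill reconstructs a usable view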
env-llmeval/lib/python3.10/site-packages/dill/tests/test_extendpickle.py
ADDED
@@ -0,0 +1,53 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2008-2016 California Institute of Technology.
|
5 |
+
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
|
6 |
+
# License: 3-clause BSD. The full license text is available at:
|
7 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
8 |
+
|
9 |
+
import dill as pickle
|
10 |
+
from io import BytesIO as StringIO
|
11 |
+
|
12 |
+
|
13 |
+
def my_fn(x):
|
14 |
+
return x * 17
|
15 |
+
|
16 |
+
|
17 |
+
def test_extend():
|
18 |
+
obj = lambda : my_fn(34)
|
19 |
+
assert obj() == 578
|
20 |
+
|
21 |
+
obj_io = StringIO()
|
22 |
+
pickler = pickle.Pickler(obj_io)
|
23 |
+
pickler.dump(obj)
|
24 |
+
|
25 |
+
obj_str = obj_io.getvalue()
|
26 |
+
|
27 |
+
obj2_io = StringIO(obj_str)
|
28 |
+
unpickler = pickle.Unpickler(obj2_io)
|
29 |
+
obj2 = unpickler.load()
|
30 |
+
|
31 |
+
assert obj2() == 578
|
32 |
+
|
33 |
+
|
34 |
+
def test_isdill():
|
35 |
+
obj_io = StringIO()
|
36 |
+
pickler = pickle.Pickler(obj_io)
|
37 |
+
assert pickle._dill.is_dill(pickler) is True
|
38 |
+
|
39 |
+
pickler = pickle._dill.StockPickler(obj_io)
|
40 |
+
assert pickle._dill.is_dill(pickler) is False
|
41 |
+
|
42 |
+
try:
|
43 |
+
import multiprocess as mp
|
44 |
+
pickler = mp.reduction.ForkingPickler(obj_io)
|
45 |
+
assert pickle._dill.is_dill(pickler, child=True) is True
|
46 |
+
assert pickle._dill.is_dill(pickler, child=False) is False
|
47 |
+
except Exception:
|
48 |
+
pass
|
49 |
+
|
50 |
+
|
51 |
+
if __name__ == '__main__':
|
52 |
+
test_extend()
|
53 |
+
test_isdill()
|
env-llmeval/lib/python3.10/site-packages/dill/tests/test_fglobals.py
ADDED
@@ -0,0 +1,55 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2021-2024 The Uncertainty Quantification Foundation.
|
5 |
+
# License: 3-clause BSD. The full license text is available at:
|
6 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
7 |
+
|
8 |
+
import dill
|
9 |
+
dill.settings['recurse'] = True
|
10 |
+
|
11 |
+
def get_fun_with_strftime():
|
12 |
+
def fun_with_strftime():
|
13 |
+
import datetime
|
14 |
+
return datetime.datetime.strptime("04-01-1943", "%d-%m-%Y").strftime(
|
15 |
+
"%Y-%m-%d %H:%M:%S"
|
16 |
+
)
|
17 |
+
return fun_with_strftime
|
18 |
+
|
19 |
+
|
20 |
+
def get_fun_with_strftime2():
|
21 |
+
import datetime
|
22 |
+
return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
23 |
+
|
24 |
+
|
25 |
+
def test_doc_dill_issue_219():
|
26 |
+
back_fn = dill.loads(dill.dumps(get_fun_with_strftime()))
|
27 |
+
assert back_fn() == "1943-01-04 00:00:00"
|
28 |
+
dupl = dill.loads(dill.dumps(get_fun_with_strftime2))
|
29 |
+
assert dupl() == get_fun_with_strftime2()
|
30 |
+
|
31 |
+
|
32 |
+
def get_fun_with_internal_import():
|
33 |
+
def fun_with_import():
|
34 |
+
import re
|
35 |
+
return re.compile("$")
|
36 |
+
return fun_with_import
|
37 |
+
|
38 |
+
|
39 |
+
def test_method_with_internal_import_should_work():
|
40 |
+
import re
|
41 |
+
back_fn = dill.loads(dill.dumps(get_fun_with_internal_import()))
|
42 |
+
import inspect
|
43 |
+
if hasattr(inspect, 'getclosurevars'):
|
44 |
+
vars = inspect.getclosurevars(back_fn)
|
45 |
+
assert vars.globals == {}
|
46 |
+
assert vars.nonlocals == {}
|
47 |
+
assert back_fn() == re.compile("$")
|
48 |
+
assert "__builtins__" in back_fn.__globals__
|
49 |
+
|
50 |
+
|
51 |
+
if __name__ == "__main__":
|
52 |
+
import sys
|
53 |
+
if (sys.version_info[:3] != (3,10,0) or sys.version_info[3] != 'alpha'):
|
54 |
+
test_doc_dill_issue_219()
|
55 |
+
test_method_with_internal_import_should_work()
|
env-llmeval/lib/python3.10/site-packages/dill/tests/test_functions.py
ADDED
@@ -0,0 +1,141 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
#
|
3 |
+
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
|
4 |
+
# Copyright (c) 2019-2024 The Uncertainty Quantification Foundation.
|
5 |
+
# License: 3-clause BSD. The full license text is available at:
|
6 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
7 |
+
|
8 |
+
import functools
|
9 |
+
import dill
|
10 |
+
import sys
|
11 |
+
dill.settings['recurse'] = True
|
12 |
+
|
13 |
+
|
14 |
+
def function_a(a):
|
15 |
+
return a
|
16 |
+
|
17 |
+
|
18 |
+
def function_b(b, b1):
|
19 |
+
return b + b1
|
20 |
+
|
21 |
+
|
22 |
+
def function_c(c, c1=1):
|
23 |
+
return c + c1
|
24 |
+
|
25 |
+
|
26 |
+
def function_d(d, d1, d2=1):
|
27 |
+
"""doc string"""
|
28 |
+
return d + d1 + d2
|
29 |
+
|
30 |
+
function_d.__module__ = 'a module'
|
31 |
+
|
32 |
+
|
33 |
+
exec('''
|
34 |
+
def function_e(e, *e1, e2=1, e3=2):
|
35 |
+
return e + sum(e1) + e2 + e3''')
|
36 |
+
|
37 |
+
globalvar = 0
|
38 |
+
|
39 |
+
@functools.lru_cache(None)
|
40 |
+
def function_with_cache(x):
|
41 |
+
global globalvar
|
42 |
+
globalvar += x
|
43 |
+
return globalvar
|
44 |
+
|
45 |
+
|
46 |
+
def function_with_unassigned_variable():
|
47 |
+
if False:
|
48 |
+
value = None
|
49 |
+
return (lambda: value)
|
50 |
+
|
51 |
+
|
52 |
+
def test_issue_510():
|
53 |
+
# A very bizarre use of functions and methods that pickle doesn't get
|
54 |
+
# correctly for odd reasons.
|
55 |
+
class Foo:
|
56 |
+
def __init__(self):
|
57 |
+
def f2(self):
|
58 |
+
return self
|
59 |
+
self.f2 = f2.__get__(self)
|
60 |
+
|
61 |
+
import dill, pickletools
|
62 |
+
f = Foo()
|
63 |
+
f1 = dill.copy(f)
|
64 |
+
assert f1.f2() is f1
|
65 |
+
|
66 |
+
|
67 |
+
def test_functions():
|
68 |
+
dumped_func_a = dill.dumps(function_a)
|
69 |
+
assert dill.loads(dumped_func_a)(0) == 0
|
70 |
+
|
71 |
+
dumped_func_b = dill.dumps(function_b)
|
72 |
+
assert dill.loads(dumped_func_b)(1,2) == 3
|
73 |
+
|
74 |
+
dumped_func_c = dill.dumps(function_c)
|
75 |
+
assert dill.loads(dumped_func_c)(1) == 2
|
76 |
+
assert dill.loads(dumped_func_c)(1, 2) == 3
|
77 |
+
|
78 |
+
dumped_func_d = dill.dumps(function_d)
|
79 |
+
assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__
|
80 |
+
assert dill.loads(dumped_func_d).__module__ == function_d.__module__
|
81 |
+
assert dill.loads(dumped_func_d)(1, 2) == 4
|
82 |
+
assert dill.loads(dumped_func_d)(1, 2, 3) == 6
|
83 |
+
assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6
|
84 |
+
|
85 |
+
function_with_cache(1)
|
86 |
+
globalvar = 0
|
87 |
+
dumped_func_cache = dill.dumps(function_with_cache)
|
88 |
+
assert function_with_cache(2) == 3
|
89 |
+
assert function_with_cache(1) == 1
|
90 |
+
assert function_with_cache(3) == 6
|
91 |
+
assert function_with_cache(2) == 3
|
92 |
+
|
93 |
+
empty_cell = function_with_unassigned_variable()
|
94 |
+
cell_copy = dill.loads(dill.dumps(empty_cell))
|
95 |
+
assert 'empty' in str(cell_copy.__closure__[0])
|
96 |
+
try:
|
97 |
+
cell_copy()
|
98 |
+
except Exception:
|
99 |
+
# this is good
|
100 |
+
pass
|
101 |
+
else:
|
102 |
+
raise AssertionError('cell_copy() did not read an empty cell')
|
103 |
+
|
104 |
+
exec('''
|
105 |
+
dumped_func_e = dill.dumps(function_e)
|
106 |
+
assert dill.loads(dumped_func_e)(1, 2) == 6
|
107 |
+
assert dill.loads(dumped_func_e)(1, 2, 3) == 9
|
108 |
+
assert dill.loads(dumped_func_e)(1, 2, e2=3) == 8
|
109 |
+
assert dill.loads(dumped_func_e)(1, 2, e2=3, e3=4) == 10
|
110 |
+
assert dill.loads(dumped_func_e)(1, 2, 3, e2=4) == 12
|
111 |
+
assert dill.loads(dumped_func_e)(1, 2, 3, e2=4, e3=5) == 15''')
|
112 |
+
|
113 |
+
def test_code_object():
|
114 |
+
import warnings
|
115 |
+
from dill._dill import ALL_CODE_PARAMS, CODE_PARAMS, CODE_VERSION, _create_code
|
116 |
+
code = function_c.__code__
|
117 |
+
warnings.filterwarnings('ignore', category=DeprecationWarning) # issue 597
|
118 |
+
LNOTAB = getattr(code, 'co_lnotab', b'')
|
119 |
+
if warnings.filters: del warnings.filters[0]
|
120 |
+
fields = {f: getattr(code, 'co_'+f) for f in CODE_PARAMS}
|
121 |
+
fields.setdefault('posonlyargcount', 0) # python >= 3.8
|
122 |
+
fields.setdefault('lnotab', LNOTAB) # python <= 3.9
|
123 |
+
fields.setdefault('linetable', b'') # python >= 3.10
|
124 |
+
fields.setdefault('qualname', fields['name']) # python >= 3.11
|
125 |
+
fields.setdefault('exceptiontable', b'') # python >= 3.11
|
126 |
+
fields.setdefault('endlinetable', None) # python == 3.11a
|
127 |
+
fields.setdefault('columntable', None) # python == 3.11a
|
128 |
+
|
129 |
+
for version, _, params in ALL_CODE_PARAMS:
|
130 |
+
args = tuple(fields[p] for p in params.split())
|
131 |
+
try:
|
132 |
+
_create_code(*args)
|
133 |
+
if version >= (3,10):
|
134 |
+
_create_code(fields['lnotab'], *args)
|
135 |
+
except Exception as error:
|
136 |
+
raise Exception("failed to construct code object with format version {}".format(version)) from error
|
137 |
+
|
138 |
+
if __name__ == '__main__':
|
139 |
+
test_functions()
|
140 |
+
test_issue_510()
|
141 |
+
test_code_object()
|
env-llmeval/lib/python3.10/site-packages/dill/tests/test_functors.py
ADDED
@@ -0,0 +1,39 @@
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2016 California Institute of Technology.
# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/dill/blob/master/LICENSE

import functools
import dill
dill.settings['recurse'] = True


def f(a, b, c): # without keywords
    pass


def g(a, b, c=2): # with keywords
    pass


def h(a=1, b=2, c=3): # without args
    pass


def test_functools():
    fp = functools.partial(f, 1, 2)
    gp = functools.partial(g, 1, c=2)
    hp = functools.partial(h, 1, c=2)
    bp = functools.partial(int, base=2)

    assert dill.pickles(fp, safe=True)
    assert dill.pickles(gp, safe=True)
    assert dill.pickles(hp, safe=True)
    assert dill.pickles(bp, safe=True)


if __name__ == '__main__':
    test_functools()
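A hedged round-trip sketch of the same idea the test above checks with dill.pickles: a functools.partial, even one wrapping a lambda, survives dumps/loads (assumes dill is installed).

import functools
import dill

double = functools.partial(lambda x, k: x * k, k=2)
restored = dill.loads(dill.dumps(double))
assert restored(5) == 10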
env-llmeval/lib/python3.10/site-packages/dill/tests/test_logger.py
ADDED
@@ -0,0 +1,70 @@
1 |
+
#!/usr/bin/env python
|
2 |
+
|
3 |
+
# Author: Leonardo Gama (@leogama)
|
4 |
+
# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
|
5 |
+
# License: 3-clause BSD. The full license text is available at:
|
6 |
+
# - https://github.com/uqfoundation/dill/blob/master/LICENSE
|
7 |
+
|
8 |
+
import logging
|
9 |
+
import re
|
10 |
+
import tempfile
|
11 |
+
|
12 |
+
import dill
|
13 |
+
from dill import detect
|
14 |
+
from dill.logger import stderr_handler, adapter as logger
|
15 |
+
|
16 |
+
try:
|
17 |
+
from StringIO import StringIO
|
18 |
+
except ImportError:
|
19 |
+
from io import StringIO
|
20 |
+
|
21 |
+
test_obj = {'a': (1, 2), 'b': object(), 'f': lambda x: x**2, 'big': list(range(10))}
|
22 |
+
|
23 |
+
def test_logging(should_trace):
|
24 |
+
buffer = StringIO()
|
25 |
+
handler = logging.StreamHandler(buffer)
|
26 |
+
logger.addHandler(handler)
|
27 |
+
try:
|
28 |
+
dill.dumps(test_obj)
|
29 |
+
if should_trace:
|
30 |
+
regex = re.compile(r'(\S*┬ \w.*[^)]' # begin pickling object
|
31 |
+
r'|│*└ # \w.* \[\d+ (\wi)?B])' # object written (with size)
|
32 |
+
)
|
33 |
+
for line in buffer.getvalue().splitlines():
|
34 |
+
assert regex.fullmatch(line)
|
35 |
+
return buffer.getvalue()
|
36 |
+
else:
|
37 |
+
assert buffer.getvalue() == ""
|
38 |
+
finally:
|
39 |
+
logger.removeHandler(handler)
|
40 |
+
buffer.close()
|
41 |
+
|
42 |
+
def test_trace_to_file(stream_trace):
|
43 |
+
file = tempfile.NamedTemporaryFile(mode='r')
|
44 |
+
with detect.trace(file.name, mode='w'):
|
45 |
+
dill.dumps(test_obj)
|
46 |
+
file_trace = file.read()
|
47 |
+
file.close()
|
48 |
+
# Apparently, objects can change location in memory...
|
49 |
+
reghex = re.compile(r'0x[0-9A-Za-z]+')
|
50 |
+
file_trace, stream_trace = reghex.sub('0x', file_trace), reghex.sub('0x', stream_trace)
|
51 |
+
# PyPy prints dictionary contents with repr(dict)...
|
52 |
+
regdict = re.compile(r'(dict\.__repr__ of ).*')
|
53 |
+
file_trace, stream_trace = regdict.sub(r'\1{}>', file_trace), regdict.sub(r'\1{}>', stream_trace)
|
54 |
+
assert file_trace == stream_trace
|
55 |
+
|
56 |
+
if __name__ == '__main__':
|
57 |
+
logger.removeHandler(stderr_handler)
|
58 |
+
test_logging(should_trace=False)
|
59 |
+
detect.trace(True)
|
60 |
+
test_logging(should_trace=True)
|
61 |
+
detect.trace(False)
|
62 |
+
test_logging(should_trace=False)
|
63 |
+
|
64 |
+
loglevel = logging.ERROR
|
65 |
+
logger.setLevel(loglevel)
|
66 |
+
with detect.trace():
|
67 |
+
stream_trace = test_logging(should_trace=True)
|
68 |
+
test_logging(should_trace=False)
|
69 |
+
assert logger.getEffectiveLevel() == loglevel
|
70 |
+
test_trace_to_file(stream_trace)
|
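
As a hedged aside, the trace machinery that test_logger.py verifies can be driven directly through dill.detect.trace, which the file itself uses; a minimal sketch (the sample object is illustrative):

    import dill
    from dill import detect

    with detect.trace():          # emit a pickling trace (to stderr by default)
        dill.dumps({'a': (1, 2), 'f': lambda x: x**2})
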
env-llmeval/lib/python3.10/site-packages/dill/tests/test_mixins.py
ADDED
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2008-2016 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+
+import dill
+dill.settings['recurse'] = True
+
+
+def wtf(x,y,z):
+  def zzz():
+    return x
+  def yyy():
+    return y
+  def xxx():
+    return z
+  return zzz,yyy
+
+
+def quad(a=1, b=1, c=0):
+  inverted = [False]
+  def invert():
+    inverted[0] = not inverted[0]
+  def dec(f):
+    def func(*args, **kwds):
+      x = f(*args, **kwds)
+      if inverted[0]: x = -x
+      return a*x**2 + b*x + c
+    func.__wrapped__ = f
+    func.invert = invert
+    func.inverted = inverted
+    return func
+  return dec
+
+
+@quad(a=0,b=2)
+def double_add(*args):
+  return sum(args)
+
+
+fx = sum([1,2,3])
+
+
+### to make it interesting...
+def quad_factory(a=1,b=1,c=0):
+  def dec(f):
+    def func(*args,**kwds):
+      fx = f(*args,**kwds)
+      return a*fx**2 + b*fx + c
+    return func
+  return dec
+
+
+@quad_factory(a=0,b=4,c=0)
+def quadish(x):
+  return x+1
+
+
+quadratic = quad_factory()
+
+
+def doubler(f):
+  def inner(*args, **kwds):
+    fx = f(*args, **kwds)
+    return 2*fx
+  return inner
+
+
+@doubler
+def quadruple(x):
+  return 2*x
+
+
+def test_mixins():
+  # test mixins
+  assert double_add(1,2,3) == 2*fx
+  double_add.invert()
+  assert double_add(1,2,3) == -2*fx
+
+  _d = dill.copy(double_add)
+  assert _d(1,2,3) == -2*fx
+  #_d.invert() #FIXME: fails seemingly randomly
+  #assert _d(1,2,3) == 2*fx
+
+  assert _d.__wrapped__(1,2,3) == fx
+
+  # XXX: issue or feature? in python3.4, inverted is linked through copy
+  if not double_add.inverted[0]:
+    double_add.invert()
+
+  # test some stuff from source and pointers
+  ds = dill.source
+  dd = dill.detect
+  assert ds.getsource(dd.freevars(quadish)['f']) == '@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n  return x+1\n'
+  assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n  return 2*x\n'
+  assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__
+  assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__
+  assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__
+  assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__
+  assert ds.importable(quadruple, source=True) == 'def doubler(f):\n  def inner(*args, **kwds):\n    fx = f(*args, **kwds)\n    return 2*fx\n  return inner\n\n@doubler\ndef quadruple(x):\n  return 2*x\n'
+  #***** #FIXME: this needs work
+  result = ds.importable(quadish, source=True)
+  a,b,c,_,result = result.split('\n',4)
+  assert result == 'def quad_factory(a=1,b=1,c=0):\n  def dec(f):\n    def func(*args,**kwds):\n      fx = f(*args,**kwds)\n      return a*fx**2 + b*fx + c\n    return func\n  return dec\n\n@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n  return x+1\n'
+  assert set([a,b,c]) == set(['a = 0', 'c = 0', 'b = 4'])
+  result = ds.importable(quadratic, source=True)
+  a,b,c,result = result.split('\n',3)
+  assert result == '\ndef dec(f):\n  def func(*args,**kwds):\n    fx = f(*args,**kwds)\n    return a*fx**2 + b*fx + c\n  return func\n'
+  assert set([a,b,c]) == set(['a = 1', 'c = 0', 'b = 1'])
+  result = ds.importable(double_add, source=True)
+  a,b,c,d,_,result = result.split('\n',5)
+  assert result == 'def quad(a=1, b=1, c=0):\n  inverted = [False]\n  def invert():\n    inverted[0] = not inverted[0]\n  def dec(f):\n    def func(*args, **kwds):\n      x = f(*args, **kwds)\n      if inverted[0]: x = -x\n      return a*x**2 + b*x + c\n    func.__wrapped__ = f\n    func.invert = invert\n    func.inverted = inverted\n    return func\n  return dec\n\n@quad(a=0,b=2)\ndef double_add(*args):\n  return sum(args)\n'
+  assert set([a,b,c,d]) == set(['a = 0', 'c = 0', 'b = 2', 'inverted = [True]'])
+  #*****
+
+
+if __name__ == '__main__':
+  test_mixins()
env-llmeval/lib/python3.10/site-packages/dill/tests/test_moduledict.py
ADDED
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2008-2016 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+
+import dill
+dill.settings['recurse'] = True
+
+def f(func):
+    def w(*args):
+        return f(*args)
+    return w
+
+@f
+def f2(): pass
+
+# check when __main__ and on import
+def test_decorated():
+    assert dill.pickles(f2)
+
+
+import doctest
+import logging
+logging.basicConfig(level=logging.DEBUG)
+
+class SomeUnreferencedUnpicklableClass(object):
+    def __reduce__(self):
+        raise Exception
+
+unpicklable = SomeUnreferencedUnpicklableClass()
+
+# This works fine outside of Doctest:
+def test_normal():
+    serialized = dill.dumps(lambda x: x)
+
+# should not try to pickle unpicklable object in __globals__
+def tests():
+    """
+    >>> serialized = dill.dumps(lambda x: x)
+    """
+    return
+
+#print("\n\nRunning Doctest:")
+def test_doctest():
+    doctest.testmod()
+
+
+if __name__ == '__main__':
+    test_decorated()
+    test_normal()
+    test_doctest()
env-llmeval/lib/python3.10/site-packages/dill/tests/test_nested.py
ADDED
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2008-2016 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+"""
+test dill's ability to handle nested functions
+"""
+
+import os
+import math
+
+import dill as pickle
+pickle.settings['recurse'] = True
+
+
+# the nested function: pickle should fail here, but dill is ok.
+def adder(augend):
+    zero = [0]
+
+    def inner(addend):
+        return addend + augend + zero[0]
+    return inner
+
+
+# rewrite the nested function using a class: standard pickle should work here.
+class cadder(object):
+    def __init__(self, augend):
+        self.augend = augend
+        self.zero = [0]
+
+    def __call__(self, addend):
+        return addend + self.augend + self.zero[0]
+
+
+# rewrite again, but as an old-style class
+class c2adder:
+    def __init__(self, augend):
+        self.augend = augend
+        self.zero = [0]
+
+    def __call__(self, addend):
+        return addend + self.augend + self.zero[0]
+
+
+# some basic class stuff
+class basic(object):
+    pass
+
+
+class basic2:
+    pass
+
+
+x = 5
+y = 1
+
+
+def test_basic():
+    a = [0, 1, 2]
+    pa = pickle.dumps(a)
+    pmath = pickle.dumps(math) #XXX: FAILS in pickle
+    pmap = pickle.dumps(map)
+    # ...
+    la = pickle.loads(pa)
+    lmath = pickle.loads(pmath)
+    lmap = pickle.loads(pmap)
+    assert list(map(math.sin, a)) == list(lmap(lmath.sin, la))
+
+
+def test_basic_class():
+    pbasic2 = pickle.dumps(basic2)
+    _pbasic2 = pickle.loads(pbasic2)()
+    pbasic = pickle.dumps(basic)
+    _pbasic = pickle.loads(pbasic)()
+
+
+def test_c2adder():
+    pc2adder = pickle.dumps(c2adder)
+    pc2add5 = pickle.loads(pc2adder)(x)
+    assert pc2add5(y) == x+y
+
+
+def test_pickled_cadder():
+    pcadder = pickle.dumps(cadder)
+    pcadd5 = pickle.loads(pcadder)(x)
+    assert pcadd5(y) == x+y
+
+
+def test_raw_adder_and_inner():
+    add5 = adder(x)
+    assert add5(y) == x+y
+
+
+def test_pickled_adder():
+    padder = pickle.dumps(adder)
+    padd5 = pickle.loads(padder)(x)
+    assert padd5(y) == x+y
+
+
+def test_pickled_inner():
+    add5 = adder(x)
+    pinner = pickle.dumps(add5) #XXX: FAILS in pickle
+    p5add = pickle.loads(pinner)
+    assert p5add(y) == x+y
+
+
+def test_moduledict_where_not_main():
+    try:
+        from . import test_moduledict
+    except ImportError:
+        import test_moduledict
+    name = 'test_moduledict.py'
+    if os.path.exists(name) and os.path.exists(name+'c'):
+        os.remove(name+'c')
+
+    if os.path.exists(name) and hasattr(test_moduledict, "__cached__") \
+       and os.path.exists(test_moduledict.__cached__):
+        os.remove(getattr(test_moduledict, "__cached__"))
+
+    if os.path.exists("__pycache__") and not os.listdir("__pycache__"):
+        os.removedirs("__pycache__")
+
+
+if __name__ == '__main__':
+    test_basic()
+    test_basic_class()
+    test_c2adder()
+    test_pickled_cadder()
+    test_raw_adder_and_inner()
+    test_pickled_adder()
+    test_pickled_inner()
+    test_moduledict_where_not_main()
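
A minimal sketch (not from the diff) of the behavior test_nested.py relies on: the standard pickle module rejects a closure returned by a nested function, while dill serializes it. The names below are illustrative.

    import pickle
    import dill

    def adder(augend):
        def inner(addend):
            return addend + augend
        return inner

    add5 = adder(5)
    try:
        pickle.dumps(add5)                    # stdlib pickle cannot handle the local function
    except (pickle.PicklingError, AttributeError):
        pass
    restored = dill.loads(dill.dumps(add5))   # dill round-trips the closure
    assert restored(2) == 7
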
env-llmeval/lib/python3.10/site-packages/dill/tests/test_pycapsule.py
ADDED
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Author: Anirudh Vegesana ([email protected])
+# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+"""
+test pickling a PyCapsule object
+"""
+
+import dill
+import warnings
+
+test_pycapsule = None
+
+if dill._dill._testcapsule is not None:
+    import ctypes
+    def test_pycapsule():
+        name = ctypes.create_string_buffer(b'dill._testcapsule')
+        capsule = dill._dill._PyCapsule_New(
+            ctypes.cast(dill._dill._PyCapsule_New, ctypes.c_void_p),
+            name,
+            None
+        )
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            dill.copy(capsule)
+        dill._testcapsule = capsule
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            dill.copy(capsule)
+        dill._testcapsule = None
+        try:
+            with warnings.catch_warnings():
+                warnings.simplefilter("ignore", dill.PicklingWarning)
+                dill.copy(capsule)
+        except dill.UnpicklingError:
+            pass
+        else:
+            raise AssertionError("Expected a different error")
+
+if __name__ == '__main__':
+    if test_pycapsule is not None:
+        test_pycapsule()
env-llmeval/lib/python3.10/site-packages/dill/tests/test_registered.py
ADDED
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+"""
+test pickling registered objects
+"""
+
+import dill
+from dill._objects import failures, registered, succeeds
+import warnings
+warnings.filterwarnings('ignore')
+
+def check(d, ok=True):
+    res = []
+    for k,v in d.items():
+        try:
+            z = dill.copy(v)
+            if ok: res.append(k)
+        except:
+            if not ok: res.append(k)
+    return res
+
+fails = check(failures)
+try:
+    assert not bool(fails)
+except AssertionError as e:
+    print("FAILS: %s" % fails)
+    raise e from None
+
+register = check(registered, ok=False)
+try:
+    assert not bool(register)
+except AssertionError as e:
+    print("REGISTER: %s" % register)
+    raise e from None
+
+success = check(succeeds, ok=False)
+try:
+    assert not bool(success)
+except AssertionError as e:
+    print("SUCCESS: %s" % success)
+    raise e from None
+
+import builtins
+import types
+q = dill._dill._reverse_typemap
+p = {k:v for k,v in q.items() if k not in vars(builtins) and k not in vars(types)}
+
+diff = set(p.keys()).difference(registered.keys())
+try:
+    assert not bool(diff)
+except AssertionError as e:
+    print("DIFF: %s" % diff)
+    raise e from None
+
+miss = set(registered.keys()).difference(p.keys())
+try:
+    assert not bool(miss)
+except AssertionError as e:
+    print("MISS: %s" % miss)
+    raise e from None
env-llmeval/lib/python3.10/site-packages/dill/tests/test_restricted.py
ADDED
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+#
+# Author: Kirill Makhonin (@kirillmakhonin)
+# Copyright (c) 2008-2016 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+
+import dill
+
+class RestrictedType:
+    def __bool__(*args, **kwargs):
+        raise Exception('Restricted function')
+
+    __eq__ = __lt__ = __le__ = __ne__ = __gt__ = __ge__ = __hash__ = __bool__
+
+glob_obj = RestrictedType()
+
+def restricted_func():
+    a = glob_obj
+
+def test_function_with_restricted_object():
+    deserialized = dill.loads(dill.dumps(restricted_func, recurse=True))
+
+
+if __name__ == '__main__':
+    test_function_with_restricted_object()
env-llmeval/lib/python3.10/site-packages/dill/tests/test_selected.py
ADDED
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2008-2016 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+"""
+testing some selected object types
+"""
+
+import dill
+dill.settings['recurse'] = True
+
+verbose = False
+
+def test_dict_contents():
+    c = type.__dict__
+    for i,j in c.items():
+        #try:
+        ok = dill.pickles(j)
+        #except Exception:
+        #    print ("FAIL: %s with %s" % (i, dill.detect.errors(j)))
+        if verbose: print ("%s: %s, %s" % (ok, type(j), j))
+        assert ok
+    if verbose: print ("")
+
+def _g(x): yield x;
+
+def _f():
+    try: raise
+    except Exception:
+        from sys import exc_info
+        e, er, tb = exc_info()
+        return er, tb
+
+class _d(object):
+    def _method(self):
+        pass
+
+from dill import objects
+from dill import load_types
+load_types(pickleable=True,unpickleable=False)
+_newclass = objects['ClassObjectType']
+# some clean-up #FIXME: should happen internal to dill
+objects['TemporaryFileType'].close()
+objects['TextWrapperType'].close()
+objects['BufferedRandomType'].close()
+objects['BufferedReaderType'].close()
+objects['BufferedWriterType'].close()
+objects['FileType'].close()
+del objects
+
+# getset_descriptor for new-style classes (fails on '_method', if not __main__)
+def test_class_descriptors():
+    d = _d.__dict__
+    for i in d.values():
+        ok = dill.pickles(i)
+        if verbose: print ("%s: %s, %s" % (ok, type(i), i))
+        assert ok
+    if verbose: print ("")
+    od = _newclass.__dict__
+    for i in od.values():
+        ok = dill.pickles(i)
+        if verbose: print ("%s: %s, %s" % (ok, type(i), i))
+        assert ok
+    if verbose: print ("")
+
+# (__main__) class instance for new-style classes
+def test_class():
+    o = _d()
+    oo = _newclass()
+    ok = dill.pickles(o)
+    if verbose: print ("%s: %s, %s" % (ok, type(o), o))
+    assert ok
+    ok = dill.pickles(oo)
+    if verbose: print ("%s: %s, %s" % (ok, type(oo), oo))
+    assert ok
+    if verbose: print ("")
+
+# frames, generators, and tracebacks (all depend on frame)
+def test_frame_related():
+    g = _g(1)
+    f = g.gi_frame
+    e,t = _f()
+    _is = lambda ok: ok
+    ok = dill.pickles(f)
+    if verbose: print ("%s: %s, %s" % (ok, type(f), f))
+    assert not ok
+    ok = dill.pickles(g)
+    if verbose: print ("%s: %s, %s" % (ok, type(g), g))
+    assert _is(not ok) #XXX: dill fails
+    ok = dill.pickles(t)
+    if verbose: print ("%s: %s, %s" % (ok, type(t), t))
+    assert not ok #XXX: dill fails
+    ok = dill.pickles(e)
+    if verbose: print ("%s: %s, %s" % (ok, type(e), e))
+    assert ok
+    if verbose: print ("")
+
+def test_typing():
+    import typing
+    x = typing.Any
+    assert x == dill.copy(x)
+    x = typing.Dict[int, str]
+    assert x == dill.copy(x)
+    x = typing.List[int]
+    assert x == dill.copy(x)
+    x = typing.Tuple[int, str]
+    assert x == dill.copy(x)
+    x = typing.Tuple[int]
+    assert x == dill.copy(x)
+    x = typing.Tuple[()]
+    assert x == dill.copy(x)
+    x = typing.Tuple[()].copy_with(())
+    assert x == dill.copy(x)
+    return
+
+
+if __name__ == '__main__':
+    test_frame_related()
+    test_dict_contents()
+    test_class()
+    test_class_descriptors()
+    test_typing()
env-llmeval/lib/python3.10/site-packages/dill/tests/test_session.py
ADDED
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+
+# Author: Leonardo Gama (@leogama)
+# Copyright (c) 2022-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+
+import atexit
+import os
+import sys
+import __main__
+from contextlib import suppress
+from io import BytesIO
+
+import dill
+
+session_file = os.path.join(os.path.dirname(__file__), 'session-refimported-%s.pkl')
+
+###################
+#  Child process  #
+###################
+
+def _error_line(error, obj, refimported):
+    import traceback
+    line = traceback.format_exc().splitlines()[-2].replace('[obj]', '['+repr(obj)+']')
+    return "while testing (with refimported=%s): %s" % (refimported, line.lstrip())
+
+if __name__ == '__main__' and len(sys.argv) >= 3 and sys.argv[1] == '--child':
+    # Test session loading in a fresh interpreter session.
+    refimported = (sys.argv[2] == 'True')
+    dill.load_module(session_file % refimported, module='__main__')
+
+    def test_modules(refimported):
+        # FIXME: In this test setting with CPython 3.7, 'calendar' is not included
+        # in sys.modules, independent of the value of refimported.  Tried to
+        # run garbage collection just before loading the session with no luck. It
+        # fails even when preceding them with 'import calendar'.  Needed to run
+        # these kinds of tests in a supbrocess. Failing test sample:
+        # assert globals()['day_name'] is sys.modules['calendar'].__dict__['day_name']
+        try:
+            for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
+                assert globals()[obj].__name__ in sys.modules
+            assert 'calendar' in sys.modules and 'cmath' in sys.modules
+            import calendar, cmath
+
+            for obj in ('Calendar', 'isleap'):
+                assert globals()[obj] is sys.modules['calendar'].__dict__[obj]
+            assert __main__.day_name.__module__ == 'calendar'
+            if refimported:
+                assert __main__.day_name is calendar.day_name
+
+            assert __main__.complex_log is cmath.log
+
+        except AssertionError as error:
+            error.args = (_error_line(error, obj, refimported),)
+            raise
+
+    test_modules(refimported)
+    sys.exit()
+
+####################
+#  Parent process  #
+####################
+
+# Create various kinds of objects to test different internal logics.
+
+## Modules.
+import json                          # top-level module
+import urllib as url                 # top-level module under alias
+from xml import sax                  # submodule
+import xml.dom.minidom as dom        # submodule under alias
+import test_dictviews as local_mod   # non-builtin top-level module
+
+## Imported objects.
+from calendar import Calendar, isleap, day_name   # class, function, other object
+from cmath import log as complex_log              # imported with alias
+
+## Local objects.
+x = 17
+empty = None
+names = ['Alice', 'Bob', 'Carol']
+def squared(x): return x**2
+cubed = lambda x: x**3
+class Person:
+    def __init__(self, name, age):
+        self.name = name
+        self.age = age
+person = Person(names[0], x)
+class CalendarSubclass(Calendar):
+    def weekdays(self):
+        return [day_name[i] for i in self.iterweekdays()]
+cal = CalendarSubclass()
+selfref = __main__
+
+# Setup global namespace for session saving tests.
+class TestNamespace:
+    test_globals = globals().copy()
+    def __init__(self, **extra):
+        self.extra = extra
+    def __enter__(self):
+        self.backup = globals().copy()
+        globals().clear()
+        globals().update(self.test_globals)
+        globals().update(self.extra)
+        return self
+    def __exit__(self, *exc_info):
+        globals().clear()
+        globals().update(self.backup)
+
+def _clean_up_cache(module):
+    cached = module.__file__.split('.', 1)[0] + '.pyc'
+    cached = module.__cached__ if hasattr(module, '__cached__') else cached
+    pycache = os.path.join(os.path.dirname(module.__file__), '__pycache__')
+    for remove, file in [(os.remove, cached), (os.removedirs, pycache)]:
+        with suppress(OSError):
+            remove(file)
+
+atexit.register(_clean_up_cache, local_mod)
+
+def _test_objects(main, globals_copy, refimported):
+    try:
+        main_dict = __main__.__dict__
+        global Person, person, Calendar, CalendarSubclass, cal, selfref
+
+        for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
+            assert globals()[obj].__name__ == globals_copy[obj].__name__
+
+        for obj in ('x', 'empty', 'names'):
+            assert main_dict[obj] == globals_copy[obj]
+
+        for obj in ['squared', 'cubed']:
+            assert main_dict[obj].__globals__ is main_dict
+            assert main_dict[obj](3) == globals_copy[obj](3)
+
+        assert Person.__module__ == __main__.__name__
+        assert isinstance(person, Person)
+        assert person.age == globals_copy['person'].age
+
+        assert issubclass(CalendarSubclass, Calendar)
+        assert isinstance(cal, CalendarSubclass)
+        assert cal.weekdays() == globals_copy['cal'].weekdays()
+
+        assert selfref is __main__
+
+    except AssertionError as error:
+        error.args = (_error_line(error, obj, refimported),)
+        raise
+
+def test_session_main(refimported):
+    """test dump/load_module() for __main__, both in this process and in a subprocess"""
+    extra_objects = {}
+    if refimported:
+        # Test unpickleable imported object in main.
+        from sys import flags
+        extra_objects['flags'] = flags
+
+    with TestNamespace(**extra_objects) as ns:
+        try:
+            # Test session loading in a new session.
+            dill.dump_module(session_file % refimported, refimported=refimported)
+            from dill.tests.__main__ import python, shell, sp
+            error = sp.call([python, __file__, '--child', str(refimported)], shell=shell)
+            if error: sys.exit(error)
+        finally:
+            with suppress(OSError):
+                os.remove(session_file % refimported)
+
+        # Test session loading in the same session.
+        session_buffer = BytesIO()
+        dill.dump_module(session_buffer, refimported=refimported)
+        session_buffer.seek(0)
+        dill.load_module(session_buffer, module='__main__')
+        ns.backup['_test_objects'](__main__, ns.backup, refimported)
+
+def test_session_other():
+    """test dump/load_module() for a module other than __main__"""
+    import test_classdef as module
+    atexit.register(_clean_up_cache, module)
+    module.selfref = module
+    dict_objects = [obj for obj in module.__dict__.keys() if not obj.startswith('__')]
+
+    session_buffer = BytesIO()
+    dill.dump_module(session_buffer, module)
+
+    for obj in dict_objects:
+        del module.__dict__[obj]
+
+    session_buffer.seek(0)
+    dill.load_module(session_buffer, module)
+
+    assert all(obj in module.__dict__ for obj in dict_objects)
+    assert module.selfref is module
+
+def test_runtime_module():
+    from types import ModuleType
+    modname = '__runtime__'
+    runtime = ModuleType(modname)
+    runtime.x = 42
+
+    mod = dill.session._stash_modules(runtime)
+    if mod is not runtime:
+        print("There are objects to save by referenece that shouldn't be:",
+              mod.__dill_imported, mod.__dill_imported_as, mod.__dill_imported_top_level,
+              file=sys.stderr)
+
+    # This is also for code coverage, tests the use case of dump_module(refimported=True)
+    # without imported objects in the namespace. It's a contrived example because
+    # even dill can't be in it. This should work after fixing #462.
+    session_buffer = BytesIO()
+    dill.dump_module(session_buffer, module=runtime, refimported=True)
+    session_dump = session_buffer.getvalue()
+
+    # Pass a new runtime created module with the same name.
+    runtime = ModuleType(modname)  # empty
+    return_val = dill.load_module(BytesIO(session_dump), module=runtime)
+    assert return_val is None
+    assert runtime.__name__ == modname
+    assert runtime.x == 42
+    assert runtime not in sys.modules.values()
+
+    # Pass nothing as main.  load_module() must create it.
+    session_buffer.seek(0)
+    runtime = dill.load_module(BytesIO(session_dump))
+    assert runtime.__name__ == modname
+    assert runtime.x == 42
+    assert runtime not in sys.modules.values()
+
+def test_refimported_imported_as():
+    import collections
+    import concurrent.futures
+    import types
+    import typing
+    mod = sys.modules['__test__'] = types.ModuleType('__test__')
+    dill.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
+    mod.Dict = collections.UserDict             # select by type
+    mod.AsyncCM = typing.AsyncContextManager    # select by __module__
+    mod.thread_exec = dill.executor             # select by __module__ with regex
+
+    session_buffer = BytesIO()
+    dill.dump_module(session_buffer, mod, refimported=True)
+    session_buffer.seek(0)
+    mod = dill.load(session_buffer)
+    del sys.modules['__test__']
+
+    assert set(mod.__dill_imported_as) == {
+        ('collections', 'UserDict', 'Dict'),
+        ('typing', 'AsyncContextManager', 'AsyncCM'),
+        ('dill', 'executor', 'thread_exec'),
+    }
+
+def test_load_module_asdict():
+    with TestNamespace():
+        session_buffer = BytesIO()
+        dill.dump_module(session_buffer)
+
+        global empty, names, x, y
+        x = y = 0  # change x and create y
+        del empty
+        globals_state = globals().copy()
+
+        session_buffer.seek(0)
+        main_vars = dill.load_module_asdict(session_buffer)
+
+        assert main_vars is not globals()
+        assert globals() == globals_state
+
+        assert main_vars['__name__'] == '__main__'
+        assert main_vars['names'] == names
+        assert main_vars['names'] is not names
+        assert main_vars['x'] != x
+        assert 'y' not in main_vars
+        assert 'empty' in main_vars
+
+if __name__ == '__main__':
+    test_session_main(refimported=False)
+    test_session_main(refimported=True)
+    test_session_other()
+    test_runtime_module()
+    test_refimported_imported_as()
+    test_load_module_asdict()
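
A minimal sketch (assumptions: dill >= 0.3.6, where dump_module/load_module exist as used above; the module name 'scratch' is illustrative) of the session round-trip that test_session.py exercises for runtime-created modules:

    from io import BytesIO
    from types import ModuleType
    import dill

    mod = ModuleType('scratch')
    mod.x = 42
    buf = BytesIO()
    dill.dump_module(buf, module=mod)   # save the module's namespace
    buf.seek(0)
    restored = dill.load_module(buf)    # recreate it; not registered in sys.modules
    assert restored.x == 42
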
env-llmeval/lib/python3.10/site-packages/dill/tests/test_source.py
ADDED
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#
+# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
+# Copyright (c) 2008-2016 California Institute of Technology.
+# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
+# License: 3-clause BSD.  The full license text is available at:
+#  - https://github.com/uqfoundation/dill/blob/master/LICENSE
+
+from dill.source import getsource, getname, _wrap, likely_import
+from dill.source import getimportable
+from dill._dill import IS_PYPY
+
+import sys
+PY310b = 0x30a00b1
+
+f = lambda x: x**2
+def g(x): return f(x) - x
+
+def h(x):
+  def g(x): return x
+  return g(x) - x
+
+class Foo(object):
+  def bar(self, x):
+    return x*x+x
+_foo = Foo()
+
+def add(x,y):
+  return x+y
+
+# yes, same as 'f', but things are tricky when it comes to pointers
+squared = lambda x:x**2
+
+class Bar:
+  pass
+_bar = Bar()
+
+# inspect.getsourcelines # dill.source.getblocks
+def test_getsource():
+  assert getsource(f) == 'f = lambda x: x**2\n'
+  assert getsource(g) == 'def g(x): return f(x) - x\n'
+  assert getsource(h) == 'def h(x):\n  def g(x): return x\n  return g(x) - x\n'
+  assert getname(f) == 'f'
+  assert getname(g) == 'g'
+  assert getname(h) == 'h'
+  assert _wrap(f)(4) == 16
+  assert _wrap(g)(4) == 12
+  assert _wrap(h)(4) == 0
+
+  assert getname(Foo) == 'Foo'
+  assert getname(Bar) == 'Bar'
+  assert getsource(Bar) == 'class Bar:\n  pass\n'
+  assert getsource(Foo) == 'class Foo(object):\n  def bar(self, x):\n    return x*x+x\n'
+  #XXX: add getsource for _foo, _bar
+
+# test itself
+def test_itself():
+  assert likely_import(likely_import)=='from dill.source import likely_import\n'
+
+# builtin functions and objects
+def test_builtin():
+  assert likely_import(pow) == 'pow\n'
+  assert likely_import(100) == '100\n'
+  assert likely_import(True) == 'True\n'
+  assert likely_import(pow, explicit=True) == 'from builtins import pow\n'
+  assert likely_import(100, explicit=True) == '100\n'
+  assert likely_import(True, explicit=True) == 'True\n'
+  # this is kinda BS... you can't import a None
+  assert likely_import(None) == 'None\n'
+  assert likely_import(None, explicit=True) == 'None\n'
+
+
+# other imported functions
+def test_imported():
+  from math import sin
+  assert likely_import(sin) == 'from math import sin\n'
+
+# interactively defined functions
+def test_dynamic():
+  assert likely_import(add) == 'from %s import add\n' % __name__
+  # interactive lambdas
+  assert likely_import(squared) == 'from %s import squared\n' % __name__
+
+# classes and class instances
+def test_classes():
+  from io import BytesIO as StringIO
+  y = "from _io import BytesIO\n"
+  x = y if (IS_PYPY or sys.hexversion >= PY310b) else "from io import BytesIO\n"
+  s = StringIO()
+
+  assert likely_import(StringIO) == x
+  assert likely_import(s) == y
+  # interactively defined classes and class instances
+  assert likely_import(Foo) == 'from %s import Foo\n' % __name__
+  assert likely_import(_foo) == 'from %s import Foo\n' % __name__
+
+
+# test getimportable
+def test_importable():
+  assert getimportable(add) == 'from %s import add\n' % __name__
+  assert getimportable(squared) == 'from %s import squared\n' % __name__
+  assert getimportable(Foo) == 'from %s import Foo\n' % __name__
+  assert getimportable(Foo.bar) == 'from %s import bar\n' % __name__
+  assert getimportable(_foo.bar) == 'from %s import bar\n' % __name__
+  assert getimportable(None) == 'None\n'
+  assert getimportable(100) == '100\n'
+
+  assert getimportable(add, byname=False) == 'def add(x,y):\n  return x+y\n'
+  assert getimportable(squared, byname=False) == 'squared = lambda x:x**2\n'
+  assert getimportable(None, byname=False) == 'None\n'
+  assert getimportable(Bar, byname=False) == 'class Bar:\n  pass\n'
+  assert getimportable(Foo, byname=False) == 'class Foo(object):\n  def bar(self, x):\n    return x*x+x\n'
+  assert getimportable(Foo.bar, byname=False) == 'def bar(self, x):\n  return x*x+x\n'
+  assert getimportable(Foo.bar, byname=True) == 'from %s import bar\n' % __name__
+  assert getimportable(Foo.bar, alias='memo', byname=True) == 'from %s import bar as memo\n' % __name__
+  assert getimportable(Foo, alias='memo', byname=True) == 'from %s import Foo as memo\n' % __name__
+  assert getimportable(squared, alias='memo', byname=True) == 'from %s import squared as memo\n' % __name__
+  assert getimportable(squared, alias='memo', byname=False) == 'memo = squared = lambda x:x**2\n'
+  assert getimportable(add, alias='memo', byname=False) == 'def add(x,y):\n  return x+y\n\nmemo = add\n'
+  assert getimportable(None, alias='memo', byname=False) == 'memo = None\n'
+  assert getimportable(100, alias='memo', byname=False) == 'memo = 100\n'
+  assert getimportable(add, explicit=True) == 'from %s import add\n' % __name__
+  assert getimportable(squared, explicit=True) == 'from %s import squared\n' % __name__
+  assert getimportable(Foo, explicit=True) == 'from %s import Foo\n' % __name__
+  assert getimportable(Foo.bar, explicit=True) == 'from %s import bar\n' % __name__
+  assert getimportable(_foo.bar, explicit=True) == 'from %s import bar\n' % __name__
+  assert getimportable(None, explicit=True) == 'None\n'
+  assert getimportable(100, explicit=True) == '100\n'
+
+
+def test_numpy():
+  try:
+    from numpy import array
+    x = array([1,2,3])
+    assert getimportable(x) == 'from numpy import array\narray([1, 2, 3])\n'
+    assert getimportable(array) == 'from %s import array\n' % array.__module__
+    assert getimportable(x, byname=False) == 'from numpy import array\narray([1, 2, 3])\n'
+    assert getimportable(array, byname=False) == 'from %s import array\n' % array.__module__
+  except ImportError: pass
+
+#NOTE: if before likely_import(pow), will cause pow to throw AssertionError
+def test_foo():
+  assert getimportable(_foo, byname=False).startswith("import dill\nclass Foo(object):\n  def bar(self, x):\n    return x*x+x\ndill.loads(")
+
+if __name__ == '__main__':
+  test_getsource()
+  test_itself()
+  test_builtin()
+  test_imported()
+  test_dynamic()
+  test_classes()
+  test_importable()
+  test_numpy()
+  test_foo()
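
A minimal sketch (not from the diff; the function add_two is illustrative) of the dill.source helpers that test_source.py covers:

    import dill.source as ds

    def add_two(x, y):
        return x + y

    print(ds.getsource(add_two))      # the function's source text
    print(ds.getimportable(add_two))  # e.g. 'from __main__ import add_two\n' when run as a script
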
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (1.07 kB)
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_api.cpython-310.pyc
ADDED
Binary file (11.3 kB)
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_soft.cpython-310.pyc
ADDED
Binary file (1.56 kB)
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_unix.cpython-310.pyc
ADDED
Binary file (2.13 kB)
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_windows.cpython-310.pyc
ADDED
Binary file (2.07 kB)
env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
+pip
env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/License.txt
ADDED
@@ -0,0 +1,1568 @@
1 |
+
End User License Agreement
|
2 |
+
--------------------------
|
3 |
+
|
4 |
+
|
5 |
+
Preface
|
6 |
+
-------
|
7 |
+
|
8 |
+
The Software License Agreement in Chapter 1 and the Supplement
|
9 |
+
in Chapter 2 contain license terms and conditions that govern
|
10 |
+
the use of NVIDIA software. By accepting this agreement, you
|
11 |
+
agree to comply with all the terms and conditions applicable
|
12 |
+
to the product(s) included herein.
|
13 |
+
|
14 |
+
|
15 |
+
NVIDIA Driver
|
16 |
+
|
17 |
+
|
18 |
+
Description
|
19 |
+
|
20 |
+
This package contains the operating system driver and
|
21 |
+
fundamental system software components for NVIDIA GPUs.
|
22 |
+
|
23 |
+
|
24 |
+
NVIDIA CUDA Toolkit
|
25 |
+
|
26 |
+
|
27 |
+
Description
|
28 |
+
|
29 |
+
The NVIDIA CUDA Toolkit provides command-line and graphical
|
30 |
+
tools for building, debugging and optimizing the performance
|
31 |
+
of applications accelerated by NVIDIA GPUs, runtime and math
|
32 |
+
libraries, and documentation including programming guides,
|
33 |
+
user manuals, and API references.
|
34 |
+
|
35 |
+
|
36 |
+
Default Install Location of CUDA Toolkit
|
37 |
+
|
38 |
+
Windows platform:
|
39 |
+
|
40 |
+
%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
|
41 |
+
|
42 |
+
Linux platform:
|
43 |
+
|
44 |
+
/usr/local/cuda-#.#
|
45 |
+
|
46 |
+
Mac platform:
|
47 |
+
|
48 |
+
/Developer/NVIDIA/CUDA-#.#
|
49 |
+
|
50 |
+
|
51 |
+
NVIDIA CUDA Samples
|
52 |
+
|
53 |
+
|
54 |
+
Description
|
55 |
+
|
56 |
+
This package includes over 100+ CUDA examples that demonstrate
|
57 |
+
various CUDA programming principles, and efficient CUDA
|
58 |
+
implementation of algorithms in specific application domains.
|
59 |
+
|
60 |
+
|
61 |
+
Default Install Location of CUDA Samples
|
62 |
+
|
63 |
+
Windows platform:
|
64 |
+
|
65 |
+
%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
|
66 |
+
|
67 |
+
Linux platform:
|
68 |
+
|
69 |
+
/usr/local/cuda-#.#/samples
|
70 |
+
|
71 |
+
and
|
72 |
+
|
73 |
+
$HOME/NVIDIA_CUDA-#.#_Samples
|
74 |
+
|
75 |
+
Mac platform:
|
76 |
+
|
77 |
+
/Developer/NVIDIA/CUDA-#.#/samples
|
78 |
+
|
79 |
+
|
80 |
+
NVIDIA Nsight Visual Studio Edition (Windows only)
|
81 |
+
|
82 |
+
|
83 |
+
Description
|
84 |
+
|
85 |
+
NVIDIA Nsight Development Platform, Visual Studio Edition is a
|
86 |
+
development environment integrated into Microsoft Visual
|
87 |
+
Studio that provides tools for debugging, profiling, analyzing
|
88 |
+
and optimizing your GPU computing and graphics applications.
|
89 |
+
|
90 |
+
|
91 |
+
Default Install Location of Nsight Visual Studio Edition
|
92 |
+
|
93 |
+
Windows platform:
|
94 |
+
|
95 |
+
%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
|
96 |
+
|
97 |
+
|
98 |
+
1. License Agreement for NVIDIA Software Development Kits
|
99 |
+
---------------------------------------------------------
|
100 |
+
|
101 |
+
|
102 |
+
Release Date: July 26, 2018
|
103 |
+
---------------------------
|
104 |
+
|
105 |
+
|
106 |
+
Important NoticeRead before downloading, installing,
|
107 |
+
copying or using the licensed software:
|
108 |
+
-------------------------------------------------------
|
109 |
+
|
110 |
+
This license agreement, including exhibits attached
|
111 |
+
("Agreement”) is a legal agreement between you and NVIDIA
|
112 |
+
Corporation ("NVIDIA") and governs your use of a NVIDIA
|
113 |
+
software development kit (“SDK”).
|
114 |
+
|
115 |
+
Each SDK has its own set of software and materials, but here
|
116 |
+
is a description of the types of items that may be included in
|
117 |
+
a SDK: source code, header files, APIs, data sets and assets
|
118 |
+
(examples include images, textures, models, scenes, videos,
|
119 |
+
native API input/output files), binary software, sample code,
|
120 |
+
libraries, utility programs, programming code and
|
121 |
+
documentation.
|
122 |
+
|
123 |
+
This Agreement can be accepted only by an adult of legal age
|
124 |
+
of majority in the country in which the SDK is used.
|
125 |
+
|
126 |
+
If you are entering into this Agreement on behalf of a company
|
127 |
+
or other legal entity, you represent that you have the legal
|
128 |
+
authority to bind the entity to this Agreement, in which case
|
129 |
+
“you” will mean the entity you represent.
|
130 |
+
|
131 |
+
If you don’t have the required age or authority to accept
|
132 |
+
this Agreement, or if you don’t accept all the terms and
|
133 |
+
conditions of this Agreement, do not download, install or use
|
134 |
+
the SDK.
|
135 |
+
|
136 |
+
You agree to use the SDK only for purposes that are permitted
|
137 |
+
by (a) this Agreement, and (b) any applicable law, regulation
|
138 |
+
or generally accepted practices or guidelines in the relevant
|
139 |
+
jurisdictions.
|
140 |
+
|
141 |
+
|
142 |
+
1.1. License


1.1.1. License Grant

Subject to the terms of this Agreement, NVIDIA hereby grants you a non-exclusive, non-transferable license, without the right to sublicense (except as expressly provided in this Agreement) to:

1. Install and use the SDK,

2. Modify and create derivative works of sample source code delivered in the SDK, and

3. Distribute those portions of the SDK that are identified in this Agreement as distributable, as incorporated in object code format into a software application that meets the distribution requirements indicated in this Agreement.


1.1.2. Distribution Requirements

These are the distribution requirements for you to exercise the distribution grant:

1. Your application must have material additional functionality, beyond the included portions of the SDK.

2. The distributable portions of the SDK shall only be accessed by your application.

3. The following notice shall be included in modifications and derivative works of sample source code distributed: "This software contains source code provided by NVIDIA Corporation."

4. Unless a developer tool is identified in this Agreement as distributable, it is delivered for your internal use only.

5. The terms under which you distribute your application must be consistent with the terms of this Agreement, including (without limitation) terms relating to the license grant and license restrictions and protection of NVIDIA's intellectual property rights. Additionally, you agree that you will protect the privacy, security and legal rights of your application users.

6. You agree to notify NVIDIA in writing of any known or suspected distribution or use of the SDK not in compliance with the requirements of this Agreement, and to enforce the terms of your agreements with respect to distributed SDK.


1.1.3. Authorized Users

You may allow employees and contractors of your entity or of your subsidiary(ies) to access and use the SDK from your secure network to perform work on your behalf.

If you are an academic institution you may allow users enrolled or employed by the academic institution to access and use the SDK from your secure network.

You are responsible for the compliance with the terms of this Agreement by your authorized users. If you become aware that your authorized users didn't follow the terms of this Agreement, you agree to take reasonable steps to resolve the non-compliance and prevent new occurrences.


1.1.4. Pre-Release SDK

The SDK versions identified as alpha, beta, preview or otherwise as pre-release, may not be fully functional, may contain errors or design flaws, and may have reduced or different security, privacy, accessibility, availability, and reliability standards relative to commercial versions of NVIDIA software and materials. Use of a pre-release SDK may result in unexpected results, loss of data, project delays or other unpredictable damage or loss.

You may use a pre-release SDK at your own risk, understanding that pre-release SDKs are not intended for use in production or business-critical systems.

NVIDIA may choose not to make available a commercial version of any pre-release SDK. NVIDIA may also choose to abandon development and terminate the availability of a pre-release SDK at any time without liability.


1.1.5. Updates

NVIDIA may, at its option, make available patches, workarounds or other updates to this SDK. Unless the updates are provided with their separate governing terms, they are deemed part of the SDK licensed to you as provided in this Agreement. You agree that the form and content of the SDK that NVIDIA provides may change without prior notice to you. While NVIDIA generally maintains compatibility between versions, NVIDIA may in some cases make changes that introduce incompatibilities in future versions of the SDK.


1.1.6. Third Party Licenses

The SDK may come bundled with, or otherwise include or be distributed with, third party software licensed by a NVIDIA supplier and/or open source software provided under an open source license. Use of third party software is subject to the third-party license terms, or in the absence of third party terms, the terms of this Agreement. Copyright to third party software is held by the copyright holders indicated in the third-party software or license.


1.1.7. Reservation of Rights

NVIDIA reserves all rights, title, and interest in and to the SDK, not expressly granted to you under this Agreement.

1.2. Limitations

The following license limitations apply to your use of the SDK:

1. You may not reverse engineer, decompile or disassemble, or remove copyright or other proprietary notices from any portion of the SDK or copies of the SDK.

2. Except as expressly provided in this Agreement, you may not copy, sell, rent, sublicense, transfer, distribute, modify, or create derivative works of any portion of the SDK. For clarity, you may not distribute or sublicense the SDK as a stand-alone product.

3. Unless you have an agreement with NVIDIA for this purpose, you may not indicate that an application created with the SDK is sponsored or endorsed by NVIDIA.

4. You may not bypass, disable, or circumvent any encryption, security, digital rights management or authentication mechanism in the SDK.

5. You may not use the SDK in any manner that would cause it to become subject to an open source software license. As examples, licenses that require as a condition of use, modification, and/or distribution that the SDK be:

   a. Disclosed or distributed in source code form;

   b. Licensed for the purpose of making derivative works; or

   c. Redistributable at no charge.

6. Unless you have an agreement with NVIDIA for this purpose, you may not use the SDK with any system or application where the use or failure of the system or application can reasonably be expected to threaten or result in personal injury, death, or catastrophic loss. Examples include use in avionics, navigation, military, medical, life support or other life critical applications. NVIDIA does not design, test or manufacture the SDK for these critical uses and NVIDIA shall not be liable to you or any third party, in whole or in part, for any claims or damages arising from such uses.

7. You agree to defend, indemnify and hold harmless NVIDIA and its affiliates, and their respective employees, contractors, agents, officers and directors, from and against any and all claims, damages, obligations, losses, liabilities, costs or debt, fines, restitutions and expenses (including but not limited to attorney's fees and costs incident to establishing the right of indemnification) arising out of or related to your use of the SDK outside of the scope of this Agreement, or not in compliance with its terms.

1.3. Ownership

1. NVIDIA or its licensors hold all rights, title and interest in and to the SDK and its modifications and derivative works, including their respective intellectual property rights, subject to your rights described in this section. This SDK may include software and materials from NVIDIA's licensors, and these licensors are intended third party beneficiaries that may enforce this Agreement with respect to their intellectual property rights.

2. You hold all rights, title and interest in and to your applications and your derivative works of the sample source code delivered in the SDK, including their respective intellectual property rights, subject to NVIDIA's rights described in this section.

3. You may, but don't have to, provide to NVIDIA suggestions, feature requests or other feedback regarding the SDK, including possible enhancements or modifications to the SDK. For any feedback that you voluntarily provide, you hereby grant NVIDIA and its affiliates a perpetual, non-exclusive, worldwide, irrevocable license to use, reproduce, modify, license, sublicense (through multiple tiers of sublicensees), and distribute (through multiple tiers of distributors) it without the payment of any royalties or fees to you. NVIDIA will use feedback at its choice. NVIDIA is constantly looking for ways to improve its products, so you may send feedback to NVIDIA through the developer portal at https://developer.nvidia.com.

1.4. No Warranties

THE SDK IS PROVIDED BY NVIDIA "AS IS" AND "WITH ALL FAULTS." TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF DEALING OR COURSE OF TRADE.


1.5. Limitation of Liability

TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF LIABILITY. IN NO EVENT WILL NVIDIA'S AND ITS AFFILIATES TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS LIMIT.

These exclusions and limitations of liability shall apply regardless if NVIDIA or its affiliates have been advised of the possibility of such damages, and regardless of whether a remedy fails its essential purpose. These exclusions and limitations of liability form an essential basis of the bargain between the parties, and, absent any of these exclusions or limitations of liability, the provisions of this Agreement, including, without limitation, the economic terms, would be substantially different.

1.6. Termination

1. This Agreement will continue to apply until terminated by either you or NVIDIA as described below.

2. If you want to terminate this Agreement, you may do so by stopping to use the SDK.

3. NVIDIA may, at any time, terminate this Agreement if:

   a. (i) you fail to comply with any term of this Agreement and the non-compliance is not fixed within thirty (30) days following notice from NVIDIA (or immediately if you violate NVIDIA's intellectual property rights);

   b. (ii) you commence or participate in any legal proceeding against NVIDIA with respect to the SDK; or

   c. (iii) NVIDIA decides to no longer provide the SDK in a country or, in NVIDIA's sole discretion, the continued use of it is no longer commercially viable.

4. Upon any termination of this Agreement, you agree to promptly discontinue use of the SDK and destroy all copies in your possession or control. Your prior distributions in accordance with this Agreement are not affected by the termination of this Agreement. Upon written request, you will certify in writing that you have complied with your commitments under this section. Upon any termination of this Agreement all provisions survive except for the license grant provisions.

1.7. General

If you wish to assign this Agreement or your rights and obligations, including by merger, consolidation, dissolution or operation of law, contact NVIDIA to ask for permission. Any attempted assignment not approved by NVIDIA in writing shall be void and of no effect. NVIDIA may assign, delegate or transfer this Agreement and its rights and obligations, and if to a non-affiliate you will be notified.

You agree to cooperate with NVIDIA and provide reasonably requested information to verify your compliance with this Agreement.

This Agreement will be governed in all respects by the laws of the United States and of the State of Delaware as those laws are applied to contracts entered into and performed entirely within Delaware by Delaware residents, without regard to the conflicts of laws principles. The United Nations Convention on Contracts for the International Sale of Goods is specifically disclaimed. You agree to all terms of this Agreement in the English language.

The state or federal courts residing in Santa Clara County, California shall have exclusive jurisdiction over any dispute or claim arising out of this Agreement. Notwithstanding this, you agree that NVIDIA shall still be allowed to apply for injunctive remedies or an equivalent type of urgent legal relief in any jurisdiction.

If any court of competent jurisdiction determines that any provision of this Agreement is illegal, invalid or unenforceable, such provision will be construed as limited to the extent necessary to be consistent with and fully enforceable under the law and the remaining provisions will remain in full force and effect. Unless otherwise specified, remedies are cumulative.

Each party acknowledges and agrees that the other is an independent contractor in the performance of this Agreement.

The SDK has been developed entirely at private expense and is "commercial items" consisting of "commercial computer software" and "commercial computer software documentation" provided with RESTRICTED RIGHTS. Use, duplication or disclosure by the U.S. Government or a U.S. Government subcontractor is subject to the restrictions in this Agreement pursuant to DFARS 227.7202-3(a) or as set forth in subparagraphs (c)(1) and (2) of the Commercial Computer Software - Restricted Rights clause at FAR 52.227-19, as applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas Expressway, Santa Clara, CA 95051.

The SDK is subject to United States export laws and regulations. You agree that you will not ship, transfer or export the SDK into any country, or use the SDK in any manner, prohibited by the United States Bureau of Industry and Security or economic sanctions regulations administered by the U.S. Department of Treasury's Office of Foreign Assets Control (OFAC), or any applicable export laws, restrictions or regulations. These laws include restrictions on destinations, end users and end use. By accepting this Agreement, you confirm that you are not a resident or citizen of any country currently embargoed by the U.S. and that you are not otherwise prohibited from receiving the SDK.

Any notice delivered by NVIDIA to you under this Agreement will be delivered via mail, email or fax. You agree that any notices that NVIDIA sends you electronically will satisfy any legal communication requirements. Please direct your legal notices or other correspondence to NVIDIA Corporation, 2788 San Tomas Expressway, Santa Clara, California 95051, United States of America, Attention: Legal Department.

This Agreement and any exhibits incorporated into this Agreement constitute the entire agreement of the parties with respect to the subject matter of this Agreement and supersede all prior negotiations or documentation exchanged between the parties relating to this SDK license. Any additional and/or conflicting terms on documents issued by you are null, void, and invalid. Any amendment or waiver under this Agreement shall be in writing and signed by representatives of both parties.

2. CUDA Toolkit Supplement to Software License Agreement for NVIDIA Software Development Kits
------------------------------------------------------------

Release date: August 16, 2018
-----------------------------

The terms in this supplement govern your use of the NVIDIA CUDA Toolkit SDK under the terms of your license agreement ("Agreement") as modified by this supplement. Capitalized terms used but not defined below have the meaning assigned to them in the Agreement.

This supplement is an exhibit to the Agreement and is incorporated as an integral part of the Agreement. In the event of conflict between the terms in this supplement and the terms in the Agreement, the terms in this supplement govern.


2.1. License Scope

The SDK is licensed for you to develop applications only for use in systems with NVIDIA GPUs.


2.2. Distribution

The portions of the SDK that are distributable under the Agreement are listed in Attachment A.


2.3. Operating Systems

Those portions of the SDK designed exclusively for use on the Linux or FreeBSD operating systems, or other operating systems derived from the source code to these operating systems, may be copied and redistributed for use in accordance with this Agreement, provided that the object code files are not modified in any way (except for unzipping of compressed files).


2.4. Audio and Video Encoders and Decoders

You acknowledge and agree that it is your sole responsibility to obtain any additional third-party licenses required to make, have made, use, have used, sell, import, and offer for sale your products or services that include or incorporate any third-party software and content relating to audio and/or video encoders and decoders from, including but not limited to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., MPEG-LA, and Coding Technologies. NVIDIA does not grant to you under this Agreement any necessary patent or other rights with respect to any audio and/or video encoders and decoders.


2.5. Licensing

If the distribution terms in this Agreement are not suitable for your organization, or for any questions regarding this Agreement, please contact NVIDIA at

2.6. Attachment A

The following portions of the SDK are distributable under the Agreement:

Component: CUDA Runtime
  Windows: cudart.dll, cudart_static.lib, cudadevrt.lib
  Mac OSX: libcudart.dylib, libcudart_static.a, libcudadevrt.a
  Linux: libcudart.so, libcudart_static.a, libcudadevrt.a
  Android: libcudart.so, libcudart_static.a, libcudadevrt.a

Component: CUDA FFT Library
  Windows: cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
  Mac OSX: libcufft.dylib, libcufft_static.a, libcufftw.dylib, libcufftw_static.a
  Linux: libcufft.so, libcufft_static.a, libcufftw.so, libcufftw_static.a
  Android: libcufft.so, libcufft_static.a, libcufftw.so, libcufftw_static.a

Component: CUDA BLAS Library
  Windows: cublas.dll, cublasLt.dll
  Mac OSX: libcublas.dylib, libcublasLt.dylib, libcublas_static.a, libcublasLt_static.a
  Linux: libcublas.so, libcublasLt.so, libcublas_static.a, libcublasLt_static.a
  Android: libcublas.so, libcublasLt.so, libcublas_static.a, libcublasLt_static.a

Component: NVIDIA "Drop-in" BLAS Library
  Windows: nvblas.dll
  Mac OSX: libnvblas.dylib
  Linux: libnvblas.so

Component: CUDA Sparse Matrix Library
  Windows: cusparse.dll, cusparse.lib
  Mac OSX: libcusparse.dylib, libcusparse_static.a
  Linux: libcusparse.so, libcusparse_static.a
  Android: libcusparse.so, libcusparse_static.a

Component: CUDA Linear Solver Library
  Windows: cusolver.dll, cusolver.lib
  Mac OSX: libcusolver.dylib, libcusolver_static.a
  Linux: libcusolver.so, libcusolver_static.a
  Android: libcusolver.so, libcusolver_static.a

Component: CUDA Random Number Generation Library
  Windows: curand.dll, curand.lib
  Mac OSX: libcurand.dylib, libcurand_static.a
  Linux: libcurand.so, libcurand_static.a
  Android: libcurand.so, libcurand_static.a

Component: CUDA Accelerated Graph Library

Component: NVIDIA Performance Primitives Library
  Windows: nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll, nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll, nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib, nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll, nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
  Mac OSX: libnppc.dylib, libnppc_static.a, libnppial.dylib, libnppial_static.a, libnppicc.dylib, libnppicc_static.a, libnppicom.dylib, libnppicom_static.a, libnppidei.dylib, libnppidei_static.a, libnppif.dylib, libnppif_static.a, libnppig.dylib, libnppig_static.a, libnppim.dylib, libnppisu_static.a, libnppitc.dylib, libnppitc_static.a, libnpps.dylib, libnpps_static.a
  Linux: libnppc.so, libnppc_static.a, libnppial.so, libnppial_static.a, libnppicc.so, libnppicc_static.a, libnppicom.so, libnppicom_static.a, libnppidei.so, libnppidei_static.a, libnppif.so, libnppif_static.a, libnppig.so, libnppig_static.a, libnppim.so, libnppim_static.a, libnppist.so, libnppist_static.a, libnppisu.so, libnppisu_static.a, libnppitc.so, libnppitc_static.a, libnpps.so, libnpps_static.a
  Android: libnppc.so, libnppc_static.a, libnppial.so, libnppial_static.a, libnppicc.so, libnppicc_static.a, libnppicom.so, libnppicom_static.a, libnppidei.so, libnppidei_static.a, libnppif.so, libnppif_static.a, libnppig.so, libnppig_static.a, libnppim.so, libnppim_static.a, libnppist.so, libnppist_static.a, libnppisu.so, libnppisu_static.a, libnppitc.so, libnppitc_static.a, libnpps.so, libnpps_static.a

Component: NVIDIA JPEG Library
  Linux: libnvjpeg.so, libnvjpeg_static.a

Component: Internal common library required for statically linking to cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
  Mac OSX: libculibos.a
  Linux: libculibos.a

Component: NVIDIA Runtime Compilation Library and Header
  All: nvrtc.h
  Windows: nvrtc.dll, nvrtc-builtins.dll
  Mac OSX: libnvrtc.dylib, libnvrtc-builtins.dylib
  Linux: libnvrtc.so, libnvrtc-builtins.so

Component: NVIDIA Optimizing Compiler Library
  Windows: nvvm.dll
  Mac OSX: libnvvm.dylib
  Linux: libnvvm.so

Component: NVIDIA Common Device Math Functions Library
  Windows: libdevice.10.bc
  Mac OSX: libdevice.10.bc
  Linux: libdevice.10.bc

Component: CUDA Occupancy Calculation Header Library
  All: cuda_occupancy.h

Component: CUDA Half Precision Headers
  All: cuda_fp16.h, cuda_fp16.hpp

Component: CUDA Profiling Tools Interface (CUPTI) Library
  Windows: cupti.dll
  Mac OSX: libcupti.dylib
  Linux: libcupti.so

Component: NVIDIA Tools Extension Library
  Windows: nvToolsExt.dll, nvToolsExt.lib
  Mac OSX: libnvToolsExt.dylib
  Linux: libnvToolsExt.so

Component: NVIDIA CUDA Driver Libraries
  Linux: libcuda.so, libnvidia-fatbinaryloader.so, libnvidia-ptxjitcompiler.so

The NVIDIA CUDA Driver Libraries are only distributable in applications that meet these criteria:

1. The application was developed starting from a NVIDIA CUDA container obtained from Docker Hub or the NVIDIA GPU Cloud, and

2. The resulting application is packaged as a Docker container and distributed to users on Docker Hub or the NVIDIA GPU Cloud only.

2.7. Attachment B


Additional Licensing Obligations

The following third party components included in the SOFTWARE are licensed to Licensee pursuant to the following terms and conditions:

1. Licensee's use of the GDB third party component is subject to the terms and conditions of GNU GPL v3:

   This product includes copyrighted third-party software licensed under the terms of the GNU General Public License v3 ("GPL v3"). All third-party software packages are copyright by their respective authors. GPL v3 terms and conditions are hereby incorporated into the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt

   Consistent with these licensing requirements, the software listed below is provided under the terms of the specified open source software licenses. To obtain source code for software provided under licenses that require redistribution of source code, including the GNU General Public License (GPL) and GNU Lesser General Public License (LGPL), contact [email protected]. This offer is valid for a period of three (3) years from the date of the distribution of this product by NVIDIA CORPORATION.

   Component    License
   CUDA-GDB     GPL v3

2. Licensee represents and warrants that any and all third party licensing and/or royalty payment obligations in connection with Licensee's use of the H.264 video codecs are solely the responsibility of Licensee.

3. Licensee's use of the Thrust library is subject to the terms and conditions of the Apache License Version 2.0. All third-party software packages are copyright by their respective authors. Apache License Version 2.0 terms and conditions are hereby incorporated into the Agreement by this reference.
   http://www.apache.org/licenses/LICENSE-2.0.html

   In addition, Licensee acknowledges the following notice: Thrust includes source code from the Boost Iterator, Tuple, System, and Random Number libraries.

   Boost Software License - Version 1.0 - August 17th, 2003
   . . . .

   Permission is hereby granted, free of charge, to any person or organization obtaining a copy of the software and accompanying documentation covered by this license (the "Software") to use, reproduce, display, distribute, execute, and transmit the Software, and to prepare derivative works of the Software, and to permit third-parties to whom the Software is furnished to do so, all subject to the following:

   The copyright notices in the Software and this entire statement, including the above license grant, this restriction and the following disclaimer, must be included in all copies of the Software, in whole or in part, and all derivative works of the Software, unless such copies or derivative works are solely in the form of machine-executable object code generated by a source language processor.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

4. Licensee's use of the LLVM third party component is subject to the following terms and conditions:

   ======================================================
   LLVM Release License
   ======================================================
   University of Illinois/NCSA
   Open Source License

   Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
   All rights reserved.

   Developed by:

   LLVM Team

   University of Illinois at Urbana-Champaign

   http://llvm.org

   Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal with the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimers.

   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimers in the documentation and/or other materials provided with the distribution.

   * Neither the names of the LLVM Team, University of Illinois at Urbana-Champaign, nor the names of its contributors may be used to endorse or promote products derived from this Software without specific prior written permission.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE.

5. Licensee's use (e.g. nvprof) of the PCRE third party component is subject to the following terms and conditions:

   ------------
   PCRE LICENCE
   ------------
   PCRE is a library of functions to support regular expressions whose syntax and semantics are as close as possible to those of the Perl 5 language. Release 8 of PCRE is distributed under the terms of the "BSD" licence, as specified below. The documentation for PCRE, supplied in the "doc" directory, is distributed under the same terms as the software itself. The basic library functions are written in C and are freestanding. Also included in the distribution is a set of C++ wrapper functions, and a just-in-time compiler that can be used to optimize pattern matching. These are both optional features that can be omitted when the library is built.

   THE BASIC LIBRARY FUNCTIONS
   ---------------------------
   Written by: Philip Hazel
   Email local part: ph10
   Email domain: cam.ac.uk
   University of Cambridge Computing Service, Cambridge, England.
   Copyright (c) 1997-2012 University of Cambridge
   All rights reserved.

   PCRE JUST-IN-TIME COMPILATION SUPPORT
   -------------------------------------
   Written by: Zoltan Herczeg
   Email local part: hzmester
   Emain domain: freemail.hu
   Copyright(c) 2010-2012 Zoltan Herczeg
   All rights reserved.

   STACK-LESS JUST-IN-TIME COMPILER
   --------------------------------
   Written by: Zoltan Herczeg
   Email local part: hzmester
   Emain domain: freemail.hu
   Copyright(c) 2009-2012 Zoltan Herczeg
   All rights reserved.

   THE C++ WRAPPER FUNCTIONS
   -------------------------
   Contributed by: Google Inc.
   Copyright (c) 2007-2012, Google Inc.
   All rights reserved.

   THE "BSD" LICENCE
   -----------------
   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

   * Neither the name of the University of Cambridge nor the name of Google Inc. nor the names of their contributors may be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

6. Some of the cuBLAS library routines were written by or derived from code written by Vasily Volkov and are subject to the Modified Berkeley Software Distribution License as follows:

   Copyright (c) 2007-2009, Regents of the University of California

   All rights reserved.

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
   * Neither the name of the University of California, Berkeley nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

7. Some of the cuBLAS library routines were written by or derived from code written by Davide Barbieri and are subject to the Modified Berkeley Software Distribution License as follows:

   Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.

   All rights reserved.

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
   * The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

8. Some of the cuBLAS library routines were derived from code developed by the University of Tennessee and are subject to the Modified Berkeley Software Distribution License as follows:

   Copyright (c) 2010 The University of Tennessee.

   All rights reserved.

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer listed in this license in the documentation and/or other materials provided with the distribution.
   * Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

9. Some of the cuBLAS library routines were written by or derived from code written by Jonathan Hogg and are subject to the Modified Berkeley Software Distribution License as follows:

   Copyright (c) 2012, The Science and Technology Facilities Council (STFC).

   All rights reserved.

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
   * Neither the name of the STFC nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

10. Some of the cuBLAS library routines were written by or derived from code written by Ahmad M. Abdelfattah, David Keyes, and Hatem Ltaief, and are subject to the Apache License, Version 2.0, as follows:

   -- (C) Copyright 2013 King Abdullah University of Science and Technology
   Authors:
   Ahmad Abdelfattah ([email protected])
   David Keyes ([email protected])
   Hatem Ltaief ([email protected])

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
   * Neither the name of the King Abdullah University of Science and Technology nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE

11. Some of the cuSPARSE library routines were written by or derived from code written by Li-Wen Chang and are subject to the NCSA Open Source License as follows:

   Copyright (c) 2012, University of Illinois.

   All rights reserved.

   Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu

   Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal with the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimers in the documentation and/or other materials provided with the distribution.
   * Neither the names of IMPACT Group, University of Illinois, nor the names of its contributors may be used to endorse or promote products derived from this Software without specific prior written permission.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE.

12. Some of the cuRAND library routines were written by or derived from code written by Mutsuo Saito and Makoto Matsumoto and are subject to the following license:

   Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima University. All rights reserved.

   Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima University and University of Tokyo. All rights reserved.

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
   * Neither the name of the Hiroshima University nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

13. Some of the cuRAND library routines were derived from code developed by D. E. Shaw Research and are subject to the following license:

   Copyright 2010-2011, D. E. Shaw Research.

   All rights reserved.

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
   * Redistributions of source code must retain the above copyright notice, this list of conditions, and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions, and the following disclaimer in the documentation and/or other materials provided with the distribution.
   * Neither the name of D. E. Shaw Research nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

14. Some of the Math library routines were written by or derived from code developed by Norbert Juffa and are subject to the following license:

   Copyright (c) 2015-2017, Norbert Juffa
   All rights reserved.

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

   1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

   2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

15. Licensee's use of the lz4 third party component is subject to the following terms and conditions:

   Copyright (C) 2011-2013, Yann Collet.
   BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)

   Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

   * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

16. The NPP library uses code from the Boost Math Toolkit, and is subject to the following license:

   Boost Software License - Version 1.0 - August 17th, 2003
   . . . .

   Permission is hereby granted, free of charge, to any person or organization obtaining a copy of the software and accompanying documentation covered by this license (the "Software") to use, reproduce, display, distribute, execute, and transmit the Software, and to prepare derivative works of the Software, and to permit third-parties to whom the Software is furnished to do so, all subject to the following:

   The copyright notices in the Software and this entire statement, including the above license grant, this restriction and the following disclaimer, must be included in all copies of the Software, in whole or in part, and all derivative works of the Software, unless such copies or derivative works are solely in the form of machine-executable object code generated by a source language processor.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
1481 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
1482 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
1483 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
1484 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
1485 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
1486 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
1487 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
1488 |
+
|
1489 |
+
17. Portions of the Nsight Eclipse Edition is subject to the
|
1490 |
+
following license:
|
1491 |
+
|
1492 |
+
The Eclipse Foundation makes available all content in this plug-in
|
1493 |
+
("Content"). Unless otherwise indicated below, the Content is provided
|
1494 |
+
to you under the terms and conditions of the Eclipse Public License
|
1495 |
+
Version 1.0 ("EPL"). A copy of the EPL is available at http://
|
1496 |
+
www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
|
1497 |
+
will mean the Content.
|
1498 |
+
|
1499 |
+
If you did not receive this Content directly from the Eclipse
|
1500 |
+
Foundation, the Content is being redistributed by another party
|
1501 |
+
("Redistributor") and different terms and conditions may apply to your
|
1502 |
+
use of any object code in the Content. Check the Redistributor's
|
1503 |
+
license that was provided with the Content. If no such license exists,
|
1504 |
+
contact the Redistributor. Unless otherwise indicated below, the terms
|
1505 |
+
and conditions of the EPL still apply to any source code in the
|
1506 |
+
Content and such source code may be obtained at http://www.eclipse.org.
|
1507 |
+
|
1508 |
+
18. Some of the cuBLAS library routines uses code from
|
1509 |
+
OpenAI, which is subject to the following license:
|
1510 |
+
|
1511 |
+
License URL
|
1512 |
+
https://github.com/openai/openai-gemm/blob/master/LICENSE
|
1513 |
+
|
1514 |
+
License Text
|
1515 |
+
The MIT License
|
1516 |
+
|
1517 |
+
Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
|
1518 |
+
|
1519 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
1520 |
+
of this software and associated documentation files (the "Software"), to deal
|
1521 |
+
in the Software without restriction, including without limitation the rights
|
1522 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
1523 |
+
copies of the Software, and to permit persons to whom the Software is
|
1524 |
+
furnished to do so, subject to the following conditions:
|
1525 |
+
|
1526 |
+
The above copyright notice and this permission notice shall be included in
|
1527 |
+
all copies or substantial portions of the Software.
|
1528 |
+
|
1529 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
1530 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
1531 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
1532 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
1533 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
1534 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
1535 |
+
THE SOFTWARE.
|
1536 |
+
|
1537 |
+
19. Licensee's use of the Visual Studio Setup Configuration
|
1538 |
+
Samples is subject to the following license:
|
1539 |
+
|
1540 |
+
The MIT License (MIT)
|
1541 |
+
Copyright (C) Microsoft Corporation. All rights reserved.
|
1542 |
+
|
1543 |
+
Permission is hereby granted, free of charge, to any person
|
1544 |
+
obtaining a copy of this software and associated documentation
|
1545 |
+
files (the "Software"), to deal in the Software without restriction,
|
1546 |
+
including without limitation the rights to use, copy, modify, merge,
|
1547 |
+
publish, distribute, sublicense, and/or sell copies of the Software,
|
1548 |
+
and to permit persons to whom the Software is furnished to do so,
|
1549 |
+
subject to the following conditions:
|
1550 |
+
|
1551 |
+
The above copyright notice and this permission notice shall be included
|
1552 |
+
in all copies or substantial portions of the Software.
|
1553 |
+
|
1554 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
1555 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
1556 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
1557 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
1558 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
1559 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
1560 |
+
|
1561 |
+
20. Licensee's use of linmath.h header for CPU functions for
|
1562 |
+
GL vector/matrix operations from lunarG is subject to the
|
1563 |
+
Apache License Version 2.0.
|
1564 |
+
|
1565 |
+
21. The DX12-CUDA sample uses the d3dx12.h header, which is
|
1566 |
+
subject to the MIT license .
|
1567 |
+
|
1568 |
+
-----------------
|
env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/METADATA
ADDED
@@ -0,0 +1,35 @@
+Metadata-Version: 2.1
+Name: nvidia-cufft-cu12
+Version: 11.0.2.54
+Summary: CUFFT native runtime libraries
+Home-page: https://developer.nvidia.com/cuda-zone
+Author: Nvidia CUDA Installer Team
+Author-email: [email protected]
+License: NVIDIA Proprietary Software
+Keywords: cuda,nvidia,runtime,machine learning,deep learning
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: Other/Proprietary License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Scientific/Engineering
+Classifier: Topic :: Scientific/Engineering :: Mathematics
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX :: Linux
+Requires-Python: >=3
+License-File: License.txt
+
+CUFFT native runtime libraries
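
The METADATA file above is standard wheel core metadata. As a minimal sketch (not part of the diff), the same fields can be read back from an installed environment with the standard library; the package name and version come from the METADATA block above, everything else is illustrative:

```python
# Minimal sketch: inspect the wheel metadata shown above via importlib.metadata.
# The distribution name and expected values are taken from the METADATA block.
from importlib import metadata

dist = metadata.distribution("nvidia-cufft-cu12")
print(dist.version)                      # expected: 11.0.2.54
print(dist.metadata["Summary"])          # expected: CUFFT native runtime libraries
print(dist.metadata["Requires-Python"])  # expected: >=3

# Installed files come from RECORD, e.g. the bundled shared libraries:
libs = [p for p in (dist.files or []) if ".so" in p.name]
print(libs)                              # libcufft.so.11, libcufftw.so.11
```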
env-llmeval/lib/python3.10/site-packages/nvidia_cufft_cu12-11.0.2.54.dist-info/RECORD
ADDED
@@ -0,0 +1,20 @@
+nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cufft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/cufft/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cufft/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/cufft/include/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cufft/include/cudalibxt.h,sha256=9GDuRiOzJuO61zRDhIpWpF7XHp8FXSOIlHJNoIMwOZQ,4105
+nvidia/cufft/include/cufft.h,sha256=MHsb41y_Lj55kQTeXt5pn9cgia_7u3hSH4jQm08U8k0,12420
+nvidia/cufft/include/cufftXt.h,sha256=0hetmhszhh_RDjw3vmxDvRncX-jrD0NgbWPRq-oJFeA,11746
+nvidia/cufft/include/cufftw.h,sha256=DBrJQf-dnCWD-OYgdhnEzn8OiAX0U3xdteEaNdhs7mU,19412
+nvidia/cufft/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/cufft/lib/__pycache__/__init__.cpython-310.pyc,,
+nvidia/cufft/lib/libcufft.so.11,sha256=kgPaQUhKk8HaaEVnv21zJoe5sTB-MJglbLaSJdb1deQ,192713344
+nvidia/cufft/lib/libcufftw.so.11,sha256=qwbZ38-viOwrz7TBa3b_C_OycoNw0hLihgf1Ph1A7_U,1614344
+nvidia_cufft_cu12-11.0.2.54.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+nvidia_cufft_cu12-11.0.2.54.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262
+nvidia_cufft_cu12-11.0.2.54.dist-info/METADATA,sha256=6txhpGTOdHVhh--CJT3Tk3_RD1Q3OwfmJaJYyWndqjg,1503
+nvidia_cufft_cu12-11.0.2.54.dist-info/RECORD,,
+nvidia_cufft_cu12-11.0.2.54.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106
+nvidia_cufft_cu12-11.0.2.54.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7
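
Each RECORD row is a CSV triple of path, unpadded urlsafe-base64 SHA-256 digest, and size in bytes; entries such as `*.pyc` files and RECORD itself carry no hash. A minimal sketch of checking those hashes follows (the `verify_record` helper is hypothetical, not part of the wheel; the dist-info path is the one added in this diff):

```python
# Minimal sketch: verify RECORD hashes of an installed wheel against the files
# on disk. verify_record is a hypothetical helper for illustration only.
import base64
import csv
import hashlib
from pathlib import Path

def verify_record(dist_info: Path) -> list[str]:
    """Return the RECORD paths whose on-disk digest does not match."""
    site_packages = dist_info.parent
    mismatches = []
    with open(dist_info / "RECORD", newline="") as fh:
        for path, hash_spec, _size in csv.reader(fh):
            if not hash_spec:  # .pyc entries and RECORD itself carry no hash
                continue
            algo, _, expected = hash_spec.partition("=")
            digest = hashlib.new(algo, (site_packages / path).read_bytes()).digest()
            actual = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
            if actual != expected:
                mismatches.append(path)
    return mismatches

# Usage (path taken from the diff above):
# verify_record(Path("env-llmeval/lib/python3.10/site-packages/"
#                    "nvidia_cufft_cu12-11.0.2.54.dist-info"))
```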