applied-ai-018 committed
Commit 6f1f79a · verified · 1 Parent(s): ea2504d

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. env-llmeval/lib/python3.10/site-packages/aiohttp-3.9.4.dist-info/INSTALLER +1 -0
  2. env-llmeval/lib/python3.10/site-packages/aiohttp-3.9.4.dist-info/LICENSE.txt +13 -0
  3. env-llmeval/lib/python3.10/site-packages/attr/__init__.py +134 -0
  4. env-llmeval/lib/python3.10/site-packages/attr/_cmp.py +150 -0
  5. env-llmeval/lib/python3.10/site-packages/attr/_funcs.py +483 -0
  6. env-llmeval/lib/python3.10/site-packages/attr/_make.py +0 -0
  7. env-llmeval/lib/python3.10/site-packages/attr/_version_info.pyi +9 -0
  8. env-llmeval/lib/python3.10/site-packages/attr/converters.pyi +13 -0
  9. env-llmeval/lib/python3.10/site-packages/attr/exceptions.py +95 -0
  10. env-llmeval/lib/python3.10/site-packages/attr/exceptions.pyi +17 -0
  11. env-llmeval/lib/python3.10/site-packages/attr/filters.pyi +6 -0
  12. env-llmeval/lib/python3.10/site-packages/attr/py.typed +0 -0
  13. env-llmeval/lib/python3.10/site-packages/attr/setters.py +73 -0
  14. env-llmeval/lib/python3.10/site-packages/attr/validators.pyi +88 -0
  15. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__init__.py +20 -0
  16. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/__init__.cpython-310.pyc +0 -0
  17. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_base.cpython-310.pyc +0 -0
  18. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_numpy.cpython-310.pyc +0 -0
  19. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_tensorflow.cpython-310.pyc +0 -0
  20. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_torch.cpython-310.pyc +0 -0
  21. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_base.py +169 -0
  22. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_numpy.py +68 -0
  23. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_tensorflow.py +94 -0
  24. env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_torch.py +200 -0
  25. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-310.pyc +0 -0
  26. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_hf_folder.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_paths.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_runtime.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_safetensors.cpython-310.pyc +0 -0
  31. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-310.pyc +0 -0
  32. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-310.pyc +0 -0
  33. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-310.pyc +0 -0
  34. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/sha.cpython-310.pyc +0 -0
  35. env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/tqdm.cpython-310.pyc +0 -0
  36. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__about__.py +26 -0
  37. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__init__.py +25 -0
  38. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-310.pyc +0 -0
  39. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-310.pyc +0 -0
  40. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc +0 -0
  41. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/_manylinux.py +301 -0
  42. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/_structures.py +67 -0
  43. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/markers.py +304 -0
  44. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/requirements.py +146 -0
  45. env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/specifiers.py +828 -0
  46. env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__main__.py +45 -0
  47. env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/__main__.cpython-310.pyc +0 -0
  48. env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/base.cpython-310.pyc +0 -0
  49. env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/iwslt_xml.cpython-310.pyc +0 -0
  50. env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/plain_text.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/aiohttp-3.9.4.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
+pip
env-llmeval/lib/python3.10/site-packages/aiohttp-3.9.4.dist-info/LICENSE.txt ADDED
@@ -0,0 +1,13 @@
+Copyright aio-libs contributors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
env-llmeval/lib/python3.10/site-packages/attr/__init__.py ADDED
@@ -0,0 +1,134 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Classes Without Boilerplate
+"""
+
+from functools import partial
+from typing import Callable
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._compat import Protocol
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+    NOTHING,
+    Attribute,
+    Factory,
+    attrib,
+    attrs,
+    fields,
+    fields_dict,
+    make_class,
+    validate,
+)
+from ._next_gen import define, field, frozen, mutable
+from ._version_info import VersionInfo
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)
+
+
+class AttrsInstance(Protocol):
+    pass
+
+
+__all__ = [
+    "Attribute",
+    "AttrsInstance",
+    "Factory",
+    "NOTHING",
+    "asdict",
+    "assoc",
+    "astuple",
+    "attr",
+    "attrib",
+    "attributes",
+    "attrs",
+    "cmp_using",
+    "converters",
+    "define",
+    "evolve",
+    "exceptions",
+    "field",
+    "fields",
+    "fields_dict",
+    "filters",
+    "frozen",
+    "get_run_validators",
+    "has",
+    "ib",
+    "make_class",
+    "mutable",
+    "resolve_types",
+    "s",
+    "set_run_validators",
+    "setters",
+    "validate",
+    "validators",
+]
+
+
+def _make_getattr(mod_name: str) -> Callable:
+    """
+    Create a metadata proxy for packaging information that uses *mod_name* in
+    its warnings and errors.
+    """
+
+    def __getattr__(name: str) -> str:
+        dunder_to_metadata = {
+            "__title__": "Name",
+            "__copyright__": "",
+            "__version__": "version",
+            "__version_info__": "version",
+            "__description__": "summary",
+            "__uri__": "",
+            "__url__": "",
+            "__author__": "",
+            "__email__": "",
+            "__license__": "license",
+        }
+        if name not in dunder_to_metadata:
+            msg = f"module {mod_name} has no attribute {name}"
+            raise AttributeError(msg)
+
+        import sys
+        import warnings
+
+        if sys.version_info < (3, 8):
+            from importlib_metadata import metadata
+        else:
+            from importlib.metadata import metadata
+
+        if name not in ("__version__", "__version_info__"):
+            warnings.warn(
+                f"Accessing {mod_name}.{name} is deprecated and will be "
+                "removed in a future release. Use importlib.metadata directly "
+                "to query for attrs's packaging metadata.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        meta = metadata("attrs")
+        if name == "__license__":
+            return "MIT"
+        if name == "__copyright__":
+            return "Copyright (c) 2015 Hynek Schlawack"
+        if name in ("__uri__", "__url__"):
+            return meta["Project-URL"].split(" ", 1)[-1]
+        if name == "__version_info__":
+            return VersionInfo._from_version_string(meta["version"])
+        if name == "__author__":
+            return meta["Author-email"].rsplit(" ", 1)[0]
+        if name == "__email__":
+            return meta["Author-email"].rsplit("<", 1)[1][:-1]
+
+        return meta[dunder_to_metadata[name]]
+
+    return __getattr__
+
+
+__getattr__ = _make_getattr(__name__)
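For orientation, a minimal sketch of the modern API that this `__init__.py` re-exports (`define`, `field`, `asdict`); the `Point` class and its values are illustrative, not part of the commit:

```python
# Minimal sketch of the attrs API surfaced by the module above.
import attr

@attr.define  # modern replacement for @attr.s; generates __init__, __repr__, __eq__, ...
class Point:
    x: int
    y: int = attr.field(default=0)

p = Point(1)
print(p)               # Point(x=1, y=0)
print(attr.asdict(p))  # {'x': 1, 'y': 0}
```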
env-llmeval/lib/python3.10/site-packages/attr/_cmp.py ADDED
@@ -0,0 +1,150 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+    eq=None,
+    lt=None,
+    le=None,
+    gt=None,
+    ge=None,
+    require_same_type=True,
+    class_name="Comparable",
+):
+    """
+    Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
+    and ``cmp`` arguments to customize field comparison.
+
+    The resulting class will have a full set of ordering methods if at least
+    one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
+
+    :param Optional[callable] eq: `callable` used to evaluate equality of two
+        objects.
+    :param Optional[callable] lt: `callable` used to evaluate whether one
+        object is less than another object.
+    :param Optional[callable] le: `callable` used to evaluate whether one
+        object is less than or equal to another object.
+    :param Optional[callable] gt: `callable` used to evaluate whether one
+        object is greater than another object.
+    :param Optional[callable] ge: `callable` used to evaluate whether one
+        object is greater than or equal to another object.
+
+    :param bool require_same_type: When `True`, equality and ordering methods
+        will return `NotImplemented` if objects are not of the same type.
+
+    :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+    See `comparison` for more details.
+
+    .. versionadded:: 21.1.0
+    """
+
+    body = {
+        "__slots__": ["value"],
+        "__init__": _make_init(),
+        "_requirements": [],
+        "_is_comparable_to": _is_comparable_to,
+    }
+
+    # Add operations.
+    num_order_functions = 0
+    has_eq_function = False
+
+    if eq is not None:
+        has_eq_function = True
+        body["__eq__"] = _make_operator("eq", eq)
+        body["__ne__"] = _make_ne()
+
+    if lt is not None:
+        num_order_functions += 1
+        body["__lt__"] = _make_operator("lt", lt)
+
+    if le is not None:
+        num_order_functions += 1
+        body["__le__"] = _make_operator("le", le)
+
+    if gt is not None:
+        num_order_functions += 1
+        body["__gt__"] = _make_operator("gt", gt)
+
+    if ge is not None:
+        num_order_functions += 1
+        body["__ge__"] = _make_operator("ge", ge)
+
+    type_ = types.new_class(
+        class_name, (object,), {}, lambda ns: ns.update(body)
+    )
+
+    # Add same type requirement.
+    if require_same_type:
+        type_._requirements.append(_check_same_type)
+
+    # Add total ordering if at least one operation was defined.
+    if 0 < num_order_functions < 4:
+        if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise early error here to keep a nice stack.
+            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
+            raise ValueError(msg)
+        type_ = functools.total_ordering(type_)
+
+    return type_
+
+
+def _make_init():
+    """
+    Create __init__ method.
+    """
+
+    def __init__(self, value):
+        """
+        Initialize object with *value*.
+        """
+        self.value = value
+
+    return __init__
+
+
+def _make_operator(name, func):
+    """
+    Create operator method.
+    """
+
+    def method(self, other):
+        if not self._is_comparable_to(other):
+            return NotImplemented
+
+        result = func(self.value, other.value)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return result
+
+    method.__name__ = f"__{name}__"
+    method.__doc__ = (
+        f"Return a {_operation_names[name]} b. Computed by attrs."
+    )
+
+    return method
+
+
+def _is_comparable_to(self, other):
+    """
+    Check whether `other` is comparable to `self`.
+    """
+    return all(func(self, other) for func in self._requirements)
+
+
+def _check_same_type(self, other):
+    """
+    Return True if *self* and *other* are of the same type, False otherwise.
+    """
+    return other.value.__class__ is self.value.__class__
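To see how `cmp_using` plugs into `attrs.field`, a short sketch; the NumPy-backed field is illustrative (element-wise array comparison is the classic use case for a custom `eq`):

```python
# Illustrative: customize field equality with cmp_using.
import numpy as np
from attrs import cmp_using, define, field

@define
class Sample:
    # Compare arrays by content instead of by identity.
    data: np.ndarray = field(eq=cmp_using(eq=np.array_equal))

assert Sample(np.array([1, 2])) == Sample(np.array([1, 2]))
```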
env-llmeval/lib/python3.10/site-packages/attr/_funcs.py ADDED
@@ -0,0 +1,483 @@
+# SPDX-License-Identifier: MIT
+
+
+import copy
+
+from ._compat import PY_3_9_PLUS, get_generic_base
+from ._make import NOTHING, _obj_setattr, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+    inst,
+    recurse=True,
+    filter=None,
+    dict_factory=dict,
+    retain_collection_types=False,
+    value_serializer=None,
+):
+    """
+    Return the *attrs* attribute values of *inst* as a dict.
+
+    Optionally recurse into other *attrs*-decorated classes.
+
+    :param inst: Instance of an *attrs*-decorated class.
+    :param bool recurse: Recurse into classes that are also
+        *attrs*-decorated.
+    :param callable filter: A callable whose return code determines whether an
+        attribute or element is included (``True``) or dropped (``False``). Is
+        called with the `attrs.Attribute` as the first argument and the
+        value as the second argument.
+    :param callable dict_factory: A callable to produce dictionaries from. For
+        example, to produce ordered dictionaries instead of normal Python
+        dictionaries, pass in ``collections.OrderedDict``.
+    :param bool retain_collection_types: Do not convert to ``list`` when
+        encountering an attribute whose type is ``tuple`` or ``set``. Only
+        meaningful if ``recurse`` is ``True``.
+    :param Optional[callable] value_serializer: A hook that is called for every
+        attribute or dict key/value. It receives the current instance, field
+        and value and must return the (updated) value. The hook is run *after*
+        the optional *filter* has been applied.
+
+    :rtype: return type of *dict_factory*
+
+    :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs*
+        class.
+
+    .. versionadded:: 16.0.0 *dict_factory*
+    .. versionadded:: 16.1.0 *retain_collection_types*
+    .. versionadded:: 20.3.0 *value_serializer*
+    .. versionadded:: 21.3.0 If a dict has a collection for a key, it is
+        serialized as a tuple.
+    """
+    attrs = fields(inst.__class__)
+    rv = dict_factory()
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+
+        if value_serializer is not None:
+            v = value_serializer(inst, a, v)
+
+        if recurse is True:
+            if has(v.__class__):
+                rv[a.name] = asdict(
+                    v,
+                    recurse=True,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain_collection_types is True else list
+                items = [
+                    _asdict_anything(
+                        i,
+                        is_key=False,
+                        filter=filter,
+                        dict_factory=dict_factory,
+                        retain_collection_types=retain_collection_types,
+                        value_serializer=value_serializer,
+                    )
+                    for i in v
+                ]
+                try:
+                    rv[a.name] = cf(items)
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears, for a namedtuple)
+                    rv[a.name] = cf(*items)
+            elif isinstance(v, dict):
+                df = dict_factory
+                rv[a.name] = df(
+                    (
+                        _asdict_anything(
+                            kk,
+                            is_key=True,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                        _asdict_anything(
+                            vv,
+                            is_key=False,
+                            filter=filter,
+                            dict_factory=df,
+                            retain_collection_types=retain_collection_types,
+                            value_serializer=value_serializer,
+                        ),
+                    )
+                    for kk, vv in v.items()
+                )
+            else:
+                rv[a.name] = v
+        else:
+            rv[a.name] = v
+    return rv
+
+
+def _asdict_anything(
+    val,
+    is_key,
+    filter,
+    dict_factory,
+    retain_collection_types,
+    value_serializer,
+):
+    """
+    ``asdict`` only works on attrs instances, this works on anything.
+    """
+    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+        # Attrs class.
+        rv = asdict(
+            val,
+            recurse=True,
+            filter=filter,
+            dict_factory=dict_factory,
+            retain_collection_types=retain_collection_types,
+            value_serializer=value_serializer,
+        )
+    elif isinstance(val, (tuple, list, set, frozenset)):
+        if retain_collection_types is True:
+            cf = val.__class__
+        elif is_key:
+            cf = tuple
+        else:
+            cf = list
+
+        rv = cf(
+            [
+                _asdict_anything(
+                    i,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=dict_factory,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                )
+                for i in val
+            ]
+        )
+    elif isinstance(val, dict):
+        df = dict_factory
+        rv = df(
+            (
+                _asdict_anything(
+                    kk,
+                    is_key=True,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+                _asdict_anything(
+                    vv,
+                    is_key=False,
+                    filter=filter,
+                    dict_factory=df,
+                    retain_collection_types=retain_collection_types,
+                    value_serializer=value_serializer,
+                ),
+            )
+            for kk, vv in val.items()
+        )
+    else:
+        rv = val
+        if value_serializer is not None:
+            rv = value_serializer(None, None, rv)
+
+    return rv
+
+
+def astuple(
+    inst,
+    recurse=True,
+    filter=None,
+    tuple_factory=tuple,
+    retain_collection_types=False,
+):
+    """
+    Return the *attrs* attribute values of *inst* as a tuple.
+
+    Optionally recurse into other *attrs*-decorated classes.
+
+    :param inst: Instance of an *attrs*-decorated class.
+    :param bool recurse: Recurse into classes that are also
+        *attrs*-decorated.
+    :param callable filter: A callable whose return code determines whether an
+        attribute or element is included (``True``) or dropped (``False``). Is
+        called with the `attrs.Attribute` as the first argument and the
+        value as the second argument.
+    :param callable tuple_factory: A callable to produce tuples from. For
+        example, to produce lists instead of tuples.
+    :param bool retain_collection_types: Do not convert to ``list``
+        or ``dict`` when encountering an attribute whose type is
+        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+        ``True``.
+
+    :rtype: return type of *tuple_factory*
+
+    :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs*
+        class.
+
+    .. versionadded:: 16.2.0
+    """
+    attrs = fields(inst.__class__)
+    rv = []
+    retain = retain_collection_types  # Very long. :/
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+        if recurse is True:
+            if has(v.__class__):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain is True else list
+                items = [
+                    astuple(
+                        j,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                    if has(j.__class__)
+                    else j
+                    for j in v
+                ]
+                try:
+                    rv.append(cf(items))
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears, for a namedtuple)
+                    rv.append(cf(*items))
+            elif isinstance(v, dict):
+                df = v.__class__ if retain is True else dict
+                rv.append(
+                    df(
+                        (
+                            astuple(
+                                kk,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(kk.__class__)
+                            else kk,
+                            astuple(
+                                vv,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(vv.__class__)
+                            else vv,
+                        )
+                        for kk, vv in v.items()
+                    )
+                )
+            else:
+                rv.append(v)
+        else:
+            rv.append(v)
+
+    return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+    """
+    Check whether *cls* is a class with *attrs* attributes.
+
+    :param type cls: Class to introspect.
+    :raise TypeError: If *cls* is not a class.
+
+    :rtype: bool
+    """
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is not None:
+        return True
+
+    # No attrs, maybe it's a specialized generic (A[str])?
+    generic_base = get_generic_base(cls)
+    if generic_base is not None:
+        generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
+        if generic_attrs is not None:
+            # Stick it on here for speed next time.
+            cls.__attrs_attrs__ = generic_attrs
+        return generic_attrs is not None
+    return False
+
+
+def assoc(inst, **changes):
+    """
+    Copy *inst* and apply *changes*.
+
+    This is different from `evolve` that applies the changes to the arguments
+    that create the new instance.
+
+    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
+    doesn't work. Therefore `assoc` is deprecated, but will not be removed.
+
+    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251
+
+    :param inst: Instance of a class with *attrs* attributes.
+    :param changes: Keyword changes in the new copy.
+
+    :return: A copy of inst with *changes* incorporated.
+
+    :raise attrs.exceptions.AttrsAttributeNotFoundError: If *attr_name*
+        couldn't be found on *cls*.
+    :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs*
+        class.
+
+    .. deprecated:: 17.1.0
+        Use `attrs.evolve` instead if you can.
+        This function will not be removed due to the slightly different
+        approach compared to `attrs.evolve`.
+    """
+    new = copy.copy(inst)
+    attrs = fields(inst.__class__)
+    for k, v in changes.items():
+        a = getattr(attrs, k, NOTHING)
+        if a is NOTHING:
+            msg = f"{k} is not an attrs attribute on {new.__class__}."
+            raise AttrsAttributeNotFoundError(msg)
+        _obj_setattr(new, k, v)
+    return new
+
+
+def evolve(*args, **changes):
+    """
+    Create a new instance, based on the first positional argument with
+    *changes* applied.
+
+    :param inst: Instance of a class with *attrs* attributes.
+    :param changes: Keyword changes in the new copy.
+
+    :return: A copy of inst with *changes* incorporated.
+
+    :raise TypeError: If *attr_name* couldn't be found in the class
+        ``__init__``.
+    :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs*
+        class.
+
+    .. versionadded:: 17.1.0
+    .. deprecated:: 23.1.0
+        It is now deprecated to pass the instance using the keyword argument
+        *inst*. It will raise a warning until at least April 2024, after which
+        it will become an error. Always pass the instance as a positional
+        argument.
+    """
+    # Try to get instance by positional argument first.
+    # Use changes otherwise and warn it'll break.
+    if args:
+        try:
+            (inst,) = args
+        except ValueError:
+            msg = f"evolve() takes 1 positional argument, but {len(args)} were given"
+            raise TypeError(msg) from None
+    else:
+        try:
+            inst = changes.pop("inst")
+        except KeyError:
+            msg = "evolve() missing 1 required positional argument: 'inst'"
+            raise TypeError(msg) from None
+
+        import warnings
+
+        warnings.warn(
+            "Passing the instance per keyword argument is deprecated and "
+            "will stop working in, or after, April 2024.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+    cls = inst.__class__
+    attrs = fields(cls)
+    for a in attrs:
+        if not a.init:
+            continue
+        attr_name = a.name  # To deal with private attributes.
+        init_name = a.alias
+        if init_name not in changes:
+            changes[init_name] = getattr(inst, attr_name)
+
+    return cls(**changes)
+
+
+def resolve_types(
+    cls, globalns=None, localns=None, attribs=None, include_extras=True
+):
+    """
+    Resolve any strings and forward annotations in type annotations.
+
+    This is only required if you need concrete types in `Attribute`'s *type*
+    field. In other words, you don't need to resolve your types if you only
+    use them for static type checking.
+
+    With no arguments, names will be looked up in the module in which the class
+    was created. If this is not what you want, e.g. if the name only exists
+    inside a method, you may pass *globalns* or *localns* to specify other
+    dictionaries in which to look up these names. See the docs of
+    `typing.get_type_hints` for more details.
+
+    :param type cls: Class to resolve.
+    :param Optional[dict] globalns: Dictionary containing global variables.
+    :param Optional[dict] localns: Dictionary containing local variables.
+    :param Optional[list] attribs: List of attribs for the given class.
+        This is necessary when calling from inside a ``field_transformer``
+        since *cls* is not an *attrs* class yet.
+    :param bool include_extras: Resolve more accurately, if possible.
+        Pass ``include_extras`` to ``typing.get_type_hints``, if supported by
+        the typing module. On supported Python versions (3.9+), this resolves
+        the types more accurately.
+
+    :raise TypeError: If *cls* is not a class.
+    :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs*
+        class and you didn't pass any attribs.
+    :raise NameError: If types cannot be resolved because of missing variables.
+
+    :returns: *cls* so you can use this function also as a class decorator.
+        Please note that you have to apply it **after** `attrs.define`. That
+        means the decorator has to come in the line **before** `attrs.define`.
+
+    .. versionadded:: 20.1.0
+    .. versionadded:: 21.1.0 *attribs*
+    .. versionadded:: 23.1.0 *include_extras*
+    """
+    # Since calling get_type_hints is expensive we cache whether we've
+    # done it already.
+    if getattr(cls, "__attrs_types_resolved__", None) != cls:
+        import typing
+
+        kwargs = {"globalns": globalns, "localns": localns}
+
+        if PY_3_9_PLUS:
+            kwargs["include_extras"] = include_extras
+
+        hints = typing.get_type_hints(cls, **kwargs)
+        for field in fields(cls) if attribs is None else attribs:
+            if field.name in hints:
+                # Since fields have been frozen we must work around it.
+                _obj_setattr(field, "type", hints[field.name])
+        # We store the class we resolved so that subclasses know they haven't
+        # been resolved.
+        cls.__attrs_types_resolved__ = cls
+
+    # Return the class so you can use it as a decorator too.
+    return cls
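A compact sketch of the three most-used helpers defined above (`asdict`, `astuple`, `evolve`); the classes are illustrative:

```python
# Illustrative round trip through the helpers in _funcs.py.
from attrs import asdict, astuple, define, evolve

@define
class Child:
    name: str

@define
class Parent:
    child: Child
    tags: list

p = Parent(Child("a"), ["x"])
print(asdict(p))           # {'child': {'name': 'a'}, 'tags': ['x']}
print(astuple(p))          # (('a',), ['x'])
print(evolve(p, tags=[]))  # Parent(child=Child(name='a'), tags=[])
```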
env-llmeval/lib/python3.10/site-packages/attr/_make.py ADDED
The diff for this file is too large to render. See raw diff
 
env-llmeval/lib/python3.10/site-packages/attr/_version_info.pyi ADDED
@@ -0,0 +1,9 @@
+class VersionInfo:
+    @property
+    def year(self) -> int: ...
+    @property
+    def minor(self) -> int: ...
+    @property
+    def micro(self) -> int: ...
+    @property
+    def releaselevel(self) -> str: ...
env-llmeval/lib/python3.10/site-packages/attr/converters.pyi ADDED
@@ -0,0 +1,13 @@
+from typing import Callable, TypeVar, overload
+
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
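These stubs type the runtime module `attr/converters.py` (not shown in this view). A hedged sketch of how the converters behave; the `Config` class is illustrative:

```python
# Illustrative: converters applied at __init__ time.
from attrs import define, field
from attrs.converters import default_if_none, to_bool

@define
class Config:
    debug: bool = field(converter=to_bool, default="no")
    retries: int = field(converter=default_if_none(3), default=None)

print(Config())             # Config(debug=False, retries=3)
print(Config(debug="yes"))  # Config(debug=True, retries=3)
```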
env-llmeval/lib/python3.10/site-packages/attr/exceptions.py ADDED
@@ -0,0 +1,95 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+
+class FrozenError(AttributeError):
+    """
+    A frozen/immutable instance or attribute has been attempted to be
+    modified.
+
+    It mirrors the behavior of ``namedtuples`` by using the same error message
+    and subclassing `AttributeError`.
+
+    .. versionadded:: 20.1.0
+    """
+
+    msg = "can't set attribute"
+    args: ClassVar[tuple[str]] = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+    """
+    A frozen instance has been attempted to be modified.
+
+    .. versionadded:: 16.1.0
+    """
+
+
+class FrozenAttributeError(FrozenError):
+    """
+    A frozen attribute has been attempted to be modified.
+
+    .. versionadded:: 20.1.0
+    """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+    """
+    An *attrs* function couldn't find an attribute that the user asked for.
+
+    .. versionadded:: 16.2.0
+    """
+
+
+class NotAnAttrsClassError(ValueError):
+    """
+    A non-*attrs* class has been passed into an *attrs* function.
+
+    .. versionadded:: 16.2.0
+    """
+
+
+class DefaultAlreadySetError(RuntimeError):
+    """
+    A default has been set when defining the field and is attempted to be
+    reset using the decorator.
+
+    .. versionadded:: 17.1.0
+    """
+
+
+class UnannotatedAttributeError(RuntimeError):
+    """
+    A class with ``auto_attribs=True`` has a field without a type annotation.
+
+    .. versionadded:: 17.3.0
+    """
+
+
+class PythonTooOldError(RuntimeError):
+    """
+    It was attempted to use an *attrs* feature that requires a newer Python
+    version.
+
+    .. versionadded:: 18.2.0
+    """
+
+
+class NotCallableError(TypeError):
+    """
+    A field requiring a callable has been set with a value that is not
+    callable.
+
+    .. versionadded:: 19.2.0
+    """
+
+    def __init__(self, msg, value):
+        super(TypeError, self).__init__(msg, value)
+        self.msg = msg
+        self.value = value
+
+    def __str__(self):
+        return str(self.msg)
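The most commonly encountered of these is `FrozenInstanceError`; a minimal sketch (the `Point` class is illustrative):

```python
# Illustrative: mutating a frozen attrs instance raises FrozenInstanceError.
from attr.exceptions import FrozenInstanceError
from attrs import frozen

@frozen
class Point:
    x: int

try:
    Point(1).x = 2
except FrozenInstanceError as e:
    print(e.msg)  # can't set attribute
```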
env-llmeval/lib/python3.10/site-packages/attr/exceptions.pyi ADDED
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+    msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+    msg: str = ...
+    value: Any = ...
+    def __init__(self, msg: str, value: Any) -> None: ...
env-llmeval/lib/python3.10/site-packages/attr/filters.pyi ADDED
@@ -0,0 +1,6 @@
+from typing import Any, Union
+
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ...
env-llmeval/lib/python3.10/site-packages/attr/py.typed ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/attr/setters.py ADDED
@@ -0,0 +1,73 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+    """
+    Run all *setters* and return the return value of the last one.
+
+    .. versionadded:: 20.1.0
+    """
+
+    def wrapped_pipe(instance, attrib, new_value):
+        rv = new_value
+
+        for setter in setters:
+            rv = setter(instance, attrib, rv)
+
+        return rv
+
+    return wrapped_pipe
+
+
+def frozen(_, __, ___):
+    """
+    Prevent an attribute from being modified.
+
+    .. versionadded:: 20.1.0
+    """
+    raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+    """
+    Run *attrib*'s validator on *new_value* if it has one.
+
+    .. versionadded:: 20.1.0
+    """
+    if _config._run_validators is False:
+        return new_value
+
+    v = attrib.validator
+    if not v:
+        return new_value
+
+    v(instance, attrib, new_value)
+
+    return new_value
+
+
+def convert(instance, attrib, new_value):
+    """
+    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+    result.
+
+    .. versionadded:: 20.1.0
+    """
+    c = attrib.converter
+    if c:
+        return c(new_value)
+
+    return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# autodata stopped working, so the docstring is inlined in the API docs.
+NO_OP = object()
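A sketch of wiring these hooks up via `on_setattr`; note that the `pipe(convert, validate)` combination shown here is also what `attrs.define` applies by default. The `Account` class is illustrative:

```python
# Illustrative: re-run converter and validator on every attribute assignment.
from attrs import define, field, setters, validators

@define(on_setattr=setters.pipe(setters.convert, setters.validate))
class Account:
    owner: str = field(converter=str, validator=validators.min_len(1))

acct = Account("alice")
acct.owner = 42    # convert hook coerces to "42", validate hook re-checks it
print(acct.owner)  # 42 (as a string)
```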
env-llmeval/lib/python3.10/site-packages/attr/validators.pyi ADDED
@@ -0,0 +1,88 @@
+from typing import (
+    Any,
+    AnyStr,
+    Callable,
+    Container,
+    ContextManager,
+    Iterable,
+    List,
+    Mapping,
+    Match,
+    Optional,
+    Pattern,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    overload,
+)
+
+from . import _ValidatorType
+from . import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+    type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+    validator: Union[
+        _ValidatorType[_T], List[_ValidatorType[_T]], Tuple[_ValidatorType[_T]]
+    ]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+    regex: Union[Pattern[AnyStr], AnyStr],
+    flags: int = ...,
+    func: Optional[
+        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+    ] = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+    member_validator: _ValidatorArgType[_T],
+    iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+    key_validator: _ValidatorType[_K],
+    value_validator: _ValidatorType[_V],
+    mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
+def not_(
+    validator: _ValidatorType[_T],
+    *,
+    msg: Optional[str] = None,
+    exc_types: Union[Type[Exception], Iterable[Type[Exception]]] = ...,
+) -> _ValidatorType[_T]: ...
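The corresponding runtime module is `attr/validators.py` (not part of this view). A short sketch of the validators these stubs describe; the `User` class is illustrative:

```python
# Illustrative: combining validators at field definition time.
from attrs import define, field
from attrs import validators as v

@define
class User:
    name: str = field(validator=[v.instance_of(str), v.min_len(1)])
    age: int = field(validator=v.and_(v.instance_of(int), v.ge(0)))

User("ada", 36)  # passes
User("", 36)     # raises ValueError from min_len(1)
```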
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__init__.py ADDED
@@ -0,0 +1,20 @@
+# Copyright 2024 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ruff: noqa: F401
+"""Contains helpers to serialize tensors."""
+
+from ._base import StateDictSplit, split_state_dict_into_shards_factory
+from ._numpy import split_numpy_state_dict_into_shards
+from ._tensorflow import split_tf_state_dict_into_shards
+from ._torch import split_torch_state_dict_into_shards
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (537 Bytes).
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_base.cpython-310.pyc ADDED
Binary file (4.69 kB).
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_numpy.cpython-310.pyc ADDED
Binary file (2.3 kB).
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_tensorflow.cpython-310.pyc ADDED
Binary file (3.09 kB).
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/__pycache__/_torch.cpython-310.pyc ADDED
Binary file (6.5 kB).
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_base.py ADDED
@@ -0,0 +1,169 @@
+# Copyright 2024 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains helpers to split tensors into shards."""
+
+from dataclasses import dataclass, field
+from typing import Any, Callable, Dict, List, Optional, TypeVar
+
+from .. import logging
+
+
+TensorT = TypeVar("TensorT")
+TensorSizeFn_T = Callable[[TensorT], int]
+StorageIDFn_T = Callable[[TensorT], Optional[Any]]
+
+MAX_SHARD_SIZE = 5_000_000_000  # 5GB
+FILENAME_PATTERN = "model{suffix}.safetensors"
+
+logger = logging.get_logger(__file__)
+
+
+@dataclass
+class StateDictSplit:
+    is_sharded: bool = field(init=False)
+    metadata: Dict[str, Any]
+    filename_to_tensors: Dict[str, List[str]]
+    tensor_to_filename: Dict[str, str]
+
+    def __post_init__(self):
+        self.is_sharded = len(self.filename_to_tensors) > 1
+
+
+def split_state_dict_into_shards_factory(
+    state_dict: Dict[str, TensorT],
+    *,
+    get_tensor_size: TensorSizeFn_T,
+    get_storage_id: StorageIDFn_T = lambda tensor: None,
+    filename_pattern: str = FILENAME_PATTERN,
+    max_shard_size: int = MAX_SHARD_SIZE,
+) -> StateDictSplit:
+    """
+    Split a model state dictionary into shards so that each shard is smaller than a given size.
+
+    The shards are determined by iterating through the `state_dict` in the order of its keys. There is no optimization
+    made to make each shard as close as possible to the maximum size passed. For example, if the limit is 10GB and we
+    have tensors of sizes [6GB, 6GB, 2GB, 6GB, 2GB, 2GB] they will get sharded as [6GB], [6+2GB], [6+2+2GB] and not
+    [6+2+2GB], [6+2GB], [6GB].
+
+    <Tip warning={true}>
+
+    If one of the model's tensors is bigger than `max_shard_size`, it will end up in its own shard which will have a
+    size greater than `max_shard_size`.
+
+    </Tip>
+
+    Args:
+        state_dict (`Dict[str, Tensor]`):
+            The state dictionary to save.
+        get_tensor_size (`Callable[[Tensor], int]`):
+            A function that returns the size of a tensor in bytes.
+        get_storage_id (`Callable[[Tensor], Optional[Any]]`, *optional*):
+            A function that returns a unique identifier for a tensor storage. Multiple different tensors can share the
+            same underlying storage. This identifier is guaranteed to be unique and constant for this tensor's storage
+            during its lifetime. Two tensor storages with non-overlapping lifetimes may have the same id.
+        filename_pattern (`str`, *optional*):
+            The pattern to generate the file names in which the model will be saved. Pattern must be a string that
+            can be formatted with `filename_pattern.format(suffix=...)` and must contain the keyword `suffix`.
+            Defaults to `"model{suffix}.safetensors"`.
+        max_shard_size (`int` or `str`, *optional*):
+            The maximum size of each shard, in bytes. Defaults to 5GB.
+
+    Returns:
+        [`StateDictSplit`]: A `StateDictSplit` object containing the shards and the index to retrieve them.
+    """
+    storage_id_to_tensors: Dict[Any, List[str]] = {}
+
+    shard_list: List[Dict[str, TensorT]] = []
+    current_shard: Dict[str, TensorT] = {}
+    current_shard_size = 0
+    total_size = 0
+
+    for key, tensor in state_dict.items():
+        # when bnb serialization is used the weights in the state dict can be strings
+        # check: https://github.com/huggingface/transformers/pull/24416 for more details
+        if isinstance(tensor, str):
+            logger.info("Skipping tensor %s as it is a string (bnb serialization)", key)
+            continue
+
+        # If a `tensor` shares the same underlying storage as another tensor, we put `tensor` in the same `block`
+        storage_id = get_storage_id(tensor)
+        if storage_id is not None:
+            if storage_id in storage_id_to_tensors:
+                # We skip this tensor for now and will reassign to correct shard later
+                storage_id_to_tensors[storage_id].append(key)
+                continue
+            else:
+                # This is the first tensor with this storage_id, we create a new entry
+                # in the storage_id_to_tensors dict => we will assign the shard id later
+                storage_id_to_tensors[storage_id] = [key]
+
+        # Compute tensor size
+        tensor_size = get_tensor_size(tensor)
+
+        # If this tensor is bigger than the maximal size, we put it in its own shard
+        if tensor_size > max_shard_size:
+            total_size += tensor_size
+            shard_list.append({key: tensor})
+            continue
+
+        # If this tensor is going to tip over the maximal size, we split.
+        # Current shard already has some tensors, we add it to the list of shards and create a new one.
+        if current_shard_size + tensor_size > max_shard_size:
+            shard_list.append(current_shard)
+            current_shard = {}
+            current_shard_size = 0
+
+        # Add the tensor to the current shard
+        current_shard[key] = tensor
+        current_shard_size += tensor_size
+        total_size += tensor_size
+
+    # Add the last shard
+    if len(current_shard) > 0:
+        shard_list.append(current_shard)
+    nb_shards = len(shard_list)
+
+    # Loop over the tensors that share the same storage and assign them together
+    for storage_id, keys in storage_id_to_tensors.items():
+        # Let's try to find the shard where the first tensor of this storage is and put all tensors in the same shard
+        for shard in shard_list:
+            if keys[0] in shard:
+                for key in keys:
+                    shard[key] = state_dict[key]
+                break
+
+    # If we only have one shard, we return it => no need to build the index
+    if nb_shards == 1:
+        filename = filename_pattern.format(suffix="")
+        return StateDictSplit(
+            metadata={"total_size": total_size},
+            filename_to_tensors={filename: list(state_dict.keys())},
+            tensor_to_filename={key: filename for key in state_dict.keys()},
+        )
+
+    # Now that each tensor is assigned to a shard, let's assign a filename to each shard
+    tensor_name_to_filename = {}
+    filename_to_tensors = {}
+    for idx, shard in enumerate(shard_list):
+        filename = filename_pattern.format(suffix=f"-{idx+1:05d}-of-{nb_shards:05d}")
+        for key in shard:
+            tensor_name_to_filename[key] = filename
+        filename_to_tensors[filename] = list(shard.keys())
+
+    # Build the index and return
+    return StateDictSplit(
+        metadata={"total_size": total_size},
+        filename_to_tensors=filename_to_tensors,
+        tensor_to_filename=tensor_name_to_filename,
+    )
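A minimal sketch of driving the factory directly, using byte strings as stand-in "tensors" and a tiny `max_shard_size` to force a split (all values invented for illustration):

```python
# Illustrative: the greedy sharding logic of split_state_dict_into_shards_factory.
from huggingface_hub.serialization import split_state_dict_into_shards_factory

fake_state_dict = {"w1": b"x" * 6, "w2": b"x" * 6, "w3": b"x" * 2}

split = split_state_dict_into_shards_factory(
    fake_state_dict,
    get_tensor_size=len,  # size in bytes of each stand-in tensor
    max_shard_size=10,    # tiny limit so w2 starts a second shard
)
print(split.is_sharded)           # True
print(split.filename_to_tensors)
# {'model-00001-of-00002.safetensors': ['w1'],
#  'model-00002-of-00002.safetensors': ['w2', 'w3']}
```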
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_numpy.py ADDED
@@ -0,0 +1,68 @@
+# Copyright 2024 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains numpy-specific helpers."""
+
+from typing import TYPE_CHECKING, Dict
+
+from ._base import FILENAME_PATTERN, MAX_SHARD_SIZE, StateDictSplit, split_state_dict_into_shards_factory
+
+
+if TYPE_CHECKING:
+    import numpy as np
+
+
+def split_numpy_state_dict_into_shards(
+    state_dict: Dict[str, "np.ndarray"],
+    *,
+    filename_pattern: str = FILENAME_PATTERN,
+    max_shard_size: int = MAX_SHARD_SIZE,
+) -> StateDictSplit:
+    """
+    Split a model state dictionary into shards so that each shard is smaller than a given size.
+
+    The shards are determined by iterating through the `state_dict` in the order of its keys. There is no optimization
+    made to make each shard as close as possible to the maximum size passed. For example, if the limit is 10GB and we
+    have tensors of sizes [6GB, 6GB, 2GB, 6GB, 2GB, 2GB] they will get sharded as [6GB], [6+2GB], [6+2+2GB] and not
+    [6+2+2GB], [6+2GB], [6GB].
+
+    <Tip warning={true}>
+
+    If one of the model's tensors is bigger than `max_shard_size`, it will end up in its own shard which will have a
+    size greater than `max_shard_size`.
+
+    </Tip>
+
+    Args:
+        state_dict (`Dict[str, np.ndarray]`):
+            The state dictionary to save.
+        filename_pattern (`str`, *optional*):
+            The pattern to generate the file names in which the model will be saved. Pattern must be a string that
+            can be formatted with `filename_pattern.format(suffix=...)` and must contain the keyword `suffix`.
+            Defaults to `"model{suffix}.safetensors"`.
+        max_shard_size (`int` or `str`, *optional*):
+            The maximum size of each shard, in bytes. Defaults to 5GB.
+
+    Returns:
+        [`StateDictSplit`]: A `StateDictSplit` object containing the shards and the index to retrieve them.
+    """
+    return split_state_dict_into_shards_factory(
+        state_dict,
+        max_shard_size=max_shard_size,
+        filename_pattern=filename_pattern,
+        get_tensor_size=get_tensor_size,
+    )
+
+
+def get_tensor_size(tensor: "np.ndarray") -> int:
+    return tensor.nbytes
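Usage is a one-liner once numpy arrays are in hand; a hedged sketch with an invented array:

```python
# Illustrative: sharding a numpy state dict with the helper above.
import numpy as np
from huggingface_hub.serialization import split_numpy_state_dict_into_shards

state_dict = {"emb": np.zeros((1024, 1024), dtype=np.float32)}  # ~4 MB
split = split_numpy_state_dict_into_shards(state_dict)
print(split.is_sharded)          # False: one small tensor, one shard
print(split.tensor_to_filename)  # {'emb': 'model.safetensors'}
```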
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_tensorflow.py ADDED
@@ -0,0 +1,94 @@
+# Copyright 2024 The HuggingFace Team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains tensorflow-specific helpers."""
+
+import math
+import re
+from typing import TYPE_CHECKING, Dict
+
+from ._base import MAX_SHARD_SIZE, StateDictSplit, split_state_dict_into_shards_factory
+
+
+if TYPE_CHECKING:
+    import tensorflow as tf
+
+
+def split_tf_state_dict_into_shards(
+    state_dict: Dict[str, "tf.Tensor"],
+    *,
+    filename_pattern: str = "tf_model{suffix}.h5",
+    max_shard_size: int = MAX_SHARD_SIZE,
+) -> StateDictSplit:
+    """
+    Split a model state dictionary into shards so that each shard is smaller than a given size.
+
+    The shards are determined by iterating through the `state_dict` in the order of its keys. There is no optimization
+    made to make each shard as close as possible to the maximum size passed. For example, if the limit is 10GB and we
+    have tensors of sizes [6GB, 6GB, 2GB, 6GB, 2GB, 2GB] they will get sharded as [6GB], [6+2GB], [6+2+2GB] and not
+    [6+2+2GB], [6+2GB], [6GB].
+
+    <Tip warning={true}>
+
+    If one of the model's tensors is bigger than `max_shard_size`, it will end up in its own shard which will have a
+    size greater than `max_shard_size`.
+
+    </Tip>
+
+    Args:
+        state_dict (`Dict[str, Tensor]`):
+            The state dictionary to save.
+        filename_pattern (`str`, *optional*):
+            The pattern to generate the file names in which the model will be saved. Pattern must be a string that
+            can be formatted with `filename_pattern.format(suffix=...)` and must contain the keyword `suffix`.
+            Defaults to `"tf_model{suffix}.h5"`.
+        max_shard_size (`int` or `str`, *optional*):
+            The maximum size of each shard, in bytes. Defaults to 5GB.
+
+    Returns:
+        [`StateDictSplit`]: A `StateDictSplit` object containing the shards and the index to retrieve them.
+    """
+    return split_state_dict_into_shards_factory(
+        state_dict,
+        max_shard_size=max_shard_size,
+        filename_pattern=filename_pattern,
+        get_tensor_size=get_tensor_size,
+    )
+
+
+def get_tensor_size(tensor: "tf.Tensor") -> int:
+    # Return `math.ceil` since dtype byte size can be a float (e.g., 0.125 for tf.bool).
+    # Better to overestimate than underestimate.
+    return math.ceil(tensor.numpy().size * _dtype_byte_size_tf(tensor.dtype))
+
+
+def _dtype_byte_size_tf(dtype) -> float:
+    """
+    Returns the size (in bytes) occupied by one parameter of type `dtype`.
+    Taken from https://github.com/huggingface/transformers/blob/74d9d0cebb0263a3f8ab9c280569170cc74651d0/src/transformers/modeling_tf_utils.py#L608.
+    NOTE: why not `tensor.numpy().nbytes`?
+    Example:
+    ```py
+    >>> _dtype_byte_size_tf(tf.float32)
+    4
+    ```
+    """
+    import tensorflow as tf
+
+    if dtype == tf.bool:
+        return 1 / 8
+    bit_search = re.search(r"[^\d](\d+)$", dtype.name)
+    if bit_search is None:
+        raise ValueError(f"`dtype` is not a valid dtype: {dtype}.")
+    bit_size = int(bit_search.groups()[0])
+    return bit_size // 8
env-llmeval/lib/python3.10/site-packages/huggingface_hub/serialization/_torch.py ADDED
@@ -0,0 +1,200 @@
1
+ # Copyright 2024 The HuggingFace Team. All rights reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Contains pytorch-specific helpers."""
15
+
16
+ import importlib
17
+ from functools import lru_cache
18
+ from typing import TYPE_CHECKING, Dict, Tuple
19
+
20
+ from ._base import FILENAME_PATTERN, MAX_SHARD_SIZE, StateDictSplit, split_state_dict_into_shards_factory
21
+
22
+
23
+ if TYPE_CHECKING:
24
+ import torch
25
+
26
+
27
+ def split_torch_state_dict_into_shards(
28
+ state_dict: Dict[str, "torch.Tensor"],
29
+ *,
30
+ filename_pattern: str = FILENAME_PATTERN,
31
+ max_shard_size: int = MAX_SHARD_SIZE,
32
+ ) -> StateDictSplit:
33
+ """
34
+ Split a model state dictionary into shards so that each shard is smaller than a given size.
35
+
36
+ The shards are determined by iterating through the `state_dict` in the order of its keys. There is no optimization
37
+ made to make each shard as close as possible to the maximum size passed. For example, if the limit is 10GB and we
38
+ have tensors of sizes [6GB, 6GB, 2GB, 6GB, 2GB, 2GB] they will get sharded as [6GB], [6+2GB], [6+2+2GB] and not
39
+ [6+2+2GB], [6+2GB], [6GB].
40
+
41
+ <Tip warning={true}>
42
+
43
+ If one of the model's tensors is bigger than `max_shard_size`, it will end up in its own shard which will have a
44
+ size greater than `max_shard_size`.
45
+
46
+ </Tip>
47
+
48
+ Args:
49
+ state_dict (`Dict[str, torch.Tensor]`):
50
+ The state dictionary to save.
51
+ filename_pattern (`str`, *optional*):
52
+ The pattern to generate the file names in which the model will be saved. Pattern must be a string that
53
+ can be formatted with `filename_pattern.format(suffix=...)` and must contain the keyword `suffix`.
54
+ Defaults to `"model{suffix}.safetensors"`.
55
+ max_shard_size (`int`, *optional*):
56
+ The maximum size of each shard, in bytes. Defaults to 5GB.
57
+
58
+ Returns:
59
+ [`StateDictSplit`]: A `StateDictSplit` object containing the shards and the index to retrieve them.
60
+
61
+ Example:
62
+ ```py
63
+ >>> import json
+ >>> import torch
+ >>> from typing import Dict
64
+ >>> import os
65
+ >>> from safetensors.torch import save_file as safe_save_file
66
+ >>> from huggingface_hub import split_torch_state_dict_into_shards
67
+
68
+ >>> def save_state_dict(state_dict: Dict[str, torch.Tensor], save_directory: str):
69
+ ... state_dict_split = split_torch_state_dict_into_shards(state_dict)
70
+ ... for filename, tensors in state_dict_split.filename_to_tensors.items():
71
+ ... shard = {tensor: state_dict[tensor] for tensor in tensors}
72
+ ... safe_save_file(
73
+ ... shard,
74
+ ... os.path.join(save_directory, filename),
75
+ ... metadata={"format": "pt"},
76
+ ... )
77
+ ... if state_dict_split.is_sharded:
78
+ ... index = {
79
+ ... "metadata": state_dict_split.metadata,
80
+ ... "weight_map": state_dict_split.tensor_to_filename,
81
+ ... }
82
+ ... with open(os.path.join(save_directory, "model.safetensors.index.json"), "w") as f:
83
+ ... f.write(json.dumps(index, indent=2))
84
+ ```
85
+ """
86
+ return split_state_dict_into_shards_factory(
87
+ state_dict,
88
+ max_shard_size=max_shard_size,
89
+ filename_pattern=filename_pattern,
90
+ get_tensor_size=get_tensor_size,
91
+ get_storage_id=get_storage_id,
92
+ )
93
+
94
+
95
+ def get_storage_id(tensor: "torch.Tensor") -> Tuple["torch.device", int, int]:
96
+ """
97
+ Return unique identifier to a tensor storage.
98
+
99
+ Multiple different tensors can share the same underlying storage. For
100
+ example, "meta" tensors all share the same storage, and thus their identifiers will all be equal. This identifier is
101
+ guaranteed to be unique and constant for this tensor's storage during its lifetime. Two tensor storages with
102
+ non-overlapping lifetimes may have the same id.
103
+
104
+ Taken from https://github.com/huggingface/transformers/blob/1ecf5f7c982d761b4daaa96719d162c324187c64/src/transformers/pytorch_utils.py#L278.
105
+ """
106
+ if tensor.device.type == "xla" and is_torch_tpu_available():
107
+ # NOTE: XLA tensors don't have storage,
108
+ # so use some other unique id to distinguish them.
109
+ # This is an XLA tensor; it must have been created using torch_xla's
110
+ # device, so the following import is safe:
111
+ import torch_xla
112
+
113
+ unique_id = torch_xla._XLAC._xla_get_tensor_id(tensor)
114
+ else:
115
+ unique_id = storage_ptr(tensor)
116
+
117
+ return tensor.device, unique_id, get_storage_size(tensor)
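+ # Illustrative note (not from the original source): two views of one tensor share storage and thus
+ # map to the same id, e.g. get_storage_id(t) == get_storage_id(t[:2]) for t = torch.zeros(4).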
118
+
119
+
120
+ def get_tensor_size(tensor: "torch.Tensor") -> int:
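+ # Logical size in bytes, e.g. a float32 tensor with 10 elements reports 10 * 4 = 40.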
121
+ return tensor.numel() * tensor.element_size()
122
+
123
+
124
+ @lru_cache()
125
+ def is_torch_tpu_available(check_device=True):
126
+ """
127
+ Checks if `torch_xla` is installed and potentially if a TPU is in the environment.
128
+
129
+ Taken from https://github.com/huggingface/transformers/blob/1ecf5f7c982d761b4daaa96719d162c324187c64/src/transformers/utils/import_utils.py#L463.
130
+ """
131
+ if importlib.util.find_spec("torch_xla") is not None:
132
+ if check_device:
133
+ # We need to check if `xla_device` can be found, will raise a RuntimeError if not
134
+ try:
135
+ import torch_xla.core.xla_model as xm
136
+
137
+ _ = xm.xla_device()
138
+ return True
139
+ except RuntimeError:
140
+ return False
141
+ return True
142
+ return False
143
+
144
+
145
+ def storage_ptr(tensor: "torch.Tensor") -> int:
146
+ """
147
+ Taken from https://github.com/huggingface/safetensors/blob/08db34094e9e59e2f9218f2df133b7b4aaff5a99/bindings/python/py_src/safetensors/torch.py#L11C1-L20C21.
148
+ """
149
+ try:
150
+ return tensor.untyped_storage().data_ptr()
151
+ except Exception:
152
+ # Fallback for torch==1.10
153
+ try:
154
+ return tensor.storage().data_ptr()
155
+ except NotImplementedError:
156
+ # Fallback for meta storage
157
+ return 0
158
+
159
+
160
+ def get_storage_size(tensor: "torch.Tensor") -> int:
161
+ """
162
+ Taken from https://github.com/huggingface/safetensors/blob/08db34094e9e59e2f9218f2df133b7b4aaff5a99/bindings/python/py_src/safetensors/torch.py#L31C1-L41C59
163
+ """
164
+ try:
165
+ return tensor.untyped_storage().nbytes()
166
+ except AttributeError:
167
+ # Fallback for torch==1.10
168
+ try:
169
+ return tensor.storage().size() * _get_dtype_size(tensor.dtype)
170
+ except NotImplementedError:
171
+ # Fallback for meta storage
172
+ # On torch >=2.0 this is the tensor size
173
+ return tensor.nelement() * _get_dtype_size(tensor.dtype)
174
+
175
+
176
+ @lru_cache()
177
+ def _get_dtype_size(dtype: "torch.dtype") -> int:
178
+ """
179
+ Taken from https://github.com/huggingface/safetensors/blob/08db34094e9e59e2f9218f2df133b7b4aaff5a99/bindings/python/py_src/safetensors/torch.py#L344
180
+ """
181
+ import torch
182
+
183
+ # torch.float8 formats require 2.1; we do not support these dtypes on earlier versions
184
+ _float8_e4m3fn = getattr(torch, "float8_e4m3fn", None)
185
+ _float8_e5m2 = getattr(torch, "float8_e5m2", None)
186
+ _SIZE = {
187
+ torch.int64: 8,
188
+ torch.float32: 4,
189
+ torch.int32: 4,
190
+ torch.bfloat16: 2,
191
+ torch.float16: 2,
192
+ torch.int16: 2,
193
+ torch.uint8: 1,
194
+ torch.int8: 1,
195
+ torch.bool: 1,
196
+ torch.float64: 8,
197
+ _float8_e4m3fn: 1,
198
+ _float8_e5m2: 1,
199
+ }
200
+ return _SIZE[dtype]
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_cache_manager.cpython-310.pyc ADDED
Binary file (25 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_experimental.cpython-310.pyc ADDED
Binary file (1.93 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_hf_folder.cpython-310.pyc ADDED
Binary file (2.72 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_paths.cpython-310.pyc ADDED
Binary file (3.87 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_runtime.cpython-310.pyc ADDED
Binary file (9.31 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_safetensors.cpython-310.pyc ADDED
Binary file (5.6 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-310.pyc ADDED
Binary file (3.93 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-310.pyc ADDED
Binary file (4.4 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-310.pyc ADDED
Binary file (1.72 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/sha.cpython-310.pyc ADDED
Binary file (1.09 kB). View file
 
env-llmeval/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/tqdm.cpython-310.pyc ADDED
Binary file (5.61 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__about__.py ADDED
@@ -0,0 +1,26 @@
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ __all__ = [
6
+ "__title__",
7
+ "__summary__",
8
+ "__uri__",
9
+ "__version__",
10
+ "__author__",
11
+ "__email__",
12
+ "__license__",
13
+ "__copyright__",
14
+ ]
15
+
16
+ __title__ = "packaging"
17
+ __summary__ = "Core utilities for Python packages"
18
+ __uri__ = "https://github.com/pypa/packaging"
19
+
20
+ __version__ = "21.2"
21
+
22
+ __author__ = "Donald Stufft and individual contributors"
23
+ __email__ = "[email protected]"
24
+
25
+ __license__ = "BSD-2-Clause or Apache-2.0"
26
+ __copyright__ = "2014-2019 %s" % __author__
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__init__.py ADDED
@@ -0,0 +1,25 @@
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ from .__about__ import (
6
+ __author__,
7
+ __copyright__,
8
+ __email__,
9
+ __license__,
10
+ __summary__,
11
+ __title__,
12
+ __uri__,
13
+ __version__,
14
+ )
15
+
16
+ __all__ = [
17
+ "__title__",
18
+ "__summary__",
19
+ "__uri__",
20
+ "__version__",
21
+ "__author__",
22
+ "__email__",
23
+ "__license__",
24
+ "__copyright__",
25
+ ]
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-310.pyc ADDED
Binary file (2.97 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-310.pyc ADDED
Binary file (3.99 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc ADDED
Binary file (22.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/_manylinux.py ADDED
@@ -0,0 +1,301 @@
1
+ import collections
2
+ import functools
3
+ import os
4
+ import re
5
+ import struct
6
+ import sys
7
+ import warnings
8
+ from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
9
+
10
+
11
+ # Python does not provide platform information at sufficient granularity to
12
+ # identify the architecture of the running executable in some cases, so we
13
+ # determine it dynamically by reading the information from the running
14
+ # process. This only applies on Linux, which uses the ELF format.
15
+ class _ELFFileHeader:
16
+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
17
+ class _InvalidELFFileHeader(ValueError):
18
+ """
19
+ An invalid ELF file header was found.
20
+ """
21
+
22
+ ELF_MAGIC_NUMBER = 0x7F454C46
23
+ ELFCLASS32 = 1
24
+ ELFCLASS64 = 2
25
+ ELFDATA2LSB = 1
26
+ ELFDATA2MSB = 2
27
+ EM_386 = 3
28
+ EM_S390 = 22
29
+ EM_ARM = 40
30
+ EM_X86_64 = 62
31
+ EF_ARM_ABIMASK = 0xFF000000
32
+ EF_ARM_ABI_VER5 = 0x05000000
33
+ EF_ARM_ABI_FLOAT_HARD = 0x00000400
34
+
35
+ def __init__(self, file: IO[bytes]) -> None:
36
+ def unpack(fmt: str) -> int:
37
+ try:
38
+ data = file.read(struct.calcsize(fmt))
39
+ result: Tuple[int, ...] = struct.unpack(fmt, data)
40
+ except struct.error:
41
+ raise _ELFFileHeader._InvalidELFFileHeader()
42
+ return result[0]
43
+
44
+ self.e_ident_magic = unpack(">I")
45
+ if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
46
+ raise _ELFFileHeader._InvalidELFFileHeader()
47
+ self.e_ident_class = unpack("B")
48
+ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
49
+ raise _ELFFileHeader._InvalidELFFileHeader()
50
+ self.e_ident_data = unpack("B")
51
+ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
52
+ raise _ELFFileHeader._InvalidELFFileHeader()
53
+ self.e_ident_version = unpack("B")
54
+ self.e_ident_osabi = unpack("B")
55
+ self.e_ident_abiversion = unpack("B")
56
+ self.e_ident_pad = file.read(7)
57
+ format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
58
+ format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
59
+ format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
60
+ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
61
+ self.e_type = unpack(format_h)
62
+ self.e_machine = unpack(format_h)
63
+ self.e_version = unpack(format_i)
64
+ self.e_entry = unpack(format_p)
65
+ self.e_phoff = unpack(format_p)
66
+ self.e_shoff = unpack(format_p)
67
+ self.e_flags = unpack(format_i)
68
+ self.e_ehsize = unpack(format_h)
69
+ self.e_phentsize = unpack(format_h)
70
+ self.e_phnum = unpack(format_h)
71
+ self.e_shentsize = unpack(format_h)
72
+ self.e_shnum = unpack(format_h)
73
+ self.e_shstrndx = unpack(format_h)
74
+
75
+
76
+ def _get_elf_header() -> Optional[_ELFFileHeader]:
77
+ try:
78
+ with open(sys.executable, "rb") as f:
79
+ elf_header = _ELFFileHeader(f)
80
+ except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
81
+ return None
82
+ return elf_header
83
+
84
+
85
+ def _is_linux_armhf() -> bool:
86
+ # hard-float ABI can be detected from the ELF header of the running
87
+ # process
88
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
89
+ elf_header = _get_elf_header()
90
+ if elf_header is None:
91
+ return False
92
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
93
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
94
+ result &= elf_header.e_machine == elf_header.EM_ARM
95
+ result &= (
96
+ elf_header.e_flags & elf_header.EF_ARM_ABIMASK
97
+ ) == elf_header.EF_ARM_ABI_VER5
98
+ result &= (
99
+ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
100
+ ) == elf_header.EF_ARM_ABI_FLOAT_HARD
101
+ return result
102
+
103
+
104
+ def _is_linux_i686() -> bool:
105
+ elf_header = _get_elf_header()
106
+ if elf_header is None:
107
+ return False
108
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
109
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
110
+ result &= elf_header.e_machine == elf_header.EM_386
111
+ return result
112
+
113
+
114
+ def _have_compatible_abi(arch: str) -> bool:
115
+ if arch == "armv7l":
116
+ return _is_linux_armhf()
117
+ if arch == "i686":
118
+ return _is_linux_i686()
119
+ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
120
+
121
+
122
+ # If glibc ever changes its major version, we need to know what the last
123
+ # minor version was, so we can build the complete list of all versions.
124
+ # For now, guess what the highest minor version might be, assume it will
125
+ # be 50 for testing. Once this actually happens, update the dictionary
126
+ # with the actual value.
127
+ _LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
128
+
129
+
130
+ class _GLibCVersion(NamedTuple):
131
+ major: int
132
+ minor: int
133
+
134
+
135
+ def _glibc_version_string_confstr() -> Optional[str]:
136
+ """
137
+ Primary implementation of glibc_version_string using os.confstr.
138
+ """
139
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
140
+ # to be broken or missing. This strategy is used in the standard library
141
+ # platform module.
142
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
143
+ try:
144
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
145
+ version_string = os.confstr("CS_GNU_LIBC_VERSION")
146
+ assert version_string is not None
147
+ _, version = version_string.split()
148
+ except (AssertionError, AttributeError, OSError, ValueError):
149
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
150
+ return None
151
+ return version
152
+
153
+
154
+ def _glibc_version_string_ctypes() -> Optional[str]:
155
+ """
156
+ Fallback implementation of glibc_version_string using ctypes.
157
+ """
158
+ try:
159
+ import ctypes
160
+ except ImportError:
161
+ return None
162
+
163
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
164
+ # manpage says, "If filename is NULL, then the returned handle is for the
165
+ # main program". This way we can let the linker do the work to figure out
166
+ # which libc our process is actually using.
167
+ #
168
+ # We must also handle the special case where the executable is not a
169
+ # dynamically linked executable. This can occur when using musl libc,
170
+ # for example. In this situation, dlopen() will error, leading to an
171
+ # OSError. Interestingly, at least in the case of musl, there is no
172
+ # errno set on the OSError. The single string argument used to construct
173
+ # OSError comes from libc itself and is therefore not portable to
174
+ # hard code here. In any case, failure to call dlopen() means we
175
+ # cannot proceed, so we bail on our attempt.
176
+ try:
177
+ process_namespace = ctypes.CDLL(None)
178
+ except OSError:
179
+ return None
180
+
181
+ try:
182
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
183
+ except AttributeError:
184
+ # Symbol doesn't exist -> therefore, we are not linked to
185
+ # glibc.
186
+ return None
187
+
188
+ # Call gnu_get_libc_version, which returns a string like "2.5"
189
+ gnu_get_libc_version.restype = ctypes.c_char_p
190
+ version_str: str = gnu_get_libc_version()
191
+ # py2 / py3 compatibility:
192
+ if not isinstance(version_str, str):
193
+ version_str = version_str.decode("ascii")
194
+
195
+ return version_str
196
+
197
+
198
+ def _glibc_version_string() -> Optional[str]:
199
+ """Returns glibc version string, or None if not using glibc."""
200
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
201
+
202
+
203
+ def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
204
+ """Parse glibc version.
205
+
206
+ We use a regexp instead of str.split because we want to discard any
207
+ random junk that might come after the minor version -- this might happen
208
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
209
+ uses version strings like "2.20-2014.11"). See gh-3588.
210
+ """
211
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
212
+ if not m:
213
+ warnings.warn(
214
+ "Expected glibc version with 2 components major.minor,"
215
+ " got: %s" % version_str,
216
+ RuntimeWarning,
217
+ )
218
+ return -1, -1
219
+ return int(m.group("major")), int(m.group("minor"))
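+ # e.g. _parse_glibc_version("2.17") == (2, 17) and _parse_glibc_version("2.20-2014.11") == (2, 20).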
220
+
221
+
222
+ @functools.lru_cache()
223
+ def _get_glibc_version() -> Tuple[int, int]:
224
+ version_str = _glibc_version_string()
225
+ if version_str is None:
226
+ return (-1, -1)
227
+ return _parse_glibc_version(version_str)
228
+
229
+
230
+ # From PEP 513, PEP 600
231
+ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
232
+ sys_glibc = _get_glibc_version()
233
+ if sys_glibc < version:
234
+ return False
235
+ # Check for presence of _manylinux module.
236
+ try:
237
+ import _manylinux # noqa
238
+ except ImportError:
239
+ return True
240
+ if hasattr(_manylinux, "manylinux_compatible"):
241
+ result = _manylinux.manylinux_compatible(version[0], version[1], arch)
242
+ if result is not None:
243
+ return bool(result)
244
+ return True
245
+ if version == _GLibCVersion(2, 5):
246
+ if hasattr(_manylinux, "manylinux1_compatible"):
247
+ return bool(_manylinux.manylinux1_compatible)
248
+ if version == _GLibCVersion(2, 12):
249
+ if hasattr(_manylinux, "manylinux2010_compatible"):
250
+ return bool(_manylinux.manylinux2010_compatible)
251
+ if version == _GLibCVersion(2, 17):
252
+ if hasattr(_manylinux, "manylinux2014_compatible"):
253
+ return bool(_manylinux.manylinux2014_compatible)
254
+ return True
255
+
256
+
257
+ _LEGACY_MANYLINUX_MAP = {
258
+ # CentOS 7 w/ glibc 2.17 (PEP 599)
259
+ (2, 17): "manylinux2014",
260
+ # CentOS 6 w/ glibc 2.12 (PEP 571)
261
+ (2, 12): "manylinux2010",
262
+ # CentOS 5 w/ glibc 2.5 (PEP 513)
263
+ (2, 5): "manylinux1",
264
+ }
265
+
266
+
267
+ def platform_tags(linux: str, arch: str) -> Iterator[str]:
268
+ if not _have_compatible_abi(arch):
269
+ return
270
+ # Oldest glibc to be supported regardless of architecture is (2, 17),
+ # i.e. anything at or below (2, 16) is too old.
271
+ too_old_glibc2 = _GLibCVersion(2, 16)
272
+ if arch in {"x86_64", "i686"}:
273
+ # On x86/i686 also oldest glibc to be supported is (2, 5).
274
+ too_old_glibc2 = _GLibCVersion(2, 4)
275
+ current_glibc = _GLibCVersion(*_get_glibc_version())
276
+ glibc_max_list = [current_glibc]
277
+ # We can assume compatibility across glibc major versions.
278
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
279
+ #
280
+ # Build a list of maximum glibc versions so that we can
281
+ # output the canonical list of all glibc from current_glibc
282
+ # down to too_old_glibc2, including all intermediary versions.
283
+ for glibc_major in range(current_glibc.major - 1, 1, -1):
284
+ glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
285
+ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
286
+ for glibc_max in glibc_max_list:
287
+ if glibc_max.major == too_old_glibc2.major:
288
+ min_minor = too_old_glibc2.minor
289
+ else:
290
+ # For other glibc major versions oldest supported is (x, 0).
291
+ min_minor = -1
292
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
293
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
294
+ tag = "manylinux_{}_{}".format(*glibc_version)
295
+ if _is_compatible(tag, arch, glibc_version):
296
+ yield linux.replace("linux", tag)
297
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
298
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
299
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
300
+ if _is_compatible(legacy_tag, arch, glibc_version):
301
+ yield linux.replace("linux", legacy_tag)
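+
+ # Illustrative (hypothetical) output, assuming an x86_64 interpreter on glibc 2.31:
+ #   list(platform_tags("linux_x86_64", "x86_64"))[:3]
+ #   ['manylinux_2_31_x86_64', 'manylinux_2_30_x86_64', 'manylinux_2_29_x86_64']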
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/_structures.py ADDED
@@ -0,0 +1,67 @@
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+
6
+ class InfinityType:
7
+ def __repr__(self) -> str:
8
+ return "Infinity"
9
+
10
+ def __hash__(self) -> int:
11
+ return hash(repr(self))
12
+
13
+ def __lt__(self, other: object) -> bool:
14
+ return False
15
+
16
+ def __le__(self, other: object) -> bool:
17
+ return False
18
+
19
+ def __eq__(self, other: object) -> bool:
20
+ return isinstance(other, self.__class__)
21
+
22
+ def __ne__(self, other: object) -> bool:
23
+ return not isinstance(other, self.__class__)
24
+
25
+ def __gt__(self, other: object) -> bool:
26
+ return True
27
+
28
+ def __ge__(self, other: object) -> bool:
29
+ return True
30
+
31
+ def __neg__(self: object) -> "NegativeInfinityType":
32
+ return NegativeInfinity
33
+
34
+
35
+ Infinity = InfinityType()
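+ # Sentinel that compares greater than any other object; NegativeInfinity below is its mirror image.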
36
+
37
+
38
+ class NegativeInfinityType:
39
+ def __repr__(self) -> str:
40
+ return "-Infinity"
41
+
42
+ def __hash__(self) -> int:
43
+ return hash(repr(self))
44
+
45
+ def __lt__(self, other: object) -> bool:
46
+ return True
47
+
48
+ def __le__(self, other: object) -> bool:
49
+ return True
50
+
51
+ def __eq__(self, other: object) -> bool:
52
+ return isinstance(other, self.__class__)
53
+
54
+ def __ne__(self, other: object) -> bool:
55
+ return not isinstance(other, self.__class__)
56
+
57
+ def __gt__(self, other: object) -> bool:
58
+ return False
59
+
60
+ def __ge__(self, other: object) -> bool:
61
+ return False
62
+
63
+ def __neg__(self: object) -> InfinityType:
64
+ return Infinity
65
+
66
+
67
+ NegativeInfinity = NegativeInfinityType()
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/markers.py ADDED
@@ -0,0 +1,304 @@
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import operator
6
+ import os
7
+ import platform
8
+ import sys
9
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
10
+
11
+ from pkg_resources.extern.pyparsing import ( # noqa: N817
12
+ Forward,
13
+ Group,
14
+ Literal as L,
15
+ ParseException,
16
+ ParseResults,
17
+ QuotedString,
18
+ ZeroOrMore,
19
+ stringEnd,
20
+ stringStart,
21
+ )
22
+
23
+ from .specifiers import InvalidSpecifier, Specifier
24
+
25
+ __all__ = [
26
+ "InvalidMarker",
27
+ "UndefinedComparison",
28
+ "UndefinedEnvironmentName",
29
+ "Marker",
30
+ "default_environment",
31
+ ]
32
+
33
+ Operator = Callable[[str, str], bool]
34
+
35
+
36
+ class InvalidMarker(ValueError):
37
+ """
38
+ An invalid marker was found, users should refer to PEP 508.
39
+ """
40
+
41
+
42
+ class UndefinedComparison(ValueError):
43
+ """
44
+ An invalid operation was attempted on a value that doesn't support it.
45
+ """
46
+
47
+
48
+ class UndefinedEnvironmentName(ValueError):
49
+ """
50
+ A name was used that does not exist inside of the evaluation
51
+ environment.
52
+ """
53
+
54
+
55
+ class Node:
56
+ def __init__(self, value: Any) -> None:
57
+ self.value = value
58
+
59
+ def __str__(self) -> str:
60
+ return str(self.value)
61
+
62
+ def __repr__(self) -> str:
63
+ return f"<{self.__class__.__name__}('{self}')>"
64
+
65
+ def serialize(self) -> str:
66
+ raise NotImplementedError
67
+
68
+
69
+ class Variable(Node):
70
+ def serialize(self) -> str:
71
+ return str(self)
72
+
73
+
74
+ class Value(Node):
75
+ def serialize(self) -> str:
76
+ return f'"{self}"'
77
+
78
+
79
+ class Op(Node):
80
+ def serialize(self) -> str:
81
+ return str(self)
82
+
83
+
84
+ VARIABLE = (
85
+ L("implementation_version")
86
+ | L("platform_python_implementation")
87
+ | L("implementation_name")
88
+ | L("python_full_version")
89
+ | L("platform_release")
90
+ | L("platform_version")
91
+ | L("platform_machine")
92
+ | L("platform_system")
93
+ | L("python_version")
94
+ | L("sys_platform")
95
+ | L("os_name")
96
+ | L("os.name") # PEP-345
97
+ | L("sys.platform") # PEP-345
98
+ | L("platform.version") # PEP-345
99
+ | L("platform.machine") # PEP-345
100
+ | L("platform.python_implementation") # PEP-345
101
+ | L("python_implementation") # undocumented setuptools legacy
102
+ | L("extra") # PEP-508
103
+ )
104
+ ALIASES = {
105
+ "os.name": "os_name",
106
+ "sys.platform": "sys_platform",
107
+ "platform.version": "platform_version",
108
+ "platform.machine": "platform_machine",
109
+ "platform.python_implementation": "platform_python_implementation",
110
+ "python_implementation": "platform_python_implementation",
111
+ }
112
+ VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
113
+
114
+ VERSION_CMP = (
115
+ L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
116
+ )
117
+
118
+ MARKER_OP = VERSION_CMP | L("not in") | L("in")
119
+ MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
120
+
121
+ MARKER_VALUE = QuotedString("'") | QuotedString('"')
122
+ MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
123
+
124
+ BOOLOP = L("and") | L("or")
125
+
126
+ MARKER_VAR = VARIABLE | MARKER_VALUE
127
+
128
+ MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
129
+ MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
130
+
131
+ LPAREN = L("(").suppress()
132
+ RPAREN = L(")").suppress()
133
+
134
+ MARKER_EXPR = Forward()
135
+ MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
136
+ MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
137
+
138
+ MARKER = stringStart + MARKER_EXPR + stringEnd
139
+
140
+
141
+ def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
142
+ if isinstance(results, ParseResults):
143
+ return [_coerce_parse_result(i) for i in results]
144
+ else:
145
+ return results
146
+
147
+
148
+ def _format_marker(
149
+ marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
150
+ ) -> str:
151
+
152
+ assert isinstance(marker, (list, tuple, str))
153
+
154
+ # Sometimes we have a structure like [[...]] which is a single item list
155
+ # where the single item is itself it's own list. In that case we want skip
156
+ # the rest of this function so that we don't get extraneous () on the
157
+ # outside.
158
+ if (
159
+ isinstance(marker, list)
160
+ and len(marker) == 1
161
+ and isinstance(marker[0], (list, tuple))
162
+ ):
163
+ return _format_marker(marker[0])
164
+
165
+ if isinstance(marker, list):
166
+ inner = (_format_marker(m, first=False) for m in marker)
167
+ if first:
168
+ return " ".join(inner)
169
+ else:
170
+ return "(" + " ".join(inner) + ")"
171
+ elif isinstance(marker, tuple):
172
+ return " ".join([m.serialize() for m in marker])
173
+ else:
174
+ return marker
175
+
176
+
177
+ _operators: Dict[str, Operator] = {
178
+ "in": lambda lhs, rhs: lhs in rhs,
179
+ "not in": lambda lhs, rhs: lhs not in rhs,
180
+ "<": operator.lt,
181
+ "<=": operator.le,
182
+ "==": operator.eq,
183
+ "!=": operator.ne,
184
+ ">=": operator.ge,
185
+ ">": operator.gt,
186
+ }
187
+
188
+
189
+ def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
190
+ try:
191
+ spec = Specifier("".join([op.serialize(), rhs]))
192
+ except InvalidSpecifier:
193
+ pass
194
+ else:
195
+ return spec.contains(lhs)
196
+
197
+ oper: Optional[Operator] = _operators.get(op.serialize())
198
+ if oper is None:
199
+ raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
200
+
201
+ return oper(lhs, rhs)
202
+
203
+
204
+ class Undefined:
205
+ pass
206
+
207
+
208
+ _undefined = Undefined()
209
+
210
+
211
+ def _get_env(environment: Dict[str, str], name: str) -> str:
212
+ value: Union[str, Undefined] = environment.get(name, _undefined)
213
+
214
+ if isinstance(value, Undefined):
215
+ raise UndefinedEnvironmentName(
216
+ f"{name!r} does not exist in evaluation environment."
217
+ )
218
+
219
+ return value
220
+
221
+
222
+ def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
223
+ groups: List[List[bool]] = [[]]
224
+
225
+ for marker in markers:
226
+ assert isinstance(marker, (list, tuple, str))
227
+
228
+ if isinstance(marker, list):
229
+ groups[-1].append(_evaluate_markers(marker, environment))
230
+ elif isinstance(marker, tuple):
231
+ lhs, op, rhs = marker
232
+
233
+ if isinstance(lhs, Variable):
234
+ lhs_value = _get_env(environment, lhs.value)
235
+ rhs_value = rhs.value
236
+ else:
237
+ lhs_value = lhs.value
238
+ rhs_value = _get_env(environment, rhs.value)
239
+
240
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
241
+ else:
242
+ assert marker in ["and", "or"]
243
+ if marker == "or":
244
+ groups.append([])
245
+
246
+ return any(all(item) for item in groups)
247
+
248
+
249
+ def format_full_version(info: "sys._version_info") -> str:
250
+ version = "{0.major}.{0.minor}.{0.micro}".format(info)
251
+ kind = info.releaselevel
252
+ if kind != "final":
253
+ version += kind[0] + str(info.serial)
254
+ return version
255
+
256
+
257
+ def default_environment() -> Dict[str, str]:
258
+ iver = format_full_version(sys.implementation.version)
259
+ implementation_name = sys.implementation.name
260
+ return {
261
+ "implementation_name": implementation_name,
262
+ "implementation_version": iver,
263
+ "os_name": os.name,
264
+ "platform_machine": platform.machine(),
265
+ "platform_release": platform.release(),
266
+ "platform_system": platform.system(),
267
+ "platform_version": platform.version(),
268
+ "python_full_version": platform.python_version(),
269
+ "platform_python_implementation": platform.python_implementation(),
270
+ "python_version": ".".join(platform.python_version_tuple()[:2]),
271
+ "sys_platform": sys.platform,
272
+ }
273
+
274
+
275
+ class Marker:
276
+ def __init__(self, marker: str) -> None:
277
+ try:
278
+ self._markers = _coerce_parse_result(MARKER.parseString(marker))
279
+ except ParseException as e:
280
+ raise InvalidMarker(
281
+ f"Invalid marker: {marker!r}, parse error at "
282
+ f"{marker[e.loc : e.loc + 8]!r}"
283
+ )
284
+
285
+ def __str__(self) -> str:
286
+ return _format_marker(self._markers)
287
+
288
+ def __repr__(self) -> str:
289
+ return f"<Marker('{self}')>"
290
+
291
+ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
292
+ """Evaluate a marker.
293
+
294
+ Return the boolean from evaluating the given marker against the
295
+ environment. environment is an optional argument to override all or
296
+ part of the determined environment.
297
+
298
+ The environment is determined from the current Python process.
299
+ """
300
+ current_environment = default_environment()
301
+ if environment is not None:
302
+ current_environment.update(environment)
303
+
304
+ return _evaluate_markers(self._markers, current_environment)
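+
+ # Usage sketch (illustrative, not part of the original file):
+ #   m = Marker('python_version >= "3.8" and os_name == "posix"')
+ #   m.evaluate()                   # evaluated against the running interpreter
+ #   m.evaluate({"os_name": "nt"})  # overrides part of the detected environment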
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/requirements.py ADDED
@@ -0,0 +1,146 @@
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import re
6
+ import string
7
+ import urllib.parse
8
+ from typing import List, Optional as TOptional, Set
9
+
10
+ from pkg_resources.extern.pyparsing import ( # noqa
11
+ Combine,
12
+ Literal as L,
13
+ Optional,
14
+ ParseException,
15
+ Regex,
16
+ Word,
17
+ ZeroOrMore,
18
+ originalTextFor,
19
+ stringEnd,
20
+ stringStart,
21
+ )
22
+
23
+ from .markers import MARKER_EXPR, Marker
24
+ from .specifiers import LegacySpecifier, Specifier, SpecifierSet
25
+
26
+
27
+ class InvalidRequirement(ValueError):
28
+ """
29
+ An invalid requirement was found, users should refer to PEP 508.
30
+ """
31
+
32
+
33
+ ALPHANUM = Word(string.ascii_letters + string.digits)
34
+
35
+ LBRACKET = L("[").suppress()
36
+ RBRACKET = L("]").suppress()
37
+ LPAREN = L("(").suppress()
38
+ RPAREN = L(")").suppress()
39
+ COMMA = L(",").suppress()
40
+ SEMICOLON = L(";").suppress()
41
+ AT = L("@").suppress()
42
+
43
+ PUNCTUATION = Word("-_.")
44
+ IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
45
+ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
46
+
47
+ NAME = IDENTIFIER("name")
48
+ EXTRA = IDENTIFIER
49
+
50
+ URI = Regex(r"[^ ]+")("url")
51
+ URL = AT + URI
52
+
53
+ EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
54
+ EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
55
+
56
+ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
57
+ VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
58
+
59
+ VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
60
+ VERSION_MANY = Combine(
61
+ VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
62
+ )("_raw_spec")
63
+ _VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
64
+ _VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
65
+
66
+ VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
67
+ VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
68
+
69
+ MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
70
+ MARKER_EXPR.setParseAction(
71
+ lambda s, l, t: Marker(s[t._original_start : t._original_end])
72
+ )
73
+ MARKER_SEPARATOR = SEMICOLON
74
+ MARKER = MARKER_SEPARATOR + MARKER_EXPR
75
+
76
+ VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
77
+ URL_AND_MARKER = URL + Optional(MARKER)
78
+
79
+ NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
80
+
81
+ REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
82
+ # pkg_resources.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
83
+ # issue #104
84
+ REQUIREMENT.parseString("x[]")
85
+
86
+
87
+ class Requirement:
88
+ """Parse a requirement.
89
+
90
+ Parse a given requirement string into its parts, such as name, specifier,
91
+ URL, and extras. Raises InvalidRequirement on a badly-formed requirement
92
+ string.
93
+ """
94
+
95
+ # TODO: Can we test whether something is contained within a requirement?
96
+ # If so how do we do that? Do we need to test against the _name_ of
97
+ # the thing as well as the version? What about the markers?
98
+ # TODO: Can we normalize the name and extra name?
99
+
100
+ def __init__(self, requirement_string: str) -> None:
101
+ try:
102
+ req = REQUIREMENT.parseString(requirement_string)
103
+ except ParseException as e:
104
+ raise InvalidRequirement(
105
+ f'Parse error at "{requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
106
+ )
107
+
108
+ self.name: str = req.name
109
+ if req.url:
110
+ parsed_url = urllib.parse.urlparse(req.url)
111
+ if parsed_url.scheme == "file":
112
+ if urllib.parse.urlunparse(parsed_url) != req.url:
113
+ raise InvalidRequirement("Invalid URL given")
114
+ elif not (parsed_url.scheme and parsed_url.netloc) or (
115
+ not parsed_url.scheme and not parsed_url.netloc
116
+ ):
117
+ raise InvalidRequirement(f"Invalid URL: {req.url}")
118
+ self.url: TOptional[str] = req.url
119
+ else:
120
+ self.url = None
121
+ self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
122
+ self.specifier: SpecifierSet = SpecifierSet(req.specifier)
123
+ self.marker: TOptional[Marker] = req.marker if req.marker else None
124
+
125
+ def __str__(self) -> str:
126
+ parts: List[str] = [self.name]
127
+
128
+ if self.extras:
129
+ formatted_extras = ",".join(sorted(self.extras))
130
+ parts.append(f"[{formatted_extras}]")
131
+
132
+ if self.specifier:
133
+ parts.append(str(self.specifier))
134
+
135
+ if self.url:
136
+ parts.append(f"@ {self.url}")
137
+ if self.marker:
138
+ parts.append(" ")
139
+
140
+ if self.marker:
141
+ parts.append(f"; {self.marker}")
142
+
143
+ return "".join(parts)
144
+
145
+ def __repr__(self) -> str:
146
+ return f"<Requirement('{self}')>"
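+
+ # Usage sketch (illustrative, not part of the original file):
+ #   r = Requirement('requests[security]>=2.8.1; python_version < "2.7"')
+ #   r.name == 'requests'; r.extras == {'security'}; str(r.specifier) == '>=2.8.1'
+ #   str(r.marker) == 'python_version < "2.7"'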
env-llmeval/lib/python3.10/site-packages/pkg_resources/_vendor/packaging/specifiers.py ADDED
@@ -0,0 +1,828 @@
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import abc
6
+ import functools
7
+ import itertools
8
+ import re
9
+ import warnings
10
+ from typing import (
11
+ Callable,
12
+ Dict,
13
+ Iterable,
14
+ Iterator,
15
+ List,
16
+ Optional,
17
+ Pattern,
18
+ Set,
19
+ Tuple,
20
+ TypeVar,
21
+ Union,
22
+ )
23
+
24
+ from .utils import canonicalize_version
25
+ from .version import LegacyVersion, Version, parse
26
+
27
+ ParsedVersion = Union[Version, LegacyVersion]
28
+ UnparsedVersion = Union[Version, LegacyVersion, str]
29
+ VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
30
+ CallableOperator = Callable[[ParsedVersion, str], bool]
31
+
32
+
33
+ class InvalidSpecifier(ValueError):
34
+ """
35
+ An invalid specifier was found, users should refer to PEP 440.
36
+ """
37
+
38
+
39
+ class BaseSpecifier(metaclass=abc.ABCMeta):
40
+ @abc.abstractmethod
41
+ def __str__(self) -> str:
42
+ """
43
+ Returns the str representation of this Specifier like object. This
44
+ should be representative of the Specifier itself.
45
+ """
46
+
47
+ @abc.abstractmethod
48
+ def __hash__(self) -> int:
49
+ """
50
+ Returns a hash value for this Specifier like object.
51
+ """
52
+
53
+ @abc.abstractmethod
54
+ def __eq__(self, other: object) -> bool:
55
+ """
56
+ Returns a boolean representing whether or not the two Specifier like
57
+ objects are equal.
58
+ """
59
+
60
+ @abc.abstractmethod
61
+ def __ne__(self, other: object) -> bool:
62
+ """
63
+ Returns a boolean representing whether or not the two Specifier like
64
+ objects are not equal.
65
+ """
66
+
67
+ @abc.abstractproperty
68
+ def prereleases(self) -> Optional[bool]:
69
+ """
70
+ Returns whether or not pre-releases as a whole are allowed by this
71
+ specifier.
72
+ """
73
+
74
+ @prereleases.setter
75
+ def prereleases(self, value: bool) -> None:
76
+ """
77
+ Sets whether or not pre-releases as a whole are allowed by this
78
+ specifier.
79
+ """
80
+
81
+ @abc.abstractmethod
82
+ def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
83
+ """
84
+ Determines if the given item is contained within this specifier.
85
+ """
86
+
87
+ @abc.abstractmethod
88
+ def filter(
89
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
90
+ ) -> Iterable[VersionTypeVar]:
91
+ """
92
+ Takes an iterable of items and filters them so that only items which
93
+ are contained within this specifier are allowed in it.
94
+ """
95
+
96
+
97
+ class _IndividualSpecifier(BaseSpecifier):
98
+
99
+ _operators: Dict[str, str] = {}
100
+ _regex: Pattern[str]
101
+
102
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
103
+ match = self._regex.search(spec)
104
+ if not match:
105
+ raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
106
+
107
+ self._spec: Tuple[str, str] = (
108
+ match.group("operator").strip(),
109
+ match.group("version").strip(),
110
+ )
111
+
112
+ # Store whether or not this Specifier should accept prereleases
113
+ self._prereleases = prereleases
114
+
115
+ def __repr__(self) -> str:
116
+ pre = (
117
+ f", prereleases={self.prereleases!r}"
118
+ if self._prereleases is not None
119
+ else ""
120
+ )
121
+
122
+ return "<{}({!r}{})>".format(self.__class__.__name__, str(self), pre)
123
+
124
+ def __str__(self) -> str:
125
+ return "{}{}".format(*self._spec)
126
+
127
+ @property
128
+ def _canonical_spec(self) -> Tuple[str, str]:
129
+ return self._spec[0], canonicalize_version(self._spec[1])
130
+
131
+ def __hash__(self) -> int:
132
+ return hash(self._canonical_spec)
133
+
134
+ def __eq__(self, other: object) -> bool:
135
+ if isinstance(other, str):
136
+ try:
137
+ other = self.__class__(str(other))
138
+ except InvalidSpecifier:
139
+ return NotImplemented
140
+ elif not isinstance(other, self.__class__):
141
+ return NotImplemented
142
+
143
+ return self._canonical_spec == other._canonical_spec
144
+
145
+ def __ne__(self, other: object) -> bool:
146
+ if isinstance(other, str):
147
+ try:
148
+ other = self.__class__(str(other))
149
+ except InvalidSpecifier:
150
+ return NotImplemented
151
+ elif not isinstance(other, self.__class__):
152
+ return NotImplemented
153
+
154
+ return self._spec != other._spec
155
+
156
+ def _get_operator(self, op: str) -> CallableOperator:
157
+ operator_callable: CallableOperator = getattr(
158
+ self, f"_compare_{self._operators[op]}"
159
+ )
160
+ return operator_callable
161
+
162
+ def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
163
+ if not isinstance(version, (LegacyVersion, Version)):
164
+ version = parse(version)
165
+ return version
166
+
167
+ @property
168
+ def operator(self) -> str:
169
+ return self._spec[0]
170
+
171
+ @property
172
+ def version(self) -> str:
173
+ return self._spec[1]
174
+
175
+ @property
176
+ def prereleases(self) -> Optional[bool]:
177
+ return self._prereleases
178
+
179
+ @prereleases.setter
180
+ def prereleases(self, value: bool) -> None:
181
+ self._prereleases = value
182
+
183
+ def __contains__(self, item: str) -> bool:
184
+ return self.contains(item)
185
+
186
+ def contains(
187
+ self, item: UnparsedVersion, prereleases: Optional[bool] = None
188
+ ) -> bool:
189
+
190
+ # Determine if prereleases are to be allowed or not.
191
+ if prereleases is None:
192
+ prereleases = self.prereleases
193
+
194
+ # Normalize item to a Version or LegacyVersion, this allows us to have
195
+ # a shortcut for ``"2.0" in Specifier(">=2")
196
+ normalized_item = self._coerce_version(item)
197
+
198
+ # Determine if we should be supporting prereleases in this specifier
199
+ # or not, if we do not support prereleases than we can short circuit
200
+ # logic if this version is a prereleases.
201
+ if normalized_item.is_prerelease and not prereleases:
202
+ return False
203
+
204
+ # Actually do the comparison to determine if this item is contained
205
+ # within this Specifier or not.
206
+ operator_callable: CallableOperator = self._get_operator(self.operator)
207
+ return operator_callable(normalized_item, self.version)
208
+
209
+ def filter(
210
+ self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
211
+ ) -> Iterable[VersionTypeVar]:
212
+
213
+ yielded = False
214
+ found_prereleases = []
215
+
216
+ kw = {"prereleases": prereleases if prereleases is not None else True}
217
+
218
+ # Attempt to iterate over all the values in the iterable and if any of
219
+ # them match, yield them.
220
+ for version in iterable:
221
+ parsed_version = self._coerce_version(version)
222
+
223
+ if self.contains(parsed_version, **kw):
224
+ # If our version is a prerelease, and we were not set to allow
225
+ # prereleases, then we'll store it for later in case nothing
226
+ # else matches this specifier.
227
+ if parsed_version.is_prerelease and not (
228
+ prereleases or self.prereleases
229
+ ):
230
+ found_prereleases.append(version)
231
+ # Either this is not a prerelease, or we should have been
232
+ # accepting prereleases from the beginning.
233
+ else:
234
+ yielded = True
235
+ yield version
236
+
237
+ # Now that we've iterated over everything, determine if we've yielded
238
+ # any values, and if we have not and we have any prereleases stored up
239
+ # then we will go ahead and yield the prereleases.
240
+ if not yielded and found_prereleases:
241
+ for version in found_prereleases:
242
+ yield version
243
+
244
+
245
+ class LegacySpecifier(_IndividualSpecifier):
246
+
247
+ _regex_str = r"""
248
+ (?P<operator>(==|!=|<=|>=|<|>))
249
+ \s*
250
+ (?P<version>
251
+ [^,;\s)]* # Since this is a "legacy" specifier, and the version
252
+ # string can be just about anything, we match everything
253
+ # except for whitespace, a semi-colon for marker support,
254
+ # a closing paren since versions can be enclosed in
255
+ # them, and a comma since it's a version separator.
256
+ )
257
+ """
258
+
259
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
260
+
261
+ _operators = {
262
+ "==": "equal",
263
+ "!=": "not_equal",
264
+ "<=": "less_than_equal",
265
+ ">=": "greater_than_equal",
266
+ "<": "less_than",
267
+ ">": "greater_than",
268
+ }
269
+
270
+ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
271
+ super().__init__(spec, prereleases)
272
+
273
+ warnings.warn(
274
+ "Creating a LegacyVersion has been deprecated and will be "
275
+ "removed in the next major release",
276
+ DeprecationWarning,
277
+ )
278
+
279
+ def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
280
+ if not isinstance(version, LegacyVersion):
281
+ version = LegacyVersion(str(version))
282
+ return version
283
+
284
+ def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
285
+ return prospective == self._coerce_version(spec)
286
+
287
+ def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
288
+ return prospective != self._coerce_version(spec)
289
+
290
+ def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
291
+ return prospective <= self._coerce_version(spec)
292
+
293
+ def _compare_greater_than_equal(
294
+ self, prospective: LegacyVersion, spec: str
295
+ ) -> bool:
296
+ return prospective >= self._coerce_version(spec)
297
+
298
+ def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
299
+ return prospective < self._coerce_version(spec)
300
+
301
+ def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
302
+ return prospective > self._coerce_version(spec)
303
+
304
+
305
+ def _require_version_compare(
306
+ fn: Callable[["Specifier", ParsedVersion, str], bool]
307
+ ) -> Callable[["Specifier", ParsedVersion, str], bool]:
308
+ @functools.wraps(fn)
309
+ def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
310
+ if not isinstance(prospective, Version):
311
+ return False
312
+ return fn(self, prospective, spec)
313
+
314
+ return wrapped
315
+
316
+
317
+ class Specifier(_IndividualSpecifier):
318
+
319
+ _regex_str = r"""
320
+ (?P<operator>(~=|==|!=|<=|>=|<|>|===))
321
+ (?P<version>
322
+ (?:
323
+ # The identity operators allow for an escape hatch that will
324
+ # do an exact string match of the version you wish to install.
325
+ # This will not be parsed by PEP 440 and we cannot determine
326
+ # any semantic meaning from it. This operator is discouraged
327
+ # but included entirely as an escape hatch.
328
+ (?<====) # Only match for the identity operator
329
+ \s*
330
+ [^\s]* # We just match everything, except for whitespace
331
+ # since we are only testing for strict identity.
332
+ )
333
+ |
334
+ (?:
335
+ # The (non)equality operators allow for wild card and local
336
+ # versions to be specified so we have to define these two
337
+ # operators separately to enable that.
338
+ (?<===|!=) # Only match for equals and not equals
339
+
340
+ \s*
341
+ v?
342
+ (?:[0-9]+!)? # epoch
343
+ [0-9]+(?:\.[0-9]+)* # release
344
+ (?: # pre release
345
+ [-_\.]?
346
+ (a|b|c|rc|alpha|beta|pre|preview)
347
+ [-_\.]?
348
+ [0-9]*
349
+ )?
350
+ (?: # post release
351
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
352
+ )?
353
+
354
+ # You cannot use a wild card and a dev or local version
355
+ # together so group them with a | and make them optional.
356
+ (?:
357
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
358
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
359
+ |
360
+ \.\* # Wild card syntax of .*
361
+ )?
362
+ )
363
+ |
364
+ (?:
365
+ # The compatible operator requires at least two digits in the
366
+ # release segment.
367
+ (?<=~=) # Only match for the compatible operator
368
+
369
+ \s*
370
+ v?
371
+ (?:[0-9]+!)? # epoch
372
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
373
+ (?: # pre release
374
+ [-_\.]?
375
+ (a|b|c|rc|alpha|beta|pre|preview)
376
+ [-_\.]?
377
+ [0-9]*
378
+ )?
379
+ (?: # post release
380
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
381
+ )?
382
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
383
+ )
384
+ |
385
+ (?:
386
+ # All other operators only allow a sub set of what the
387
+ # (non)equality operators do. Specifically they do not allow
388
+ # local versions to be specified nor do they allow the prefix
389
+ # matching wild cards.
390
+ (?<!==|!=|~=) # We have special cases for these
391
+ # operators so we want to make sure they
392
+ # don't match here.
393
+
394
+ \s*
395
+ v?
396
+ (?:[0-9]+!)? # epoch
397
+ [0-9]+(?:\.[0-9]+)* # release
398
+ (?: # pre release
399
+ [-_\.]?
400
+ (a|b|c|rc|alpha|beta|pre|preview)
401
+ [-_\.]?
402
+ [0-9]*
403
+ )?
404
+ (?: # post release
405
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
406
+ )?
407
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
408
+ )
409
+ )
410
+ """
411
+
412
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
413
+
414
+ _operators = {
415
+ "~=": "compatible",
416
+ "==": "equal",
417
+ "!=": "not_equal",
418
+ "<=": "less_than_equal",
419
+ ">=": "greater_than_equal",
420
+ "<": "less_than",
421
+ ">": "greater_than",
422
+ "===": "arbitrary",
423
+ }
424
+
425
+ @_require_version_compare
426
+ def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
427
+
428
+ # Compatible releases have an equivalent combination of >= and ==. That
429
+ # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
430
+ # implement this in terms of the other specifiers instead of
431
+ # implementing it ourselves. The only thing we need to do is construct
432
+ # the other specifiers.
433
+
434
+ # We want everything but the last item in the version, but we want to
435
+ # ignore suffix segments.
436
+ prefix = ".".join(
437
+ list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
438
+ )
439
+
440
+ # Add the prefix notation to the end of our string
441
+ prefix += ".*"
442
+
443
+ return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
444
+ prospective, prefix
445
+ )
446
+
447
+     @_require_version_compare
+     def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+         # We need special logic to handle prefix matching
+         if spec.endswith(".*"):
+             # In the case of prefix matching we want to ignore the local segment.
+             prospective = Version(prospective.public)
+             # Split the spec out by dots, and pretend that there is an implicit
+             # dot in between a release segment and a pre-release segment.
+             split_spec = _version_split(spec[:-2])  # Remove the trailing .*
+
+             # Split the prospective version out by dots, and pretend that there
+             # is an implicit dot in between a release segment and a pre-release
+             # segment.
+             split_prospective = _version_split(str(prospective))
+
+             # Shorten the prospective version to be the same length as the spec
+             # so that we can determine if the specifier is a prefix of the
+             # prospective version or not.
+             shortened_prospective = split_prospective[: len(split_spec)]
+
+             # Pad out our two sides with zeros so that they both equal the same
+             # length.
+             padded_spec, padded_prospective = _pad_version(
+                 split_spec, shortened_prospective
+             )
+
+             return padded_prospective == padded_spec
+         else:
+             # Convert our spec string into a Version
+             spec_version = Version(spec)
+
+             # If the specifier does not have a local segment, then we want to
+             # act as if the prospective version also does not have a local
+             # segment.
+             if not spec_version.local:
+                 prospective = Version(prospective.public)
+
+             return prospective == spec_version
+
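Illustrative behaviour of both equality branches (a sketch against the public API; the version strings are made up):

    from packaging.specifiers import Specifier

    # Prefix matching: anything past the given segments is ignored.
    assert Specifier("==2.2.*").contains("2.2.post3")
    # No local segment in the spec: the candidate's local part is ignored too.
    assert Specifier("==2.2").contains("2.2+anylocal")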
487
+     @_require_version_compare
+     def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+         return not self._compare_equal(prospective, spec)
+
+     @_require_version_compare
+     def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
+
+         # NB: Local version identifiers are NOT permitted in the version
+         # specifier, so local version labels can be universally removed from
+         # the prospective version.
+         return Version(prospective.public) <= Version(spec)
+
+     @_require_version_compare
+     def _compare_greater_than_equal(
+         self, prospective: ParsedVersion, spec: str
+     ) -> bool:
+
+         # NB: Local version identifiers are NOT permitted in the version
+         # specifier, so local version labels can be universally removed from
+         # the prospective version.
+         return Version(prospective.public) >= Version(spec)
+
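A short sketch of the local-label stripping described above (the "+cu118" label is an illustrative assumption, not from the source):

    from packaging.specifiers import Specifier

    # "1.0+cu118" is compared as its public version "1.0", so it
    # satisfies both inclusive bounds.
    assert Specifier(">=1.0").contains("1.0+cu118")
    assert Specifier("<=1.0").contains("1.0+cu118")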
509
+     @_require_version_compare
+     def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
+         # Convert our spec to a Version instance, since we'll want to work with
+         # it as a version.
+         spec = Version(spec_str)
+
+         # Check to see if the prospective version is less than the spec
+         # version. If it's not, we can short circuit and just return False now
+         # instead of doing extra unneeded work.
+         if not prospective < spec:
+             return False
+
+         # This special case is here so that, unless the specifier itself
+         # includes a pre-release version, we do not accept pre-release
+         # versions of the version mentioned in the specifier (e.g. <3.1 should
+         # not match 3.1.dev0, but should match 3.0.dev0).
+         if not spec.is_prerelease and prospective.is_prerelease:
+             if Version(prospective.base_version) == Version(spec.base_version):
+                 return False
+
+         # If we've gotten to here, it means that the prospective version is
+         # both less than the spec version *and* it's not a pre-release of the
+         # same version in the spec.
+         return True
+
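The pre-release special case, sketched with the public API (prereleases=True is passed so the pre-release short-circuit in contains() does not hide the behaviour):

    from packaging.specifiers import Specifier

    spec = Specifier("<3.1")
    assert not spec.contains("3.1.dev0", prereleases=True)  # pre-release of 3.1 itself
    assert spec.contains("3.0.dev0", prereleases=True)      # pre-release of a lower version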
535
+     @_require_version_compare
+     def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
+         # Convert our spec to a Version instance, since we'll want to work with
+         # it as a version.
+         spec = Version(spec_str)
+
+         # Check to see if the prospective version is greater than the spec
+         # version. If it's not, we can short circuit and just return False now
+         # instead of doing extra unneeded work.
+         if not prospective > spec:
+             return False
+
+         # This special case is here so that, unless the specifier itself
+         # includes a post-release version, we do not accept post-release
+         # versions of the version mentioned in the specifier (e.g. >3.1 should
+         # not match 3.1.post0, but should match 3.2.post0).
+         if not spec.is_postrelease and prospective.is_postrelease:
+             if Version(prospective.base_version) == Version(spec.base_version):
+                 return False
+
+         # Ensure that we do not allow a local version of the version mentioned
+         # in the specifier, which is technically greater than, to match.
+         if prospective.local is not None:
+             if Version(prospective.base_version) == Version(spec.base_version):
+                 return False
+
+         # If we've gotten to here, it means that the prospective version is
+         # both greater than the spec version *and* it's not a post-release or
+         # local version of the same version in the spec.
+         return True
+
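The mirror-image special cases for >, sketched with illustrative versions:

    from packaging.specifiers import Specifier

    spec = Specifier(">3.1")
    assert not spec.contains("3.1.post0")  # post-release of the pinned version
    assert not spec.contains("3.1+local")  # local build of the pinned version
    assert spec.contains("3.2.post0")      # post-release of a higher version is fine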
567
+     def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+         return str(prospective).lower() == str(spec).lower()
+
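A sketch contrasting === with ==. The arbitrary operator is a plain case-insensitive string comparison, so PEP 440 niceties such as trailing-zero padding do not apply:

    from packaging.specifiers import Specifier

    assert Specifier("==1.0").contains("1.0.0")       # 1.0 == 1.0.0 under PEP 440
    assert not Specifier("===1.0").contains("1.0.0")  # "1.0.0" != "1.0" as strings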
570
+     @property
+     def prereleases(self) -> bool:
+
+         # If there is an explicit prereleases set for this, then we'll just
+         # blindly use that.
+         if self._prereleases is not None:
+             return self._prereleases
+
+         # Look at our specifier and determine if it uses an inclusive
+         # operator, and if so, whether it includes an explicit prerelease.
+         operator, version = self._spec
+         if operator in ["==", ">=", "<=", "~=", "==="]:
+             # The == specifier can include a trailing .*; if it does we
+             # want to remove it before parsing.
+             if operator == "==" and version.endswith(".*"):
+                 version = version[:-2]
+
+             # Parse the version, and if it is a pre-release then this
+             # specifier allows pre-releases.
+             if parse(version).is_prerelease:
+                 return True
+
+         return False
+
+     @prereleases.setter
+     def prereleases(self, value: bool) -> None:
+         self._prereleases = value
+
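A sketch of the auto-detection described above: a pre-release inside the specifier opts it into matching pre-releases (illustrative versions):

    from packaging.specifiers import Specifier

    assert Specifier(">=1.0a1").contains("1.0b2")    # spec itself is a pre-release
    assert not Specifier(">=1.0").contains("1.0b2")  # plain spec rejects pre-releases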
599
+
+ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+ def _version_split(version: str) -> List[str]:
+     result: List[str] = []
+     for item in version.split("."):
+         match = _prefix_regex.search(item)
+         if match:
+             result.extend(match.groups())
+         else:
+             result.append(item)
+     return result
+
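For instance (values worked out from the code above, not from separate documentation):

    assert _version_split("2.2.0rc1") == ["2", "2", "0", "rc1"]  # "0rc1" splits in two
    assert _version_split("1.0") == ["1", "0"]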
613
+
+ def _is_not_suffix(segment: str) -> bool:
+     return not any(
+         segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+     )
+
+
+ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+     left_split, right_split = [], []
+
+     # Get the release segment of our versions
+     left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+     right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+     # Get the rest of our versions
+     left_split.append(left[len(left_split[0]) :])
+     right_split.append(right[len(right_split[0]) :])
+
+     # Insert our padding
+     left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+     right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+     return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+
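For instance, padding a short spec against a longer candidate (worked out from the code above):

    assert _pad_version(["2", "2"], ["2", "2", "0", "rc1"]) == (
        ["2", "2", "0"],         # release segment is zero-padded
        ["2", "2", "0", "rc1"],  # suffix segments are left alone
    )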
637
+
+ class SpecifierSet(BaseSpecifier):
+     def __init__(
+         self, specifiers: str = "", prereleases: Optional[bool] = None
+     ) -> None:
+
+         # Split on , to break each individual specifier into its own item, and
+         # strip each item to remove leading/trailing whitespace.
+         split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+         # Parse each individual specifier, attempting first to make it a
+         # Specifier and falling back to a LegacySpecifier.
+         parsed: Set[_IndividualSpecifier] = set()
+         for specifier in split_specifiers:
+             try:
+                 parsed.add(Specifier(specifier))
+             except InvalidSpecifier:
+                 parsed.add(LegacySpecifier(specifier))
+
+         # Turn our parsed specifiers into a frozen set and save them for later.
+         self._specs = frozenset(parsed)
+
+         # Store our prereleases value so we can use it later to determine if
+         # we accept prereleases or not.
+         self._prereleases = prereleases
+
+     def __repr__(self) -> str:
+         pre = (
+             f", prereleases={self.prereleases!r}"
+             if self._prereleases is not None
+             else ""
+         )
+
+         return "<SpecifierSet({!r}{})>".format(str(self), pre)
+
+     def __str__(self) -> str:
+         return ",".join(sorted(str(s) for s in self._specs))
+
+     def __hash__(self) -> int:
+         return hash(self._specs)
+
+     def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+         if isinstance(other, str):
+             other = SpecifierSet(other)
+         elif not isinstance(other, SpecifierSet):
+             return NotImplemented
+
+         specifier = SpecifierSet()
+         specifier._specs = frozenset(self._specs | other._specs)
+
+         if self._prereleases is None and other._prereleases is not None:
+             specifier._prereleases = other._prereleases
+         elif self._prereleases is not None and other._prereleases is None:
+             specifier._prereleases = self._prereleases
+         elif self._prereleases == other._prereleases:
+             specifier._prereleases = self._prereleases
+         else:
+             raise ValueError(
+                 "Cannot combine SpecifierSets with True and False prerelease "
+                 "overrides."
+             )
+
+         return specifier
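A brief sketch of combining sets (the right operand may be a raw string, per __and__ above):

    from packaging.specifiers import SpecifierSet

    combined = SpecifierSet(">=1.0") & "<2.0"
    assert str(combined) == "<2.0,>=1.0"  # __str__ sorts and comma-joins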
700
+
+     def __eq__(self, other: object) -> bool:
+         if isinstance(other, (str, _IndividualSpecifier)):
+             other = SpecifierSet(str(other))
+         elif not isinstance(other, SpecifierSet):
+             return NotImplemented
+
+         return self._specs == other._specs
+
+     def __ne__(self, other: object) -> bool:
+         if isinstance(other, (str, _IndividualSpecifier)):
+             other = SpecifierSet(str(other))
+         elif not isinstance(other, SpecifierSet):
+             return NotImplemented
+
+         return self._specs != other._specs
+
+     def __len__(self) -> int:
+         return len(self._specs)
+
+     def __iter__(self) -> Iterator[_IndividualSpecifier]:
+         return iter(self._specs)
+
+     @property
+     def prereleases(self) -> Optional[bool]:
+
+         # If we have been given an explicit prerelease modifier, then we'll
+         # pass that through here.
+         if self._prereleases is not None:
+             return self._prereleases
+
+         # If we don't have any specifiers, and we don't have a forced value,
+         # then we'll just return None since we don't know if this should have
+         # pre-releases or not.
+         if not self._specs:
+             return None
+
+         # Otherwise we'll see if any of the given specifiers accept
+         # prereleases; if any of them do we'll return True, otherwise False.
+         return any(s.prereleases for s in self._specs)
+
+     @prereleases.setter
+     def prereleases(self, value: bool) -> None:
+         self._prereleases = value
+
+     def __contains__(self, item: UnparsedVersion) -> bool:
+         return self.contains(item)
+
+     def contains(
+         self, item: UnparsedVersion, prereleases: Optional[bool] = None
+     ) -> bool:
+
+         # Ensure that our item is a Version or LegacyVersion instance.
+         if not isinstance(item, (LegacyVersion, Version)):
+             item = parse(item)
+
+         # Determine if we're forcing a prerelease or not; if we're not forcing
+         # one for this particular call, then we'll use whatever the
+         # SpecifierSet thinks for whether or not we should support prereleases.
+         if prereleases is None:
+             prereleases = self.prereleases
+
+         # We can determine if we're going to allow pre-releases by looking to
+         # see if any of the underlying items supports them. If none of them do
+         # and this item is a pre-release then we do not allow it and we can
+         # short circuit that here.
+         # Note: This means that 1.0.dev1 would not be contained in something
+         #       like >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0
+         if not prereleases and item.is_prerelease:
+             return False
+
+         # We simply dispatch to the underlying specs here to make sure that the
+         # given version is contained within all of them.
+         # Note: This use of all() here means that an empty set of specifiers
+         #       will always return True; this is an explicit design decision.
+         return all(s.contains(item, prereleases=prereleases) for s in self._specs)
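Illustrative membership checks (a sketch; the version strings are made up):

    from packaging.specifiers import SpecifierSet

    specs = SpecifierSet(">=1.0,<2.0")
    assert "1.4" in specs        # __contains__ defers to contains()
    assert "2.0" not in specs
    assert SpecifierSet("").contains("0.1")  # empty set accepts any final release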
776
+
+     def filter(
+         self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+     ) -> Iterable[VersionTypeVar]:
+
+         # Determine if we're forcing a prerelease or not; if we're not forcing
+         # one for this particular filter call, then we'll use whatever the
+         # SpecifierSet thinks for whether or not we should support prereleases.
+         if prereleases is None:
+             prereleases = self.prereleases
+
+         # If we have any specifiers, then we want to wrap our iterable in the
+         # filter method for each one; this will act as a logical AND amongst
+         # each specifier.
+         if self._specs:
+             for spec in self._specs:
+                 iterable = spec.filter(iterable, prereleases=bool(prereleases))
+             return iterable
+         # If we do not have any specifiers, then we need to have a rough filter
+         # which will filter out any pre-releases, unless there are no final
+         # releases, and which will filter out LegacyVersion in general.
+         else:
+             filtered: List[VersionTypeVar] = []
+             found_prereleases: List[VersionTypeVar] = []
+
+             item: UnparsedVersion
+             parsed_version: Union[Version, LegacyVersion]
+
+             for item in iterable:
+                 # Ensure that we have some kind of Version instance for this
+                 # item.
+                 if not isinstance(item, (LegacyVersion, Version)):
+                     parsed_version = parse(item)
+                 else:
+                     parsed_version = item
+
+                 # Filter out any item which is parsed as a LegacyVersion
+                 if isinstance(parsed_version, LegacyVersion):
+                     continue
+
+                 # Store any item which is a pre-release for later unless we've
+                 # already found a final version or we are accepting prereleases
+                 if parsed_version.is_prerelease and not prereleases:
+                     if not filtered:
+                         found_prereleases.append(item)
+                 else:
+                     filtered.append(item)
+
+             # If we've found no items except for pre-releases, then we'll go
+             # ahead and use the pre-releases
+             if not filtered and found_prereleases and prereleases is None:
+                 return found_prereleases
+
+             return filtered
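A sketch of both filter branches (illustrative inputs):

    from packaging.specifiers import SpecifierSet

    versions = ["0.9", "1.0", "1.5a1", "2.0"]
    assert list(SpecifierSet(">=1.0,<2.0").filter(versions)) == ["1.0"]
    # With no specifiers at all, pre-releases only surface when nothing final exists:
    assert list(SpecifierSet("").filter(["1.0a1"])) == ["1.0a1"]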
env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__main__.py ADDED
@@ -0,0 +1,45 @@
+ import sys
+
+ from . import DATASETS
+
+ try:
+     cmd = sys.argv[1]
+ except IndexError:
+     print(f"Usage: {sys.argv[0]} --check | --dump")
+     sys.exit(1)
+
+ if cmd == "--check":
+     import hashlib
+     import urllib.request
+
+     url_md5 = {}
+
+     for item in DATASETS.values():
+         if item.md5 is not None:
+             assert item.data
+             assert item.md5
+             assert len(item.data) == len(item.md5)
+             pairs = zip(item.data, item.md5)
+             for url, md5_hash in pairs:
+                 url_md5[url] = md5_hash
+
+     for url, md5_hash in url_md5.items():
+         try:
+             print("Downloading ", url)
+             with urllib.request.urlopen(url) as f:
+                 data = f.read()
+         except Exception as exc:
+             raise exc
+
+         if hashlib.md5(data).hexdigest() != md5_hash:
+             print("MD5 check failed for", url)
+ elif cmd == "--dump":
+     import re
+
+     # Dumps a table in markdown format
+     print(f'| {"Dataset":<30} | {"Description":<115} |')
+     header = "| " + "-" * 30 + " | " + "-" * 115 + " |"
+     print(header)
+     for name, item in DATASETS.items():
+         desc = re.sub(r"(http[s]?:\/\/\S+)", r"[URL](\1)", str(item.description))
+         print(f"| {name:<30} | {desc:<115} |")
env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/__main__.cpython-310.pyc ADDED
Binary file (1.34 kB)
 
env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/base.cpython-310.pyc ADDED
Binary file (7.35 kB)
 
env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/iwslt_xml.cpython-310.pyc ADDED
Binary file (472 Bytes)
 
env-llmeval/lib/python3.10/site-packages/sacrebleu/dataset/__pycache__/plain_text.cpython-310.pyc ADDED
Binary file (1.57 kB)