applied-ai-018 commited on
Commit
d3c0637
·
verified ·
1 Parent(s): 93e2cf6

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. env-llmeval/lib/python3.10/site-packages/attr/__init__.pyi +555 -0
  2. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/__init__.cpython-310.pyc +0 -0
  3. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_cmp.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_make.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/converters.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc +0 -0
  12. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/filters.cpython-310.pyc +0 -0
  13. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc +0 -0
  14. env-llmeval/lib/python3.10/site-packages/attr/__pycache__/validators.cpython-310.pyc +0 -0
  15. env-llmeval/lib/python3.10/site-packages/attr/_cmp.pyi +13 -0
  16. env-llmeval/lib/python3.10/site-packages/attr/_compat.py +87 -0
  17. env-llmeval/lib/python3.10/site-packages/attr/_config.py +31 -0
  18. env-llmeval/lib/python3.10/site-packages/attr/_next_gen.py +229 -0
  19. env-llmeval/lib/python3.10/site-packages/attr/_typing_compat.pyi +15 -0
  20. env-llmeval/lib/python3.10/site-packages/attr/_version_info.py +86 -0
  21. env-llmeval/lib/python3.10/site-packages/attr/converters.py +144 -0
  22. env-llmeval/lib/python3.10/site-packages/attr/filters.py +66 -0
  23. env-llmeval/lib/python3.10/site-packages/attr/setters.pyi +19 -0
  24. env-llmeval/lib/python3.10/site-packages/attr/validators.py +681 -0
  25. env-llmeval/lib/python3.10/site-packages/huggingface_hub/_login.py +396 -0
  26. env-llmeval/lib/python3.10/site-packages/huggingface_hub/_tensorboard_logger.py +169 -0
  27. env-llmeval/lib/python3.10/site-packages/huggingface_hub/_webhooks_server.py +380 -0
  28. env-llmeval/lib/python3.10/site-packages/huggingface_hub/fastai_utils.py +425 -0
  29. env-llmeval/lib/python3.10/site-packages/huggingface_hub/hub_mixin.py +704 -0
  30. env-llmeval/lib/python3.10/site-packages/huggingface_hub/keras_mixin.py +502 -0
  31. env-llmeval/lib/python3.10/site-packages/huggingface_hub/repocard_data.py +729 -0
  32. env-llmeval/lib/python3.10/site-packages/more_itertools/__init__.py +6 -0
  33. env-llmeval/lib/python3.10/site-packages/more_itertools/__init__.pyi +2 -0
  34. env-llmeval/lib/python3.10/site-packages/more_itertools/__pycache__/__init__.cpython-310.pyc +0 -0
  35. env-llmeval/lib/python3.10/site-packages/more_itertools/__pycache__/more.cpython-310.pyc +0 -0
  36. env-llmeval/lib/python3.10/site-packages/more_itertools/__pycache__/recipes.cpython-310.pyc +0 -0
  37. env-llmeval/lib/python3.10/site-packages/more_itertools/more.py +0 -0
  38. env-llmeval/lib/python3.10/site-packages/more_itertools/more.pyi +695 -0
  39. env-llmeval/lib/python3.10/site-packages/more_itertools/py.typed +0 -0
  40. env-llmeval/lib/python3.10/site-packages/more_itertools/recipes.py +1012 -0
  41. env-llmeval/lib/python3.10/site-packages/more_itertools/recipes.pyi +128 -0
  42. env-llmeval/lib/python3.10/site-packages/multidict/__init__.py +48 -0
  43. env-llmeval/lib/python3.10/site-packages/multidict/__init__.pyi +152 -0
  44. env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/__init__.cpython-310.pyc +0 -0
  45. env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_abc.cpython-310.pyc +0 -0
  46. env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_compat.cpython-310.pyc +0 -0
  47. env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_multidict_base.cpython-310.pyc +0 -0
  48. env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_multidict_py.cpython-310.pyc +0 -0
  49. env-llmeval/lib/python3.10/site-packages/multidict/_abc.py +48 -0
  50. env-llmeval/lib/python3.10/site-packages/multidict/_compat.py +14 -0
env-llmeval/lib/python3.10/site-packages/attr/__init__.pyi ADDED
@@ -0,0 +1,555 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import enum
2
+ import sys
3
+
4
+ from typing import (
5
+ Any,
6
+ Callable,
7
+ Dict,
8
+ Generic,
9
+ List,
10
+ Mapping,
11
+ Optional,
12
+ Protocol,
13
+ Sequence,
14
+ Tuple,
15
+ Type,
16
+ TypeVar,
17
+ Union,
18
+ overload,
19
+ )
20
+
21
+ # `import X as X` is required to make these public
22
+ from . import converters as converters
23
+ from . import exceptions as exceptions
24
+ from . import filters as filters
25
+ from . import setters as setters
26
+ from . import validators as validators
27
+ from ._cmp import cmp_using as cmp_using
28
+ from ._typing_compat import AttrsInstance_
29
+ from ._version_info import VersionInfo
30
+
31
+ if sys.version_info >= (3, 10):
32
+ from typing import TypeGuard
33
+ else:
34
+ from typing_extensions import TypeGuard
35
+
36
+ if sys.version_info >= (3, 11):
37
+ from typing import dataclass_transform
38
+ else:
39
+ from typing_extensions import dataclass_transform
40
+
41
+ __version__: str
42
+ __version_info__: VersionInfo
43
+ __title__: str
44
+ __description__: str
45
+ __url__: str
46
+ __uri__: str
47
+ __author__: str
48
+ __email__: str
49
+ __license__: str
50
+ __copyright__: str
51
+
52
+ _T = TypeVar("_T")
53
+ _C = TypeVar("_C", bound=type)
54
+
55
+ _EqOrderType = Union[bool, Callable[[Any], Any]]
56
+ _ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]
57
+ _ConverterType = Callable[[Any], Any]
58
+ _FilterType = Callable[["Attribute[_T]", _T], bool]
59
+ _ReprType = Callable[[Any], str]
60
+ _ReprArgType = Union[bool, _ReprType]
61
+ _OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any]
62
+ _OnSetAttrArgType = Union[
63
+ _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
64
+ ]
65
+ _FieldTransformer = Callable[
66
+ [type, List["Attribute[Any]"]], List["Attribute[Any]"]
67
+ ]
68
+ # FIXME: in reality, if multiple validators are passed they must be in a list
69
+ # or tuple, but those are invariant and so would prevent subtypes of
70
+ # _ValidatorType from working when passed in a list or tuple.
71
+ _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
72
+
73
+ # We subclass this here to keep the protocol's qualified name clean.
74
+ class AttrsInstance(AttrsInstance_, Protocol):
75
+ pass
76
+
77
+ _A = TypeVar("_A", bound=type[AttrsInstance])
78
+
79
+ class _Nothing(enum.Enum):
80
+ NOTHING = enum.auto()
81
+
82
+ NOTHING = _Nothing.NOTHING
83
+
84
+ # NOTE: Factory lies about its return type to make this possible:
85
+ # `x: List[int] # = Factory(list)`
86
+ # Work around mypy issue #4554 in the common case by using an overload.
87
+ if sys.version_info >= (3, 8):
88
+ from typing import Literal
89
+ @overload
90
+ def Factory(factory: Callable[[], _T]) -> _T: ...
91
+ @overload
92
+ def Factory(
93
+ factory: Callable[[Any], _T],
94
+ takes_self: Literal[True],
95
+ ) -> _T: ...
96
+ @overload
97
+ def Factory(
98
+ factory: Callable[[], _T],
99
+ takes_self: Literal[False],
100
+ ) -> _T: ...
101
+
102
+ else:
103
+ @overload
104
+ def Factory(factory: Callable[[], _T]) -> _T: ...
105
+ @overload
106
+ def Factory(
107
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
108
+ takes_self: bool = ...,
109
+ ) -> _T: ...
110
+
111
+ class Attribute(Generic[_T]):
112
+ name: str
113
+ default: Optional[_T]
114
+ validator: Optional[_ValidatorType[_T]]
115
+ repr: _ReprArgType
116
+ cmp: _EqOrderType
117
+ eq: _EqOrderType
118
+ order: _EqOrderType
119
+ hash: Optional[bool]
120
+ init: bool
121
+ converter: Optional[_ConverterType]
122
+ metadata: Dict[Any, Any]
123
+ type: Optional[Type[_T]]
124
+ kw_only: bool
125
+ on_setattr: _OnSetAttrType
126
+ alias: Optional[str]
127
+
128
+ def evolve(self, **changes: Any) -> "Attribute[Any]": ...
129
+
130
+ # NOTE: We had several choices for the annotation to use for type arg:
131
+ # 1) Type[_T]
132
+ # - Pros: Handles simple cases correctly
133
+ # - Cons: Might produce less informative errors in the case of conflicting
134
+ # TypeVars e.g. `attr.ib(default='bad', type=int)`
135
+ # 2) Callable[..., _T]
136
+ # - Pros: Better error messages than #1 for conflicting TypeVars
137
+ # - Cons: Terrible error messages for validator checks.
138
+ # e.g. attr.ib(type=int, validator=validate_str)
139
+ # -> error: Cannot infer function type argument
140
+ # 3) type (and do all of the work in the mypy plugin)
141
+ # - Pros: Simple here, and we could customize the plugin with our own errors.
142
+ # - Cons: Would need to write mypy plugin code to handle all the cases.
143
+ # We chose option #1.
144
+
145
+ # `attr` lies about its return type to make the following possible:
146
+ # attr() -> Any
147
+ # attr(8) -> int
148
+ # attr(validator=<some callable>) -> Whatever the callable expects.
149
+ # This makes this type of assignments possible:
150
+ # x: int = attr(8)
151
+ #
152
+ # This form catches explicit None or no default but with no other arguments
153
+ # returns Any.
154
+ @overload
155
+ def attrib(
156
+ default: None = ...,
157
+ validator: None = ...,
158
+ repr: _ReprArgType = ...,
159
+ cmp: Optional[_EqOrderType] = ...,
160
+ hash: Optional[bool] = ...,
161
+ init: bool = ...,
162
+ metadata: Optional[Mapping[Any, Any]] = ...,
163
+ type: None = ...,
164
+ converter: None = ...,
165
+ factory: None = ...,
166
+ kw_only: bool = ...,
167
+ eq: Optional[_EqOrderType] = ...,
168
+ order: Optional[_EqOrderType] = ...,
169
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
170
+ alias: Optional[str] = ...,
171
+ ) -> Any: ...
172
+
173
+ # This form catches an explicit None or no default and infers the type from the
174
+ # other arguments.
175
+ @overload
176
+ def attrib(
177
+ default: None = ...,
178
+ validator: Optional[_ValidatorArgType[_T]] = ...,
179
+ repr: _ReprArgType = ...,
180
+ cmp: Optional[_EqOrderType] = ...,
181
+ hash: Optional[bool] = ...,
182
+ init: bool = ...,
183
+ metadata: Optional[Mapping[Any, Any]] = ...,
184
+ type: Optional[Type[_T]] = ...,
185
+ converter: Optional[_ConverterType] = ...,
186
+ factory: Optional[Callable[[], _T]] = ...,
187
+ kw_only: bool = ...,
188
+ eq: Optional[_EqOrderType] = ...,
189
+ order: Optional[_EqOrderType] = ...,
190
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
191
+ alias: Optional[str] = ...,
192
+ ) -> _T: ...
193
+
194
+ # This form catches an explicit default argument.
195
+ @overload
196
+ def attrib(
197
+ default: _T,
198
+ validator: Optional[_ValidatorArgType[_T]] = ...,
199
+ repr: _ReprArgType = ...,
200
+ cmp: Optional[_EqOrderType] = ...,
201
+ hash: Optional[bool] = ...,
202
+ init: bool = ...,
203
+ metadata: Optional[Mapping[Any, Any]] = ...,
204
+ type: Optional[Type[_T]] = ...,
205
+ converter: Optional[_ConverterType] = ...,
206
+ factory: Optional[Callable[[], _T]] = ...,
207
+ kw_only: bool = ...,
208
+ eq: Optional[_EqOrderType] = ...,
209
+ order: Optional[_EqOrderType] = ...,
210
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
211
+ alias: Optional[str] = ...,
212
+ ) -> _T: ...
213
+
214
+ # This form covers type=non-Type: e.g. forward references (str), Any
215
+ @overload
216
+ def attrib(
217
+ default: Optional[_T] = ...,
218
+ validator: Optional[_ValidatorArgType[_T]] = ...,
219
+ repr: _ReprArgType = ...,
220
+ cmp: Optional[_EqOrderType] = ...,
221
+ hash: Optional[bool] = ...,
222
+ init: bool = ...,
223
+ metadata: Optional[Mapping[Any, Any]] = ...,
224
+ type: object = ...,
225
+ converter: Optional[_ConverterType] = ...,
226
+ factory: Optional[Callable[[], _T]] = ...,
227
+ kw_only: bool = ...,
228
+ eq: Optional[_EqOrderType] = ...,
229
+ order: Optional[_EqOrderType] = ...,
230
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
231
+ alias: Optional[str] = ...,
232
+ ) -> Any: ...
233
+ @overload
234
+ def field(
235
+ *,
236
+ default: None = ...,
237
+ validator: None = ...,
238
+ repr: _ReprArgType = ...,
239
+ hash: Optional[bool] = ...,
240
+ init: bool = ...,
241
+ metadata: Optional[Mapping[Any, Any]] = ...,
242
+ converter: None = ...,
243
+ factory: None = ...,
244
+ kw_only: bool = ...,
245
+ eq: Optional[bool] = ...,
246
+ order: Optional[bool] = ...,
247
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
248
+ alias: Optional[str] = ...,
249
+ type: Optional[type] = ...,
250
+ ) -> Any: ...
251
+
252
+ # This form catches an explicit None or no default and infers the type from the
253
+ # other arguments.
254
+ @overload
255
+ def field(
256
+ *,
257
+ default: None = ...,
258
+ validator: Optional[_ValidatorArgType[_T]] = ...,
259
+ repr: _ReprArgType = ...,
260
+ hash: Optional[bool] = ...,
261
+ init: bool = ...,
262
+ metadata: Optional[Mapping[Any, Any]] = ...,
263
+ converter: Optional[_ConverterType] = ...,
264
+ factory: Optional[Callable[[], _T]] = ...,
265
+ kw_only: bool = ...,
266
+ eq: Optional[_EqOrderType] = ...,
267
+ order: Optional[_EqOrderType] = ...,
268
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
269
+ alias: Optional[str] = ...,
270
+ type: Optional[type] = ...,
271
+ ) -> _T: ...
272
+
273
+ # This form catches an explicit default argument.
274
+ @overload
275
+ def field(
276
+ *,
277
+ default: _T,
278
+ validator: Optional[_ValidatorArgType[_T]] = ...,
279
+ repr: _ReprArgType = ...,
280
+ hash: Optional[bool] = ...,
281
+ init: bool = ...,
282
+ metadata: Optional[Mapping[Any, Any]] = ...,
283
+ converter: Optional[_ConverterType] = ...,
284
+ factory: Optional[Callable[[], _T]] = ...,
285
+ kw_only: bool = ...,
286
+ eq: Optional[_EqOrderType] = ...,
287
+ order: Optional[_EqOrderType] = ...,
288
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
289
+ alias: Optional[str] = ...,
290
+ type: Optional[type] = ...,
291
+ ) -> _T: ...
292
+
293
+ # This form covers type=non-Type: e.g. forward references (str), Any
294
+ @overload
295
+ def field(
296
+ *,
297
+ default: Optional[_T] = ...,
298
+ validator: Optional[_ValidatorArgType[_T]] = ...,
299
+ repr: _ReprArgType = ...,
300
+ hash: Optional[bool] = ...,
301
+ init: bool = ...,
302
+ metadata: Optional[Mapping[Any, Any]] = ...,
303
+ converter: Optional[_ConverterType] = ...,
304
+ factory: Optional[Callable[[], _T]] = ...,
305
+ kw_only: bool = ...,
306
+ eq: Optional[_EqOrderType] = ...,
307
+ order: Optional[_EqOrderType] = ...,
308
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
309
+ alias: Optional[str] = ...,
310
+ type: Optional[type] = ...,
311
+ ) -> Any: ...
312
+ @overload
313
+ @dataclass_transform(order_default=True, field_specifiers=(attrib, field))
314
+ def attrs(
315
+ maybe_cls: _C,
316
+ these: Optional[Dict[str, Any]] = ...,
317
+ repr_ns: Optional[str] = ...,
318
+ repr: bool = ...,
319
+ cmp: Optional[_EqOrderType] = ...,
320
+ hash: Optional[bool] = ...,
321
+ init: bool = ...,
322
+ slots: bool = ...,
323
+ frozen: bool = ...,
324
+ weakref_slot: bool = ...,
325
+ str: bool = ...,
326
+ auto_attribs: bool = ...,
327
+ kw_only: bool = ...,
328
+ cache_hash: bool = ...,
329
+ auto_exc: bool = ...,
330
+ eq: Optional[_EqOrderType] = ...,
331
+ order: Optional[_EqOrderType] = ...,
332
+ auto_detect: bool = ...,
333
+ collect_by_mro: bool = ...,
334
+ getstate_setstate: Optional[bool] = ...,
335
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
336
+ field_transformer: Optional[_FieldTransformer] = ...,
337
+ match_args: bool = ...,
338
+ unsafe_hash: Optional[bool] = ...,
339
+ ) -> _C: ...
340
+ @overload
341
+ @dataclass_transform(order_default=True, field_specifiers=(attrib, field))
342
+ def attrs(
343
+ maybe_cls: None = ...,
344
+ these: Optional[Dict[str, Any]] = ...,
345
+ repr_ns: Optional[str] = ...,
346
+ repr: bool = ...,
347
+ cmp: Optional[_EqOrderType] = ...,
348
+ hash: Optional[bool] = ...,
349
+ init: bool = ...,
350
+ slots: bool = ...,
351
+ frozen: bool = ...,
352
+ weakref_slot: bool = ...,
353
+ str: bool = ...,
354
+ auto_attribs: bool = ...,
355
+ kw_only: bool = ...,
356
+ cache_hash: bool = ...,
357
+ auto_exc: bool = ...,
358
+ eq: Optional[_EqOrderType] = ...,
359
+ order: Optional[_EqOrderType] = ...,
360
+ auto_detect: bool = ...,
361
+ collect_by_mro: bool = ...,
362
+ getstate_setstate: Optional[bool] = ...,
363
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
364
+ field_transformer: Optional[_FieldTransformer] = ...,
365
+ match_args: bool = ...,
366
+ unsafe_hash: Optional[bool] = ...,
367
+ ) -> Callable[[_C], _C]: ...
368
+ @overload
369
+ @dataclass_transform(field_specifiers=(attrib, field))
370
+ def define(
371
+ maybe_cls: _C,
372
+ *,
373
+ these: Optional[Dict[str, Any]] = ...,
374
+ repr: bool = ...,
375
+ unsafe_hash: Optional[bool] = ...,
376
+ hash: Optional[bool] = ...,
377
+ init: bool = ...,
378
+ slots: bool = ...,
379
+ frozen: bool = ...,
380
+ weakref_slot: bool = ...,
381
+ str: bool = ...,
382
+ auto_attribs: bool = ...,
383
+ kw_only: bool = ...,
384
+ cache_hash: bool = ...,
385
+ auto_exc: bool = ...,
386
+ eq: Optional[bool] = ...,
387
+ order: Optional[bool] = ...,
388
+ auto_detect: bool = ...,
389
+ getstate_setstate: Optional[bool] = ...,
390
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
391
+ field_transformer: Optional[_FieldTransformer] = ...,
392
+ match_args: bool = ...,
393
+ ) -> _C: ...
394
+ @overload
395
+ @dataclass_transform(field_specifiers=(attrib, field))
396
+ def define(
397
+ maybe_cls: None = ...,
398
+ *,
399
+ these: Optional[Dict[str, Any]] = ...,
400
+ repr: bool = ...,
401
+ unsafe_hash: Optional[bool] = ...,
402
+ hash: Optional[bool] = ...,
403
+ init: bool = ...,
404
+ slots: bool = ...,
405
+ frozen: bool = ...,
406
+ weakref_slot: bool = ...,
407
+ str: bool = ...,
408
+ auto_attribs: bool = ...,
409
+ kw_only: bool = ...,
410
+ cache_hash: bool = ...,
411
+ auto_exc: bool = ...,
412
+ eq: Optional[bool] = ...,
413
+ order: Optional[bool] = ...,
414
+ auto_detect: bool = ...,
415
+ getstate_setstate: Optional[bool] = ...,
416
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
417
+ field_transformer: Optional[_FieldTransformer] = ...,
418
+ match_args: bool = ...,
419
+ ) -> Callable[[_C], _C]: ...
420
+
421
+ mutable = define
422
+
423
+ @overload
424
+ @dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
425
+ def frozen(
426
+ maybe_cls: _C,
427
+ *,
428
+ these: Optional[Dict[str, Any]] = ...,
429
+ repr: bool = ...,
430
+ unsafe_hash: Optional[bool] = ...,
431
+ hash: Optional[bool] = ...,
432
+ init: bool = ...,
433
+ slots: bool = ...,
434
+ frozen: bool = ...,
435
+ weakref_slot: bool = ...,
436
+ str: bool = ...,
437
+ auto_attribs: bool = ...,
438
+ kw_only: bool = ...,
439
+ cache_hash: bool = ...,
440
+ auto_exc: bool = ...,
441
+ eq: Optional[bool] = ...,
442
+ order: Optional[bool] = ...,
443
+ auto_detect: bool = ...,
444
+ getstate_setstate: Optional[bool] = ...,
445
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
446
+ field_transformer: Optional[_FieldTransformer] = ...,
447
+ match_args: bool = ...,
448
+ ) -> _C: ...
449
+ @overload
450
+ @dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
451
+ def frozen(
452
+ maybe_cls: None = ...,
453
+ *,
454
+ these: Optional[Dict[str, Any]] = ...,
455
+ repr: bool = ...,
456
+ unsafe_hash: Optional[bool] = ...,
457
+ hash: Optional[bool] = ...,
458
+ init: bool = ...,
459
+ slots: bool = ...,
460
+ frozen: bool = ...,
461
+ weakref_slot: bool = ...,
462
+ str: bool = ...,
463
+ auto_attribs: bool = ...,
464
+ kw_only: bool = ...,
465
+ cache_hash: bool = ...,
466
+ auto_exc: bool = ...,
467
+ eq: Optional[bool] = ...,
468
+ order: Optional[bool] = ...,
469
+ auto_detect: bool = ...,
470
+ getstate_setstate: Optional[bool] = ...,
471
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
472
+ field_transformer: Optional[_FieldTransformer] = ...,
473
+ match_args: bool = ...,
474
+ ) -> Callable[[_C], _C]: ...
475
+ def fields(cls: Type[AttrsInstance]) -> Any: ...
476
+ def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
477
+ def validate(inst: AttrsInstance) -> None: ...
478
+ def resolve_types(
479
+ cls: _A,
480
+ globalns: Optional[Dict[str, Any]] = ...,
481
+ localns: Optional[Dict[str, Any]] = ...,
482
+ attribs: Optional[List[Attribute[Any]]] = ...,
483
+ include_extras: bool = ...,
484
+ ) -> _A: ...
485
+
486
+ # TODO: add support for returning a proper attrs class from the mypy plugin
487
+ # we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
488
+ # [attr.ib()])` is valid
489
+ def make_class(
490
+ name: str,
491
+ attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
492
+ bases: Tuple[type, ...] = ...,
493
+ class_body: Optional[Dict[str, Any]] = ...,
494
+ repr_ns: Optional[str] = ...,
495
+ repr: bool = ...,
496
+ cmp: Optional[_EqOrderType] = ...,
497
+ hash: Optional[bool] = ...,
498
+ init: bool = ...,
499
+ slots: bool = ...,
500
+ frozen: bool = ...,
501
+ weakref_slot: bool = ...,
502
+ str: bool = ...,
503
+ auto_attribs: bool = ...,
504
+ kw_only: bool = ...,
505
+ cache_hash: bool = ...,
506
+ auto_exc: bool = ...,
507
+ eq: Optional[_EqOrderType] = ...,
508
+ order: Optional[_EqOrderType] = ...,
509
+ collect_by_mro: bool = ...,
510
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
511
+ field_transformer: Optional[_FieldTransformer] = ...,
512
+ ) -> type: ...
513
+
514
+ # _funcs --
515
+
516
+ # TODO: add support for returning TypedDict from the mypy plugin
517
+ # FIXME: asdict/astuple do not honor their factory args. Waiting on one of
518
+ # these:
519
+ # https://github.com/python/mypy/issues/4236
520
+ # https://github.com/python/typing/issues/253
521
+ # XXX: remember to fix attrs.asdict/astuple too!
522
+ def asdict(
523
+ inst: AttrsInstance,
524
+ recurse: bool = ...,
525
+ filter: Optional[_FilterType[Any]] = ...,
526
+ dict_factory: Type[Mapping[Any, Any]] = ...,
527
+ retain_collection_types: bool = ...,
528
+ value_serializer: Optional[
529
+ Callable[[type, Attribute[Any], Any], Any]
530
+ ] = ...,
531
+ tuple_keys: Optional[bool] = ...,
532
+ ) -> Dict[str, Any]: ...
533
+
534
+ # TODO: add support for returning NamedTuple from the mypy plugin
535
+ def astuple(
536
+ inst: AttrsInstance,
537
+ recurse: bool = ...,
538
+ filter: Optional[_FilterType[Any]] = ...,
539
+ tuple_factory: Type[Sequence[Any]] = ...,
540
+ retain_collection_types: bool = ...,
541
+ ) -> Tuple[Any, ...]: ...
542
+ def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ...
543
+ def assoc(inst: _T, **changes: Any) -> _T: ...
544
+ def evolve(inst: _T, **changes: Any) -> _T: ...
545
+
546
+ # _config --
547
+
548
+ def set_run_validators(run: bool) -> None: ...
549
+ def get_run_validators() -> bool: ...
550
+
551
+ # aliases --
552
+
553
+ s = attributes = attrs
554
+ ib = attr = attrib
555
+ dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (3.01 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_cmp.cpython-310.pyc ADDED
Binary file (3.93 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_compat.cpython-310.pyc ADDED
Binary file (2.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_config.cpython-310.pyc ADDED
Binary file (1 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_funcs.cpython-310.pyc ADDED
Binary file (11.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_make.cpython-310.pyc ADDED
Binary file (75.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_next_gen.cpython-310.pyc ADDED
Binary file (5.42 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/_version_info.cpython-310.pyc ADDED
Binary file (2.32 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/converters.cpython-310.pyc ADDED
Binary file (3.54 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (3.16 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/filters.cpython-310.pyc ADDED
Binary file (1.91 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/setters.cpython-310.pyc ADDED
Binary file (1.51 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/__pycache__/validators.cpython-310.pyc ADDED
Binary file (19.7 kB). View file
 
env-llmeval/lib/python3.10/site-packages/attr/_cmp.pyi ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Callable, Optional, Type
2
+
3
+ _CompareWithType = Callable[[Any, Any], bool]
4
+
5
+ def cmp_using(
6
+ eq: Optional[_CompareWithType] = ...,
7
+ lt: Optional[_CompareWithType] = ...,
8
+ le: Optional[_CompareWithType] = ...,
9
+ gt: Optional[_CompareWithType] = ...,
10
+ ge: Optional[_CompareWithType] = ...,
11
+ require_same_type: bool = ...,
12
+ class_name: str = ...,
13
+ ) -> Type: ...
env-llmeval/lib/python3.10/site-packages/attr/_compat.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ import inspect
4
+ import platform
5
+ import sys
6
+ import threading
7
+
8
+ from collections.abc import Mapping, Sequence # noqa: F401
9
+ from typing import _GenericAlias
10
+
11
+
12
+ PYPY = platform.python_implementation() == "PyPy"
13
+ PY_3_8_PLUS = sys.version_info[:2] >= (3, 8)
14
+ PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
15
+ PY310 = sys.version_info[:2] >= (3, 10)
16
+ PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
17
+
18
+
19
+ if sys.version_info < (3, 8):
20
+ try:
21
+ from typing_extensions import Protocol
22
+ except ImportError: # pragma: no cover
23
+ Protocol = object
24
+ else:
25
+ from typing import Protocol # noqa: F401
26
+
27
+
28
+ class _AnnotationExtractor:
29
+ """
30
+ Extract type annotations from a callable, returning None whenever there
31
+ is none.
32
+ """
33
+
34
+ __slots__ = ["sig"]
35
+
36
+ def __init__(self, callable):
37
+ try:
38
+ self.sig = inspect.signature(callable)
39
+ except (ValueError, TypeError): # inspect failed
40
+ self.sig = None
41
+
42
+ def get_first_param_type(self):
43
+ """
44
+ Return the type annotation of the first argument if it's not empty.
45
+ """
46
+ if not self.sig:
47
+ return None
48
+
49
+ params = list(self.sig.parameters.values())
50
+ if params and params[0].annotation is not inspect.Parameter.empty:
51
+ return params[0].annotation
52
+
53
+ return None
54
+
55
+ def get_return_type(self):
56
+ """
57
+ Return the return type if it's not empty.
58
+ """
59
+ if (
60
+ self.sig
61
+ and self.sig.return_annotation is not inspect.Signature.empty
62
+ ):
63
+ return self.sig.return_annotation
64
+
65
+ return None
66
+
67
+
68
+ # Thread-local global to track attrs instances which are already being repr'd.
69
+ # This is needed because there is no other (thread-safe) way to pass info
70
+ # about the instances that are already being repr'd through the call stack
71
+ # in order to ensure we don't perform infinite recursion.
72
+ #
73
+ # For instance, if an instance contains a dict which contains that instance,
74
+ # we need to know that we're already repr'ing the outside instance from within
75
+ # the dict's repr() call.
76
+ #
77
+ # This lives here rather than in _make.py so that the functions in _make.py
78
+ # don't have a direct reference to the thread-local in their globals dict.
79
+ # If they have such a reference, it breaks cloudpickle.
80
+ repr_context = threading.local()
81
+
82
+
83
+ def get_generic_base(cl):
84
+ """If this is a generic class (A[str]), return the generic base for it."""
85
+ if cl.__class__ is _GenericAlias:
86
+ return cl.__origin__
87
+ return None
env-llmeval/lib/python3.10/site-packages/attr/_config.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ __all__ = ["set_run_validators", "get_run_validators"]
4
+
5
+ _run_validators = True
6
+
7
+
8
+ def set_run_validators(run):
9
+ """
10
+ Set whether or not validators are run. By default, they are run.
11
+
12
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
13
+ moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
14
+ instead.
15
+ """
16
+ if not isinstance(run, bool):
17
+ msg = "'run' must be bool."
18
+ raise TypeError(msg)
19
+ global _run_validators
20
+ _run_validators = run
21
+
22
+
23
+ def get_run_validators():
24
+ """
25
+ Return whether or not validators are run.
26
+
27
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
28
+ moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
29
+ instead.
30
+ """
31
+ return _run_validators
env-llmeval/lib/python3.10/site-packages/attr/_next_gen.py ADDED
@@ -0,0 +1,229 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ These are keyword-only APIs that call `attr.s` and `attr.ib` with different
5
+ default values.
6
+ """
7
+
8
+
9
+ from functools import partial
10
+
11
+ from . import setters
12
+ from ._funcs import asdict as _asdict
13
+ from ._funcs import astuple as _astuple
14
+ from ._make import (
15
+ NOTHING,
16
+ _frozen_setattrs,
17
+ _ng_default_on_setattr,
18
+ attrib,
19
+ attrs,
20
+ )
21
+ from .exceptions import UnannotatedAttributeError
22
+
23
+
24
def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    unsafe_hash=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
):
    r"""
    Define an *attrs* class.

    Differences to the classic `attr.s` that it uses underneath:

    - Automatically detect whether or not *auto_attribs* should be `True` (c.f.
      *auto_attribs* parameter).
    - Converters and validators run when attributes are set by default -- if
      *frozen* is `False`.
    - *slots=True*

    .. caution::

       Usually this has only upsides and few visible effects in everyday
       programming. But it *can* lead to some surprising behaviors, so please
       make sure to read :term:`slotted classes`.
    - *auto_exc=True*
    - *auto_detect=True*
    - *order=False*
    - Some options that were only relevant on Python 2 or were kept around for
      backwards-compatibility have been removed.

    Please note that these are all defaults and you can change them as you
    wish.

    :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
       exactly like `attr.s`. If left `None`, `attr.s` will try to guess:

       1. If any attributes are annotated and no unannotated `attrs.fields`\ s
          are found, it assumes *auto_attribs=True*.
       2. Otherwise it assumes *auto_attribs=False* and tries to collect
          `attrs.fields`\ s.

    For now, please refer to `attr.s` for the rest of the parameters.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    """

    def do_it(cls, auto_attribs):
        # Single funnel into the classic `attrs` decorator; all of define()'s
        # modern defaults are forwarded here.
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            unsafe_hash=unsafe_hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
            match_args=match_args,
        )

    def wrap(cls):
        """
        Making this a wrapper ensures this code runs during class creation.

        We also ensure that frozen-ness of classes is inherited.
        """
        # Rebound below depending on the class actually being decorated.
        nonlocal frozen, on_setattr

        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes convert & validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = _ng_default_on_setattr

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
        for base_cls in cls.__bases__:
            if base_cls.__setattr__ is _frozen_setattrs:
                if had_on_setattr:
                    msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
                    raise ValueError(msg)

                on_setattr = setters.NO_OP
                break

        if auto_attribs is not None:
            return do_it(cls, auto_attribs)

        # auto_attribs was left unspecified: try annotated-fields mode first
        # and fall back to classic collection when that fails.
        try:
            return do_it(cls, True)
        except UnannotatedAttributeError:
            return do_it(cls, False)

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap

    return wrap(maybe_cls)
154
+
155
+
156
# `mutable` is a plain alias: define()'s defaults already produce a mutable
# class.
mutable = define
# `frozen` pins frozen=True and suppresses the on_setattr default; frozen
# instances reject attribute writes anyway.
frozen = partial(define, frozen=True, on_setattr=None)
158
+
159
+
160
def field(
    *,
    default=NOTHING,
    validator=None,
    repr=True,
    hash=None,
    init=True,
    metadata=None,
    type=None,
    converter=None,
    factory=None,
    kw_only=False,
    eq=None,
    order=None,
    on_setattr=None,
    alias=None,
):
    """
    Identical to `attr.ib`, except keyword-only and with some arguments
    removed.

    .. versionadded:: 23.1.0
       The *type* parameter has been re-added; mostly for `attrs.make_class`.
       Please note that type checkers ignore this metadata.
    .. versionadded:: 20.1.0
    """
    # Pure delegation: field() only restricts the call style to keyword
    # arguments before handing everything to the classic attrib().
    kwargs = {
        "default": default,
        "validator": validator,
        "repr": repr,
        "hash": hash,
        "init": init,
        "metadata": metadata,
        "type": type,
        "converter": converter,
        "factory": factory,
        "kw_only": kw_only,
        "eq": eq,
        "order": order,
        "on_setattr": on_setattr,
        "alias": alias,
    }
    return attrib(**kwargs)
202
+
203
+
204
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
    """
    Same as `attr.asdict`, except that collections types are always retained
    and dict is always used as *dict_factory*.

    .. versionadded:: 21.3.0
    """
    # Thin wrapper over the classic helper with collection retention forced on.
    kwargs = dict(
        inst=inst,
        recurse=recurse,
        filter=filter,
        value_serializer=value_serializer,
        retain_collection_types=True,
    )
    return _asdict(**kwargs)
218
+
219
+
220
def astuple(inst, *, recurse=True, filter=None):
    """
    Same as `attr.astuple`, except that collections types are always retained
    and `tuple` is always used as the *tuple_factory*.

    .. versionadded:: 21.3.0
    """
    # Thin wrapper over the classic helper with collection retention forced on.
    kwargs = dict(
        inst=inst,
        recurse=recurse,
        filter=filter,
        retain_collection_types=True,
    )
    return _astuple(**kwargs)
env-llmeval/lib/python3.10/site-packages/attr/_typing_compat.pyi ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from typing import Any, ClassVar, Protocol

# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
MYPY = False

if MYPY:
    # A protocol to be able to statically accept an attrs class.
    class AttrsInstance_(Protocol):
        __attrs_attrs__: ClassVar[Any]

else:
    # For type checkers without plug-in support use an empty protocol that
    # will (hopefully) be combined into a union.
    class AttrsInstance_(Protocol):
        pass
env-llmeval/lib/python3.10/site-packages/attr/_version_info.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+
4
+ from functools import total_ordering
5
+
6
+ from ._funcs import astuple
7
+ from ._make import attrib, attrs
8
+
9
+
10
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo:
    """
    A version object that can be compared to tuple of length 1--4:

    >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
    True
    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
    True
    >>> vi = attr.VersionInfo(19, 2, 0, "final")
    >>> vi < (19, 1, 1)
    False
    >>> vi < (19,)
    False
    >>> vi == (19, 2,)
    True
    >>> vi == (19, 2, 1)
    False

    .. versionadded:: 19.2
    """

    # Field order matters: comparisons are tuple comparisons in this order.
    year = attrib(type=int)
    minor = attrib(type=int)
    micro = attrib(type=int)
    releaselevel = attrib(type=str)

    @classmethod
    def _from_version_string(cls, s):
        """
        Parse *s* and return a _VersionInfo.
        """
        v = s.split(".")
        # A three-part version string implies a final release.
        if len(v) == 3:
            v.append("final")

        return cls(
            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
        )

    def _ensure_tuple(self, other):
        """
        Ensure *other* is a tuple of a valid length.

        Returns a possibly transformed *other* and ourselves as a tuple of
        the same length as *other*.
        """
        # NotImplementedError is (ab)used as an internal signal here; the
        # comparison dunders below translate it into `NotImplemented`.

        if self.__class__ is other.__class__:
            other = astuple(other)

        if not isinstance(other, tuple):
            raise NotImplementedError

        if not (1 <= len(other) <= 4):
            raise NotImplementedError

        # Truncate ourselves to *other*'s length so prefixes compare equal.
        return astuple(self)[: len(other)], other

    def __eq__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        return us == them

    def __lt__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
        # have to do anything special with releaselevel for now.
        return us < them
env-llmeval/lib/python3.10/site-packages/attr/converters.py ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ Commonly useful converters.
5
+ """
6
+
7
+
8
+ import typing
9
+
10
+ from ._compat import _AnnotationExtractor
11
+ from ._make import NOTHING, Factory, pipe
12
+
13
+
14
+ __all__ = [
15
+ "default_if_none",
16
+ "optional",
17
+ "pipe",
18
+ "to_bool",
19
+ ]
20
+
21
+
22
def optional(converter):
    """
    A converter that allows an attribute to be optional. An optional attribute
    is one which can be set to ``None``.

    Type annotations will be inferred from the wrapped converter's, if it
    has any.

    :param callable converter: the converter that is used for non-``None``
       values.

    .. versionadded:: 17.1.0
    """

    def optional_converter(val):
        # None passes straight through; everything else goes to the wrapped
        # converter.
        return None if val is None else converter(val)

    # Mirror the wrapped converter's annotations, widened to Optional.
    extractor = _AnnotationExtractor(converter)
    annotations = optional_converter.__annotations__

    param_type = extractor.get_first_param_type()
    if param_type:
        annotations["val"] = typing.Optional[param_type]

    return_type = extractor.get_return_type()
    if return_type:
        annotations["return"] = typing.Optional[return_type]

    return optional_converter
52
+
53
+
54
def default_if_none(default=NOTHING, factory=None):
    """
    A converter that allows to replace ``None`` values by *default* or the
    result of *factory*.

    :param default: Value to be used if ``None`` is passed. Passing an instance
       of `attrs.Factory` is supported, however the ``takes_self`` option
       is *not*.
    :param callable factory: A callable that takes no parameters whose result
       is used if ``None`` is passed.

    :raises TypeError: If **neither** *default* or *factory* is passed.
    :raises TypeError: If **both** *default* and *factory* are passed.
    :raises ValueError: If an instance of `attrs.Factory` is passed with
       ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    # Exactly one of default / factory must be supplied.
    if default is NOTHING and factory is None:
        msg = "Must pass either `default` or `factory`."
        raise TypeError(msg)
    if default is not NOTHING and factory is not None:
        msg = "Must pass either `default` or `factory` but not both."
        raise TypeError(msg)

    # Normalize a bare factory into a Factory wrapper.
    if factory is not None:
        default = Factory(factory)

    if isinstance(default, Factory):
        if default.takes_self:
            msg = "`takes_self` is not supported by default_if_none."
            raise ValueError(msg)

        def default_if_none_converter(val):
            # Call the factory fresh for each None.
            return val if val is not None else default.factory()

    else:

        def default_if_none_converter(val):
            # Plain value: returned as-is for each None.
            return val if val is not None else default

    return default_if_none_converter
103
+
104
+
105
# Lookup tables hoisted to module level so to_bool() doesn't rebuild them on
# every call. Note that True == 1 and False == 0 in Python, so the int and
# bool spellings share set slots — membership behavior is unchanged.
_TRUTHY = frozenset((True, "true", "t", "yes", "y", "on", "1", 1))
_FALSY = frozenset((False, "false", "f", "no", "n", "off", "0", 0))


def to_bool(val):
    """
    Convert "boolean" strings (e.g., from env. vars.) to real booleans.

    Values mapping to :code:`True`:

    - :code:`True`
    - :code:`"true"` / :code:`"t"`
    - :code:`"yes"` / :code:`"y"`
    - :code:`"on"`
    - :code:`"1"`
    - :code:`1`

    Values mapping to :code:`False`:

    - :code:`False`
    - :code:`"false"` / :code:`"f"`
    - :code:`"no"` / :code:`"n"`
    - :code:`"off"`
    - :code:`"0"`
    - :code:`0`

    :raises ValueError: for any other value.

    .. versionadded:: 21.3.0
    """
    # Strings are matched case-insensitively.
    if isinstance(val, str):
        val = val.lower()
    try:
        if val in _TRUTHY:
            return True
        if val in _FALSY:
            return False
    except TypeError:
        # Raised when "val" is not hashable (e.g., lists)
        pass
    msg = f"Cannot convert value to bool: {val}"
    raise ValueError(msg)
env-llmeval/lib/python3.10/site-packages/attr/filters.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ Commonly useful filters for `attr.asdict`.
5
+ """
6
+
7
+ from ._make import Attribute
8
+
9
+
10
def _split_what(what):
    """
    Partition *what* into three `frozenset`s: classes, attribute names, and
    `Attribute` instances.
    """
    classes = frozenset(item for item in what if isinstance(item, type))
    names = frozenset(item for item in what if isinstance(item, str))
    attributes = frozenset(
        item for item in what if isinstance(item, Attribute)
    )
    return classes, names, attributes


def include(*what):
    """
    Include *what*.

    :param what: What to include.
    :type what: `list` of classes `type`, field names `str` or
        `attrs.Attribute`\\ s

    :rtype: `callable`

    .. versionchanged:: 23.1.0 Accept strings with field names.
    """
    cls, names, attrs = _split_what(what)

    def include_(attribute, value):
        # Keep the field if it matches by value type, by name, or by the
        # Attribute object itself.
        if value.__class__ in cls:
            return True
        if attribute.name in names:
            return True
        return attribute in attrs

    return include_


def exclude(*what):
    """
    Exclude *what*.

    :param what: What to exclude.
    :type what: `list` of classes `type`, field names `str` or
        `attrs.Attribute`\\ s.

    :rtype: `callable`

    .. versionchanged:: 23.3.0 Accept field name string as input argument
    """
    cls, names, attrs = _split_what(what)

    def exclude_(attribute, value):
        # Drop the field if it matches by value type, by name, or by the
        # Attribute object itself.
        matched = (
            value.__class__ in cls
            or attribute.name in names
            or attribute in attrs
        )
        return not matched

    return exclude_
env-llmeval/lib/python3.10/site-packages/attr/setters.pyi ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from typing import Any, NewType, NoReturn, TypeVar

from . import Attribute, _OnSetAttrType

_T = TypeVar("_T")

# Stub signatures for the runtime implementations in setters.py.
def frozen(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
env-llmeval/lib/python3.10/site-packages/attr/validators.py ADDED
@@ -0,0 +1,681 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ Commonly useful validators.
5
+ """
6
+
7
+
8
+ import operator
9
+ import re
10
+
11
+ from contextlib import contextmanager
12
+ from re import Pattern
13
+
14
+ from ._config import get_run_validators, set_run_validators
15
+ from ._make import _AndValidator, and_, attrib, attrs
16
+ from .converters import default_if_none
17
+ from .exceptions import NotCallableError
18
+
19
+
20
+ __all__ = [
21
+ "and_",
22
+ "deep_iterable",
23
+ "deep_mapping",
24
+ "disabled",
25
+ "ge",
26
+ "get_disabled",
27
+ "gt",
28
+ "in_",
29
+ "instance_of",
30
+ "is_callable",
31
+ "le",
32
+ "lt",
33
+ "matches_re",
34
+ "max_len",
35
+ "min_len",
36
+ "not_",
37
+ "optional",
38
+ "provides",
39
+ "set_disabled",
40
+ ]
41
+
42
+
43
def set_disabled(disabled):
    """
    Globally disable or enable running validators.

    By default, they are run.

    :param disabled: If ``True``, disable running all validators.
    :type disabled: bool

    .. warning::

       This function is not thread-safe!

    .. versionadded:: 21.3.0
    """
    # The underlying config flag is phrased positively ("run validators"),
    # so invert the caller's "disabled" flag.
    enabled = not disabled
    set_run_validators(enabled)


def get_disabled():
    """
    Return a bool indicating whether validators are currently disabled or not.

    :return: ``True`` if validators are currently disabled.
    :rtype: bool

    .. versionadded:: 21.3.0
    """
    running = get_run_validators()
    return not running
71
+
72
+
73
@contextmanager
def disabled():
    """
    Context manager that disables running validators within its context.

    On exit, the previous validator state is restored (rather than
    unconditionally re-enabling validators), so nesting this context
    manager -- or entering it while validators are already disabled via
    `set_disabled` -- no longer silently re-enables validation.

    .. warning::

       This context manager is not thread-safe!

    .. versionadded:: 21.3.0
    """
    # Remember what was in effect before so we can restore it in `finally`.
    previously_enabled = get_run_validators()
    set_run_validators(False)
    try:
        yield
    finally:
        set_run_validators(previously_enabled)
89
+
90
+
91
@attrs(repr=False, slots=True, hash=True)
class _InstanceOfValidator:
    # The expected type (or tuple of types) checked via isinstance().
    type = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not isinstance(value, self.type):
            msg = "'{name}' must be {type!r} (got {value!r} that is a {actual!r}).".format(
                name=attr.name,
                type=self.type,
                actual=value.__class__,
                value=value,
            )
            # Extra args beyond the message are part of the documented
            # exception contract.
            raise TypeError(
                msg,
                attr,
                self.type,
                value,
            )

    def __repr__(self):
        return f"<instance_of validator for type {self.type!r}>"


def instance_of(type):
    """
    A validator that raises a `TypeError` if the initializer is called
    with a wrong type for this particular attribute (checks are performed using
    `isinstance` therefore it's also valid to pass a tuple of types).

    :param type: The type to check for.
    :type type: type or tuple of type

    :raises TypeError: With a human readable error message, the attribute
       (of type `attrs.Attribute`), the expected type, and the value it
       got.
    """
    return _InstanceOfValidator(type)
131
+
132
+
133
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator:
    pattern = attrib()
    match_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.match_func(value):
            return

        msg = "'{name}' must match regex {pattern!r} ({value!r} doesn't)".format(
            name=attr.name, pattern=self.pattern.pattern, value=value
        )
        raise ValueError(
            msg,
            attr,
            self.pattern,
            value,
        )

    def __repr__(self):
        return f"<matches_re validator for pattern {self.pattern!r}>"


def matches_re(regex, flags=0, func=None):
    r"""
    A validator that raises `ValueError` if the initializer is called
    with a string that doesn't match *regex*.

    :param regex: a regex string or precompiled pattern to match against
    :param int flags: flags that will be passed to the underlying re function
       (default 0)
    :param callable func: which underlying `re` function to call. Valid options
       are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
       means `re.fullmatch`. For performance reasons, the pattern is always
       precompiled using `re.compile`.

    .. versionadded:: 19.2.0
    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
    """
    valid_funcs = (re.fullmatch, None, re.search, re.match)
    if func not in valid_funcs:
        names = sorted(f.__name__ if f else "None" for f in set(valid_funcs))
        msg = "'func' must be one of {}.".format(", ".join(names))
        raise ValueError(msg)

    if isinstance(regex, Pattern):
        # A precompiled pattern already carries its own flags.
        if flags:
            msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
            raise TypeError(msg)
        pattern = regex
    else:
        pattern = re.compile(regex, flags)

    # Bind the requested matching strategy; None falls through to fullmatch.
    dispatch = {re.match: pattern.match, re.search: pattern.search}
    match_func = dispatch.get(func, pattern.fullmatch)

    return _MatchesReValidator(pattern, match_func)
198
+
199
+
200
@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator:
    # A zope.interface Interface; checked via its providedBy() method.
    interface = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.interface.providedBy(value):
            msg = "'{name}' must provide {interface!r} which {value!r} doesn't.".format(
                name=attr.name, interface=self.interface, value=value
            )
            raise TypeError(
                msg,
                attr,
                self.interface,
                value,
            )

    def __repr__(self):
        return f"<provides validator for interface {self.interface!r}>"


def provides(interface):
    """
    A validator that raises a `TypeError` if the initializer is called
    with an object that does not provide the requested *interface* (checks are
    performed using ``interface.providedBy(value)`` (see `zope.interface
    <https://zopeinterface.readthedocs.io/en/latest/>`_).

    :param interface: The interface to check for.
    :type interface: ``zope.interface.Interface``

    :raises TypeError: With a human readable error message, the attribute
       (of type `attrs.Attribute`), the expected interface, and the
       value it got.

    .. deprecated:: 23.1.0
    """
    # Imported locally: only the deprecated path pays for it.
    import warnings

    warnings.warn(
        "attrs's zope-interface support is deprecated and will be removed in, "
        "or after, April 2024.",
        DeprecationWarning,
        stacklevel=2,
    )
    return _ProvidesValidator(interface)
248
+
249
+
250
@attrs(repr=False, slots=True, hash=True)
class _OptionalValidator:
    validator = attrib()

    def __call__(self, inst, attr, value):
        # None is always accepted; anything else must satisfy the wrapped
        # validator.
        if value is not None:
            self.validator(inst, attr, value)

    def __repr__(self):
        return f"<optional validator for {self.validator!r} or None>"


def optional(validator):
    """
    A validator that makes an attribute optional. An optional attribute is one
    which can be set to ``None`` in addition to satisfying the requirements of
    the sub-validator.

    :param Callable | tuple[Callable] | list[Callable] validator: A validator
       (or validators) that is used for non-``None`` values.

    .. versionadded:: 15.1.0
    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
    .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
    """
    # A sequence of validators is combined into a single AND validator first.
    inner = (
        _AndValidator(validator)
        if isinstance(validator, (list, tuple))
        else validator
    )
    return _OptionalValidator(inner)
281
+
282
+
283
@attrs(repr=False, slots=True, hash=True)
class _InValidator:
    # Any container supporting the `in` operator (list, tuple, Enum, ...).
    options = attrib()

    def __call__(self, inst, attr, value):
        try:
            in_options = value in self.options
        except TypeError:  # e.g. `1 in "abc"`
            in_options = False

        if not in_options:
            msg = f"'{attr.name}' must be in {self.options!r} (got {value!r})"
            # Extra args beyond the message are part of the documented
            # exception contract (since 22.1.0).
            raise ValueError(
                msg,
                attr,
                self.options,
                value,
            )

    def __repr__(self):
        return f"<in_ validator with options {self.options!r}>"


def in_(options):
    """
    A validator that raises a `ValueError` if the initializer is called
    with a value that does not belong in the options provided. The check is
    performed using ``value in options``.

    :param options: Allowed options.
    :type options: list, tuple, `enum.Enum`, ...

    :raises ValueError: With a human readable error message, the attribute (of
       type `attrs.Attribute`), the expected options, and the value it
       got.

    .. versionadded:: 17.1.0
    .. versionchanged:: 22.1.0
       The ValueError was incomplete until now and only contained the human
       readable error message. Now it contains all the information that has
       been promised since 17.1.0.
    """
    return _InValidator(options)
326
+
327
+
328
@attrs(repr=False, slots=False, hash=True)
class _IsCallableValidator:
    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not callable(value):
            message = (
                "'{name}' must be callable "
                "(got {value!r} that is a {actual!r})."
            )
            raise NotCallableError(
                msg=message.format(
                    name=attr.name, value=value, actual=value.__class__
                ),
                value=value,
            )

    def __repr__(self):
        return "<is_callable validator>"


def is_callable():
    """
    A validator that raises a `attrs.exceptions.NotCallableError` if the
    initializer is called with a value for this particular attribute
    that is not callable.

    .. versionadded:: 19.1.0

    :raises attrs.exceptions.NotCallableError: With a human readable error
       message containing the attribute (`attrs.Attribute`) name,
       and the value it got.
    """
    return _IsCallableValidator()
363
+
364
+
365
@attrs(repr=False, slots=True, hash=True)
class _DeepIterable:
    # Validator applied to every member of the iterable.
    member_validator = attrib(validator=is_callable())
    # Optional validator applied to the iterable object itself first.
    iterable_validator = attrib(
        default=None, validator=optional(is_callable())
    )

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.iterable_validator is not None:
            self.iterable_validator(inst, attr, value)

        for member in value:
            self.member_validator(inst, attr, member)

    def __repr__(self):
        iterable_identifier = (
            ""
            if self.iterable_validator is None
            else f" {self.iterable_validator!r}"
        )
        return (
            f"<deep_iterable validator for{iterable_identifier}"
            f" iterables of {self.member_validator!r}>"
        )


def deep_iterable(member_validator, iterable_validator=None):
    """
    A validator that performs deep validation of an iterable.

    :param member_validator: Validator(s) to apply to iterable members
    :param iterable_validator: Validator to apply to iterable itself
       (optional)

    .. versionadded:: 19.1.0

    :raises TypeError: if any sub-validators fail
    """
    # Multiple member validators are combined with and_().
    if isinstance(member_validator, (list, tuple)):
        member_validator = and_(*member_validator)
    return _DeepIterable(member_validator, iterable_validator)
409
+
410
+
411
@attrs(repr=False, slots=True, hash=True)
class _DeepMapping:
    # Validators applied to each key and each value respectively.
    key_validator = attrib(validator=is_callable())
    value_validator = attrib(validator=is_callable())
    # Optional validator applied to the mapping object itself first.
    mapping_validator = attrib(default=None, validator=optional(is_callable()))

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.mapping_validator is not None:
            self.mapping_validator(inst, attr, value)

        for key in value:
            self.key_validator(inst, attr, key)
            self.value_validator(inst, attr, value[key])

    def __repr__(self):
        return (
            "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
        ).format(key=self.key_validator, value=self.value_validator)


def deep_mapping(key_validator, value_validator, mapping_validator=None):
    """
    A validator that performs deep validation of a dictionary.

    :param key_validator: Validator to apply to dictionary keys
    :param value_validator: Validator to apply to dictionary values
    :param mapping_validator: Validator to apply to top-level mapping
       attribute (optional)

    .. versionadded:: 19.1.0

    :raises TypeError: if any sub-validators fail
    """
    return _DeepMapping(key_validator, value_validator, mapping_validator)
448
+
449
+
450
+ @attrs(repr=False, frozen=True, slots=True)
451
+ class _NumberValidator:
452
+ bound = attrib()
453
+ compare_op = attrib()
454
+ compare_func = attrib()
455
+
456
+ def __call__(self, inst, attr, value):
457
+ """
458
+ We use a callable class to be able to change the ``__repr__``.
459
+ """
460
+ if not self.compare_func(value, self.bound):
461
+ msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
462
+ raise ValueError(msg)
463
+
464
+ def __repr__(self):
465
+ return f"<Validator for x {self.compare_op} {self.bound}>"
466
+
467
+
468
+ def lt(val):
469
+ """
470
+ A validator that raises `ValueError` if the initializer is called
471
+ with a number larger or equal to *val*.
472
+
473
+ :param val: Exclusive upper bound for values
474
+
475
+ .. versionadded:: 21.3.0
476
+ """
477
+ return _NumberValidator(val, "<", operator.lt)
478
+
479
+
480
def le(val):
    """
    A validator raising `ValueError` unless the value is less than or
    equal to *val*.

    :param val: Inclusive upper bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<=", operator.le)
490
+
491
+
492
def ge(val):
    """
    A validator raising `ValueError` unless the value is greater than or
    equal to *val*.

    :param val: Inclusive lower bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">=", operator.ge)
502
+
503
+
504
def gt(val):
    """
    A validator raising `ValueError` unless the value is strictly greater
    than *val*.

    :param val: Exclusive lower bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">", operator.gt)
514
+
515
+
516
# Implementation behind max_len().
@attrs(repr=False, frozen=True, slots=True)
class _MaxLengthValidator:
    max_length = attrib()  # inclusive upper bound on len(value)

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        # len() works for strings and any sized container.
        if len(value) > self.max_length:
            msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
            raise ValueError(msg)

    def __repr__(self):
        return f"<max_len validator for {self.max_length}>"
530
+
531
+
532
def max_len(length):
    """
    A validator raising `ValueError` when the value's length (as reported
    by :func:`len`) exceeds *length*.

    :param int length: Maximum length of the string or iterable

    .. versionadded:: 21.3.0
    """
    return _MaxLengthValidator(length)
542
+
543
+
544
# Implementation behind min_len().
@attrs(repr=False, frozen=True, slots=True)
class _MinLengthValidator:
    min_length = attrib()  # inclusive lower bound on len(value)

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        # len() works for strings and any sized container.
        if len(value) < self.min_length:
            msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
            raise ValueError(msg)

    def __repr__(self):
        return f"<min_len validator for {self.min_length}>"
558
+
559
+
560
def min_len(length):
    """
    A validator raising `ValueError` when the value's length (as reported
    by :func:`len`) is below *length*.

    :param int length: Minimum length of the string or iterable

    .. versionadded:: 22.1.0
    """
    return _MinLengthValidator(length)
570
+
571
+
572
# Implementation behind _subclass_of(); also used to validate not_'s exc_types.
@attrs(repr=False, slots=True, hash=True)
class _SubclassOfValidator:
    type = attrib()  # a type or tuple of types, as accepted by issubclass()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not issubclass(value, self.type):
            msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
            # The TypeError carries the message plus structured context
            # (attribute, expected type, offending value) for callers.
            raise TypeError(
                msg,
                attr,
                self.type,
                value,
            )

    def __repr__(self):
        return f"<subclass_of validator for type {self.type!r}>"
591
+
592
+
593
def _subclass_of(type):
    """
    A validator raising `TypeError` unless the value is a subclass of
    *type*. The check is performed with `issubclass`, so a tuple of types
    is accepted as well.

    :param type: The type to check for.
    :type type: type or tuple of types

    :raises TypeError: With a human readable error message, the attribute
        (of type `attrs.Attribute`), the expected type, and the value it
        got.
    """
    return _SubclassOfValidator(type)
607
+
608
+
609
# Implementation behind not_(): inverts the wrapped validator's outcome.
@attrs(repr=False, slots=True, hash=True)
class _NotValidator:
    validator = attrib()  # the child validator being logically inverted
    # Error-message template; default_if_none substitutes the default when
    # the caller passes msg=None.
    msg = attrib(
        converter=default_if_none(
            "not_ validator child '{validator!r}' "
            "did not raise a captured error"
        )
    )
    # Tuple of exception classes that count as "child validation failed".
    exc_types = attrib(
        validator=deep_iterable(
            member_validator=_subclass_of(Exception),
            iterable_validator=instance_of(tuple),
        ),
    )

    def __call__(self, inst, attr, value):
        try:
            self.validator(inst, attr, value)
        except self.exc_types:
            pass  # suppress error to invert validity
        else:
            # The child validator accepted the value, so the inverted
            # check fails. Other exception types propagate unchanged.
            raise ValueError(
                self.msg.format(
                    validator=self.validator,
                    exc_types=self.exc_types,
                ),
                attr,
                self.validator,
                value,
                self.exc_types,
            )

    def __repr__(self):
        return (
            "<not_ validator wrapping {what!r}, capturing {exc_types!r}>"
        ).format(
            what=self.validator,
            exc_types=self.exc_types,
        )
649
+
650
+
651
def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
    """
    A validator that logically inverts the *validator* it wraps.

    If the wrapped validator raises one of *exc_types*, the exception is
    swallowed and validation succeeds; if it raises nothing, a `ValueError`
    is raised instead. Useful for composing checks such as
    ``not_(in_(...))`` without writing dedicated inverted validators.

    :param validator: A validator to be logically inverted.
    :param msg: Message to raise when the inverted validation fails, i.e.
        when the child validator does not raise.
        Formatted with keys ``exc_types`` and ``validator``.
    :type msg: str
    :param exc_types: Exception type(s) to capture.
        Exceptions of other types raised by child validators are not
        intercepted and pass through.

    :raises ValueError: With a human readable error message,
        the attribute (of type `attrs.Attribute`),
        the validator that failed to raise an exception,
        the value it got,
        and the expected exception types.

    .. versionadded:: 22.2.0
    """
    # Normalize to a tuple: a lone exception class is not iterable and
    # therefore lands in the TypeError branch.
    try:
        captured = tuple(exc_types)
    except TypeError:
        captured = (exc_types,)
    return _NotValidator(validator, msg, captured)
env-llmeval/lib/python3.10/site-packages/huggingface_hub/_login.py ADDED
@@ -0,0 +1,396 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 The HuggingFace Team. All rights reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Contains methods to login to the Hub."""
15
+
16
+ import os
17
+ import subprocess
18
+ from functools import partial
19
+ from getpass import getpass
20
+ from pathlib import Path
21
+ from typing import Optional
22
+
23
+ from . import constants
24
+ from .commands._cli_utils import ANSI
25
+ from .utils import (
26
+ capture_output,
27
+ get_token,
28
+ is_google_colab,
29
+ is_notebook,
30
+ list_credential_helpers,
31
+ logging,
32
+ run_subprocess,
33
+ set_git_credential,
34
+ unset_git_credential,
35
+ )
36
+ from .utils._token import _get_token_from_environment, _get_token_from_google_colab
37
+
38
+
39
+ logger = logging.get_logger(__name__)
40
+
41
+ _HF_LOGO_ASCII = """
42
+ _| _| _| _| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _|_|_|_| _|_| _|_|_| _|_|_|_|
43
+ _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _|
44
+ _|_|_|_| _| _| _| _|_| _| _|_| _| _| _| _| _| _|_| _|_|_| _|_|_|_| _| _|_|_|
45
+ _| _| _| _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _|
46
+ _| _| _|_| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _| _| _| _|_|_| _|_|_|_|
47
+ """
48
+
49
+
50
def login(
    token: Optional[str] = None,
    add_to_git_credential: bool = False,
    new_session: bool = True,
    write_permission: bool = False,
) -> None:
    """Log the machine in so that it can access the Hub.

    When *token* is given, it is validated, persisted to the cache, and made
    available to all `huggingface_hub` components (and optionally stored as a
    git credential). When it is not given, the user is prompted for one —
    through an `ipywidgets` widget if running inside a notebook, otherwise
    through the terminal. The `huggingface-cli login` CLI command wraps this
    function.

    <Tip>

    [`login`] is a drop-in replacement method for [`notebook_login`] as it wraps and
    extends its capabilities.

    </Tip>

    <Tip>

    Notebook detection is heuristic and may be wrong for exotic environments;
    call [`notebook_login`] or [`interpreter_login`] directly to force a
    specific UI.

    </Tip>

    Args:
        token (`str`, *optional*):
            User access token to generate from https://huggingface.co/settings/token.
        add_to_git_credential (`bool`, defaults to `False`):
            Whether to also store the token as a git credential. A warning is
            printed when no git credential helper is configured. Ignored (and
            asked interactively instead) when *token* is not provided.
        new_session (`bool`, defaults to `True`):
            If `True`, will request a token even if one is already saved on the machine.
        write_permission (`bool`, defaults to `False`):
            If `True`, requires a token with write permission.
    Raises:
        [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
            If an organization token is passed (only personal account tokens
            are valid) or if the token is invalid.
        [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
            If running in a notebook but `ipywidgets` is not installed.
    """
    if token is not None:
        if not add_to_git_credential:
            print(
                "Token has not been saved to git credential helper. Pass"
                " `add_to_git_credential=True` if you want to set the git"
                " credential as well."
            )
        _login(token, add_to_git_credential=add_to_git_credential, write_permission=write_permission)
        return

    # No token provided: fall back to an interactive prompt.
    if is_notebook():
        notebook_login(new_session=new_session, write_permission=write_permission)
    else:
        interpreter_login(new_session=new_session, write_permission=write_permission)
115
+
116
+
117
def logout() -> None:
    """Log the machine out of the Hub.

    Deletes the cached token file and removes the token from the git
    credential store. Raises when the user remains logged in through an
    environment variable or a Google Colab secret, since those cannot be
    cleared from here.
    """
    if get_token() is None:
        print("Not logged in!")
        return

    # Remove the token from the git credential store first.
    unset_git_credential()

    # Then drop the cached token file, if any.
    Path(constants.HF_TOKEN_PATH).unlink(missing_ok=True)

    # The user may still be implicitly logged in via a Colab secret or an
    # environment variable; surface that explicitly instead of pretending
    # the logout succeeded.
    if _get_token_from_google_colab() is not None:
        raise EnvironmentError(
            "You are automatically logged in using a Google Colab secret.\n"
            "To log out, you must unset the `HF_TOKEN` secret in your Colab settings."
        )
    if _get_token_from_environment() is not None:
        raise EnvironmentError(
            "Token has been deleted from your machine but you are still logged in.\n"
            "To log out, you must clear out both `HF_TOKEN` and `HUGGING_FACE_HUB_TOKEN` environment variables."
        )

    print("Successfully logged out.")
148
+
149
+
150
+ ###
151
+ # Interpreter-based login (text)
152
+ ###
153
+
154
+
155
def interpreter_login(new_session: bool = True, write_permission: bool = False) -> None:
    """
    Displays a prompt to login to the HF website and store the token.

    This is equivalent to [`login`] without passing a token when not run in a notebook.
    [`interpreter_login`] is useful if you want to force the use of the terminal prompt
    instead of a notebook widget.

    For more details, see [`login`].

    Args:
        new_session (`bool`, defaults to `True`):
            If `True`, will request a token even if one is already saved on the machine.
        write_permission (`bool`, defaults to `False`):
            If `True`, requires a token with write permission.

    """
    # Reuse the existing session when allowed and the saved token is still valid.
    if not new_session and _current_token_okay(write_permission=write_permission):
        print("User is already logged in.")
        return

    # Deferred import — presumably to avoid pulling CLI helpers at module load
    # time; TODO confirm against project import-cycle constraints.
    from .commands.delete_cache import _ask_for_confirmation_no_tui

    print(_HF_LOGO_ASCII)
    if get_token() is not None:
        print(
            " A token is already saved on your machine. Run `huggingface-cli"
            " whoami` to get more information or `huggingface-cli logout` if you want"
            " to log out."
        )
        print(" Setting a new token will erase the existing one.")

    print(" To login, `huggingface_hub` requires a token generated from https://huggingface.co/settings/tokens .")
    if os.name == "nt":
        # Windows terminals: getpass hides input, so point users at right-click paste.
        print("Token can be pasted using 'Right-Click'.")
    token = getpass("Enter your token (input will not be visible): ")
    add_to_git_credential = _ask_for_confirmation_no_tui("Add token as git credential?")

    _login(token=token, add_to_git_credential=add_to_git_credential, write_permission=write_permission)
194
+
195
+
196
+ ###
197
+ # Notebook-based login (widget)
198
+ ###
199
+
200
# HTML shown in the notebook widget when prompting for a password.
NOTEBOOK_LOGIN_PASSWORD_HTML = """<center> <img
src=https://huggingface.co/front/assets/huggingface_logo-noborder.svg
alt='Hugging Face'> <br> Immediately click login after typing your password or
it might be stored in plain text in this notebook file. </center>"""


# Header of the token-login widget: logo + instructions on copying a token.
NOTEBOOK_LOGIN_TOKEN_HTML_START = """<center> <img
src=https://huggingface.co/front/assets/huggingface_logo-noborder.svg
alt='Hugging Face'> <br> Copy a token from <a
href="https://huggingface.co/settings/tokens" target="_blank">your Hugging Face
tokens page</a> and paste it below. <br> Immediately click login after copying
your token or it might be stored in plain text in this notebook file. </center>"""


# Footer of the token-login widget.
NOTEBOOK_LOGIN_TOKEN_HTML_END = """
<b>Pro Tip:</b> If you don't already have one, you can create a dedicated
'notebooks' token with 'write' access, that you can then easily reuse for all
notebooks. </center>"""
218
+
219
+
220
def notebook_login(new_session: bool = True, write_permission: bool = False) -> None:
    """
    Displays a widget to login to the HF website and store the token.

    This is equivalent to [`login`] without passing a token when run in a notebook.
    [`notebook_login`] is useful if you want to force the use of the notebook widget
    instead of a prompt in the terminal.

    For more details, see [`login`].

    Args:
        new_session (`bool`, defaults to `True`):
            If `True`, will request a token even if one is already saved on the machine.
        write_permission (`bool`, defaults to `False`):
            If `True`, requires a token with write permission.

    Raises:
        `ImportError`: if `ipywidgets`/IPython are not installed (i.e. not
            running in a notebook environment).
    """
    try:
        import ipywidgets.widgets as widgets  # type: ignore
        from IPython.display import display  # type: ignore
    except ImportError:
        raise ImportError(
            "The `notebook_login` function can only be used in a notebook (Jupyter or"
            " Colab) and you need the `ipywidgets` module: `pip install ipywidgets`."
        )
    # Reuse the existing session when allowed and the saved token is still valid.
    if not new_session and _current_token_okay(write_permission=write_permission):
        print("User is already logged in.")
        return

    box_layout = widgets.Layout(display="flex", flex_flow="column", align_items="center", width="50%")

    token_widget = widgets.Password(description="Token:")
    git_checkbox_widget = widgets.Checkbox(value=True, description="Add token as git credential?")
    token_finish_button = widgets.Button(description="Login")

    # Vertical stack: header HTML, token input, git checkbox, button, footer HTML.
    login_token_widget = widgets.VBox(
        [
            widgets.HTML(NOTEBOOK_LOGIN_TOKEN_HTML_START),
            token_widget,
            git_checkbox_widget,
            token_finish_button,
            widgets.HTML(NOTEBOOK_LOGIN_TOKEN_HTML_END),
        ],
        layout=box_layout,
    )
    display(login_token_widget)

    # On click events
    def login_token_event(t, write_permission: bool = False):
        """
        Event handler for the login button.

        `t` is the widget instance passed by ipywidgets' `on_click` — unused here.

        Args:
            write_permission (`bool`, defaults to `False`):
                If `True`, requires a token with write permission.
        """
        token = token_widget.value
        add_to_git_credential = git_checkbox_widget.value
        # Erase token and clear value to make sure it's not saved in the notebook.
        token_widget.value = ""
        # Hide inputs
        login_token_widget.children = [widgets.Label("Connecting...")]
        try:
            # Capture everything _login prints so it can be shown in the widget.
            with capture_output() as captured:
                _login(token, add_to_git_credential=add_to_git_credential, write_permission=write_permission)
            message = captured.getvalue()
        except Exception as error:
            message = str(error)
        # Print result (success message or error)
        login_token_widget.children = [widgets.Label(line) for line in message.split("\n") if line.strip()]

    token_finish_button.on_click(partial(login_token_event, write_permission=write_permission))
291
+
292
+
293
+ ###
294
+ # Login private helpers
295
+ ###
296
+
297
+
298
def _login(token: str, add_to_git_credential: bool, write_permission: bool = False) -> None:
    """Validate *token*, optionally store it as a git credential, and persist it to disk.

    Raises `ValueError` if the token is an organization token, is rejected by
    the Hub, or lacks the required write permission.
    """
    from .hf_api import get_token_permission  # avoid circular import

    if token.startswith("api_org"):
        raise ValueError("You must use your personal account token, not an organization token.")

    # A `None` permission means the Hub rejected the token.
    permission = get_token_permission(token)
    if permission is None:
        raise ValueError("Invalid token passed!")
    elif write_permission and permission != "write":
        raise ValueError(
            "Token is valid but is 'read-only' and a 'write' token is required.\nPlease provide a new token with"
            " correct permission."
        )
    print(f"Token is valid (permission: {permission}).")

    if add_to_git_credential:
        # Only store as git credential when a helper is actually configured.
        if _is_git_credential_helper_configured():
            set_git_credential(token)
            print(
                "Your token has been saved in your configured git credential helpers"
                + f" ({','.join(list_credential_helpers())})."
            )
        else:
            print("Token has not been saved to git credential helper.")

    # Save token
    path = Path(constants.HF_TOKEN_PATH)
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(token)
    print(f"Your token has been saved to {constants.HF_TOKEN_PATH}")
    print("Login successful")
330
+
331
+
332
def _current_token_okay(write_permission: bool = False):
    """Tell whether the token currently saved on the machine is still usable.

    Args:
        write_permission (`bool`, defaults to `False`):
            If `True`, requires a token with write permission.

    Returns:
        `bool`: `True` if the current token is valid, `False` otherwise.
    """
    from .hf_api import get_token_permission  # avoid circular import

    permission = get_token_permission()
    if permission is None:
        return False
    if write_permission and permission != "write":
        return False
    return True
348
+
349
+
350
def _is_git_credential_helper_configured() -> bool:
    """Check if a git credential helper is configured.

    Warns user if not the case (except for Google Colab where "store" is set by default
    by `huggingface_hub`).

    Returns:
        `bool`: `True` if at least one helper is configured (or when running in
        Google Colab, where the "store" helper is configured automatically),
        `False` otherwise.
    """
    helpers = list_credential_helpers()
    if len(helpers) > 0:
        return True  # Do not warn: at least 1 helper is set

    # Only in Google Colab to avoid the warning message
    # See https://github.com/huggingface/huggingface_hub/issues/1043#issuecomment-1247010710
    if is_google_colab():
        _set_store_as_git_credential_helper_globally()
        return True  # Do not warn: "store" is used by default in Google Colab

    # Otherwise, warn user
    print(
        ANSI.red(
            "Cannot authenticate through git-credential as no helper is defined on your"
            " machine.\nYou might have to re-authenticate when pushing to the Hugging"
            " Face Hub.\nRun the following command in your terminal in case you want to"
            " set the 'store' credential helper as default.\n\ngit config --global"
            " credential.helper store\n\nRead"
            " https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage for more"
            " details."
        )
    )
    return False
379
+
380
+
381
def _set_store_as_git_credential_helper_globally() -> None:
    """Set globally the credential.helper to `store`.

    To be used only in Google Colab as we assume the user doesn't care about the git
    credential config. It is the only particular case where we don't want to display the
    warning message in [`notebook_login()`].

    Related:
        - https://github.com/huggingface/huggingface_hub/issues/1043
        - https://github.com/huggingface/huggingface_hub/issues/1051
        - https://git-scm.com/docs/git-credential-store

    Raises:
        `EnvironmentError`: if the `git config` call fails. The subprocess
        stderr is used as the error message and the original
        `CalledProcessError` is chained as the cause.
    """
    try:
        run_subprocess("git config --global credential.helper store")
    except subprocess.CalledProcessError as exc:
        # Chain the original error explicitly so the full subprocess context
        # (command, return code) is preserved for debugging.
        raise EnvironmentError(exc.stderr) from exc
env-llmeval/lib/python3.10/site-packages/huggingface_hub/_tensorboard_logger.py ADDED
@@ -0,0 +1,169 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2023 The HuggingFace Team. All rights reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """Contains a logger to push training logs to the Hub, using Tensorboard."""
15
+
16
+ from pathlib import Path
17
+ from typing import TYPE_CHECKING, List, Optional, Union
18
+
19
+ from huggingface_hub._commit_scheduler import CommitScheduler
20
+
21
+ from .utils import experimental, is_tensorboard_available
22
+
23
+
24
+ if is_tensorboard_available():
25
+ from tensorboardX import SummaryWriter
26
+
27
+ # TODO: clarify: should we import from torch.utils.tensorboard ?
28
+
29
+ else:
30
+ SummaryWriter = object # Dummy class to avoid failing at import. Will raise on instance creation.
31
+
32
+ if TYPE_CHECKING:
33
+ from tensorboardX import SummaryWriter
34
+
35
+
36
class HFSummaryWriter(SummaryWriter):
    """
    Wrapper around the tensorboard's `SummaryWriter` to push training logs to the Hub.

    Data is logged locally and then pushed to the Hub asynchronously. Pushing data to the Hub is done in a separate
    thread to avoid blocking the training script. In particular, if the upload fails for any reason (e.g. a connection
    issue), the main script will not be interrupted. Data is automatically pushed to the Hub every `commit_every`
    minutes (default to every 5 minutes).

    <Tip warning={true}>

    `HFSummaryWriter` is experimental. Its API is subject to change in the future without prior notice.

    </Tip>

    Args:
        repo_id (`str`):
            The id of the repo to which the logs will be pushed.
        logdir (`str`, *optional*):
            The directory where the logs will be written. If not specified, a local directory will be created by the
            underlying `SummaryWriter` object.
        commit_every (`int` or `float`, *optional*):
            The frequency (in minutes) at which the logs will be pushed to the Hub. Defaults to 5 minutes.
        squash_history (`bool`, *optional*):
            Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
            useful to avoid degraded performances on the repo when it grows too large.
        repo_type (`str`, *optional*):
            The type of the repo to which the logs will be pushed. Defaults to "model".
        repo_revision (`str`, *optional*):
            The revision of the repo to which the logs will be pushed. Defaults to "main".
        repo_private (`bool`, *optional*):
            Whether to create a private repo or not. Defaults to False. This argument is ignored if the repo already
            exists.
        path_in_repo (`str`, *optional*):
            The path to the folder in the repo where the logs will be pushed. Defaults to "tensorboard/".
        repo_allow_patterns (`List[str]` or `str`, *optional*):
            A list of patterns to include in the upload. Defaults to `"*.tfevents.*"`. Check out the
            [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
        repo_ignore_patterns (`List[str]` or `str`, *optional*):
            A list of patterns to exclude in the upload. Check out the
            [upload guide](https://huggingface.co/docs/huggingface_hub/guides/upload#upload-a-folder) for more details.
        token (`str`, *optional*):
            Authentication token. Will default to the stored token. See https://huggingface.co/settings/token for more
            details
        kwargs:
            Additional keyword arguments passed to `SummaryWriter`.

    Examples:
    ```py
    >>> from huggingface_hub import HFSummaryWriter

    # Logs are automatically pushed every 15 minutes
    >>> logger = HFSummaryWriter(repo_id="test_hf_logger", commit_every=15)
    >>> logger.add_scalar("a", 1)
    >>> logger.add_scalar("b", 2)
    ...

    # You can also trigger a push manually
    >>> logger.scheduler.trigger()
    ```

    ```py
    >>> from huggingface_hub import HFSummaryWriter

    # Logs are automatically pushed every 5 minutes (default) + when exiting the context manager
    >>> with HFSummaryWriter(repo_id="test_hf_logger") as logger:
    ...     logger.add_scalar("a", 1)
    ...     logger.add_scalar("b", 2)
    ```
    """

    @experimental
    def __new__(cls, *args, **kwargs) -> "HFSummaryWriter":
        # Fail at instantiation time (not import time) when tensorboardX is
        # missing, since `SummaryWriter` is replaced by a dummy `object` base
        # in that case.
        if not is_tensorboard_available():
            raise ImportError(
                "You must have `tensorboard` installed to use `HFSummaryWriter`. Please run `pip install --upgrade"
                " tensorboardX` first."
            )
        return super().__new__(cls)

    def __init__(
        self,
        repo_id: str,
        *,
        logdir: Optional[str] = None,
        commit_every: Union[int, float] = 5,
        squash_history: bool = False,
        repo_type: Optional[str] = None,
        repo_revision: Optional[str] = None,
        repo_private: bool = False,
        path_in_repo: Optional[str] = "tensorboard",
        repo_allow_patterns: Optional[Union[List[str], str]] = "*.tfevents.*",
        repo_ignore_patterns: Optional[Union[List[str], str]] = None,
        token: Optional[str] = None,
        **kwargs,
    ):
        # Initialize SummaryWriter
        super().__init__(logdir=logdir, **kwargs)

        # Check logdir has been correctly initialized and fail early otherwise. In practice, SummaryWriter takes care of it.
        if not isinstance(self.logdir, str):
            raise ValueError(f"`self.logdir` must be a string. Got '{self.logdir}' of type {type(self.logdir)}.")

        # Append logdir name to `path_in_repo`
        # (so multiple runs pushed to the same repo land in distinct folders)
        if path_in_repo is None or path_in_repo == "":
            path_in_repo = Path(self.logdir).name
        else:
            path_in_repo = path_in_repo.strip("/") + "/" + Path(self.logdir).name

        # Initialize scheduler
        # The scheduler owns the background thread that uploads `folder_path`
        # to the Hub every `commit_every` minutes.
        self.scheduler = CommitScheduler(
            folder_path=self.logdir,
            path_in_repo=path_in_repo,
            repo_id=repo_id,
            repo_type=repo_type,
            revision=repo_revision,
            private=repo_private,
            token=token,
            allow_patterns=repo_allow_patterns,
            ignore_patterns=repo_ignore_patterns,
            every=commit_every,
            squash_history=squash_history,
        )

        # Exposing some high-level info at root level
        self.repo_id = self.scheduler.repo_id
        self.repo_type = self.scheduler.repo_type
        self.repo_revision = self.scheduler.revision

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Trigger a final push to the Hub when exiting the logger's context manager.

        Note: `future.result()` blocks until the triggered upload completes, so
        no logs are lost when the context exits.
        """
        super().__exit__(exc_type, exc_val, exc_tb)
        future = self.scheduler.trigger()
        future.result()
env-llmeval/lib/python3.10/site-packages/huggingface_hub/_webhooks_server.py ADDED
@@ -0,0 +1,380 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2023-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Contains `WebhooksServer` and `webhook_endpoint` to create a webhook server easily."""
16
+
17
+ import atexit
18
+ import inspect
19
+ import os
20
+ from functools import wraps
21
+ from typing import TYPE_CHECKING, Any, Callable, Dict, Optional
22
+
23
+ from .utils import experimental, is_gradio_available
24
+ from .utils._deprecation import _deprecate_method
25
+
26
+
27
+ if TYPE_CHECKING:
28
+ import gradio as gr
29
+
30
+
31
+ from fastapi import FastAPI, Request
32
+ from fastapi.responses import JSONResponse
33
+
34
+
35
+ _global_app: Optional["WebhooksServer"] = None
36
+ _is_local = os.getenv("SYSTEM") != "spaces"
37
+
38
+
39
+ @experimental
40
+ class WebhooksServer:
41
+ """
42
+ The [`WebhooksServer`] class lets you create an instance of a Gradio app that can receive Huggingface webhooks.
43
+ These webhooks can be registered using the [`~WebhooksServer.add_webhook`] decorator. Webhook endpoints are added to
44
+ the app as a POST endpoint to the FastAPI router. Once all the webhooks are registered, the `run` method has to be
45
+ called to start the app.
46
+
47
+ It is recommended to accept [`WebhookPayload`] as the first argument of the webhook function. It is a Pydantic
48
+ model that contains all the information about the webhook event. The data will be parsed automatically for you.
49
+
50
+ Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
51
+ WebhooksServer and deploy it on a Space.
52
+
53
+ <Tip warning={true}>
54
+
55
+ `WebhooksServer` is experimental. Its API is subject to change in the future.
56
+
57
+ </Tip>
58
+
59
+ <Tip warning={true}>
60
+
61
+ You must have `gradio` installed to use `WebhooksServer` (`pip install --upgrade gradio`).
62
+
63
+ </Tip>
64
+
65
+ Args:
66
+ ui (`gradio.Blocks`, optional):
67
+ A Gradio UI instance to be used as the Space landing page. If `None`, a UI displaying instructions
68
+ about the configured webhooks is created.
69
+ webhook_secret (`str`, optional):
70
+ A secret key to verify incoming webhook requests. You can set this value to any secret you want as long as
71
+ you also configure it in your [webhooks settings panel](https://huggingface.co/settings/webhooks). You
72
+ can also set this value as the `WEBHOOK_SECRET` environment variable. If no secret is provided, the
73
+ webhook endpoints are opened without any security.
74
+
75
+ Example:
76
+
77
+ ```python
78
+ import gradio as gr
79
+ from huggingface_hub import WebhooksServer, WebhookPayload
80
+
81
+ with gr.Blocks() as ui:
82
+ ...
83
+
84
+ app = WebhooksServer(ui=ui, webhook_secret="my_secret_key")
85
+
86
+ @app.add_webhook("/say_hello")
87
+ async def hello(payload: WebhookPayload):
88
+ return {"message": "hello"}
89
+
90
+ app.run()
91
+ ```
92
+ """
93
+
94
+ def __new__(cls, *args, **kwargs) -> "WebhooksServer":
95
+ if not is_gradio_available():
96
+ raise ImportError(
97
+ "You must have `gradio` installed to use `WebhooksServer`. Please run `pip install --upgrade gradio`"
98
+ " first."
99
+ )
100
+ return super().__new__(cls)
101
+
102
+ def __init__(
103
+ self,
104
+ ui: Optional["gr.Blocks"] = None,
105
+ webhook_secret: Optional[str] = None,
106
+ ) -> None:
107
+ self._ui = ui
108
+
109
+ self.webhook_secret = webhook_secret or os.getenv("WEBHOOK_SECRET")
110
+ self.registered_webhooks: Dict[str, Callable] = {}
111
+ _warn_on_empty_secret(self.webhook_secret)
112
+
113
+ def add_webhook(self, path: Optional[str] = None) -> Callable:
114
+ """
115
+ Decorator to add a webhook to the [`WebhooksServer`] server.
116
+
117
+ Args:
118
+ path (`str`, optional):
119
+ The URL path to register the webhook function. If not provided, the function name will be used as the
120
+ path. In any case, all webhooks are registered under `/webhooks`.
121
+
122
+ Raises:
123
+ ValueError: If the provided path is already registered as a webhook.
124
+
125
+ Example:
126
+ ```python
127
+ from huggingface_hub import WebhooksServer, WebhookPayload
128
+
129
+ app = WebhooksServer()
130
+
131
+ @app.add_webhook
132
+ async def trigger_training(payload: WebhookPayload):
133
+ if payload.repo.type == "dataset" and payload.event.action == "update":
134
+ # Trigger a training job if a dataset is updated
135
+ ...
136
+
137
+ app.run()
138
+ ```
139
+ """
140
+ # Usage: directly as decorator. Example: `@app.add_webhook`
141
+ if callable(path):
142
+ # If path is a function, it means it was used as a decorator without arguments
143
+ return self.add_webhook()(path)
144
+
145
+ # Usage: provide a path. Example: `@app.add_webhook(...)`
146
+ @wraps(FastAPI.post)
147
+ def _inner_post(*args, **kwargs):
148
+ func = args[0]
149
+ abs_path = f"/webhooks/{(path or func.__name__).strip('/')}"
150
+ if abs_path in self.registered_webhooks:
151
+ raise ValueError(f"Webhook {abs_path} already exists.")
152
+ self.registered_webhooks[abs_path] = func
153
+
154
+ return _inner_post
155
+
156
+ def launch(self, prevent_thread_lock: bool = False, **launch_kwargs: Any) -> None:
157
+ """Launch the Gradio app and register webhooks to the underlying FastAPI server.
158
+
159
+ Input parameters are forwarded to Gradio when launching the app.
160
+ """
161
+ ui = self._ui or self._get_default_ui()
162
+
163
+ # Start Gradio App
164
+ # - as non-blocking so that webhooks can be added afterwards
165
+ # - as shared if launch locally (to debug webhooks)
166
+ launch_kwargs.setdefault("share", _is_local)
167
+ self.fastapi_app, _, _ = ui.launch(prevent_thread_lock=True, **launch_kwargs)
168
+
169
+ # Register webhooks to FastAPI app
170
+ for path, func in self.registered_webhooks.items():
171
+ # Add secret check if required
172
+ if self.webhook_secret is not None:
173
+ func = _wrap_webhook_to_check_secret(func, webhook_secret=self.webhook_secret)
174
+
175
+ # Add route to FastAPI app
176
+ self.fastapi_app.post(path)(func)
177
+
178
+ # Print instructions and block main thread
179
+ url = (ui.share_url or ui.local_url).strip("/")
180
+ message = "\nWebhooks are correctly setup and ready to use:"
181
+ message += "\n" + "\n".join(f" - POST {url}{webhook}" for webhook in self.registered_webhooks)
182
+ message += "\nGo to https://huggingface.co/settings/webhooks to setup your webhooks."
183
+ print(message)
184
+
185
+ if not prevent_thread_lock:
186
+ ui.block_thread()
187
+
188
+ @_deprecate_method(version="0.23", message="Use `WebhooksServer.launch` instead.")
189
+ def run(self) -> None:
190
+ return self.launch()
191
+
192
+ def _get_default_ui(self) -> "gr.Blocks":
193
+ """Default UI if not provided (lists webhooks and provides basic instructions)."""
194
+ import gradio as gr
195
+
196
+ with gr.Blocks() as ui:
197
+ gr.Markdown("# This is an app to process 🤗 Webhooks")
198
+ gr.Markdown(
199
+ "Webhooks are a foundation for MLOps-related features. They allow you to listen for new changes on"
200
+ " specific repos or to all repos belonging to particular set of users/organizations (not just your"
201
+ " repos, but any repo). Check out this [guide](https://huggingface.co/docs/hub/webhooks) to get to"
202
+ " know more about webhooks on the Huggingface Hub."
203
+ )
204
+ gr.Markdown(
205
+ f"{len(self.registered_webhooks)} webhook(s) are registered:"
206
+ + "\n\n"
207
+ + "\n ".join(
208
+ f"- [{webhook_path}]({_get_webhook_doc_url(webhook.__name__, webhook_path)})"
209
+ for webhook_path, webhook in self.registered_webhooks.items()
210
+ )
211
+ )
212
+ gr.Markdown(
213
+ "Go to https://huggingface.co/settings/webhooks to setup your webhooks."
214
+ + "\nYou app is running locally. Please look at the logs to check the full URL you need to set."
215
+ if _is_local
216
+ else (
217
+ "\nThis app is running on a Space. You can find the corresponding URL in the options menu"
218
+ " (top-right) > 'Embed the Space'. The URL looks like 'https://{username}-{repo_name}.hf.space'."
219
+ )
220
+ )
221
+ return ui
222
+
223
+
224
+ @experimental
225
+ def webhook_endpoint(path: Optional[str] = None) -> Callable:
226
+ """Decorator to start a [`WebhooksServer`] and register the decorated function as a webhook endpoint.
227
+
228
+ This is a helper to get started quickly. If you need more flexibility (custom landing page or webhook secret),
229
+ you can use [`WebhooksServer`] directly. You can register multiple webhook endpoints (to the same server) by using
230
+ this decorator multiple times.
231
+
232
+ Check out the [webhooks guide](../guides/webhooks_server) for a step-by-step tutorial on how to setup your
233
+ server and deploy it on a Space.
234
+
235
+ <Tip warning={true}>
236
+
237
+ `webhook_endpoint` is experimental. Its API is subject to change in the future.
238
+
239
+ </Tip>
240
+
241
+ <Tip warning={true}>
242
+
243
+ You must have `gradio` installed to use `webhook_endpoint` (`pip install --upgrade gradio`).
244
+
245
+ </Tip>
246
+
247
+ Args:
248
+ path (`str`, optional):
249
+ The URL path to register the webhook function. If not provided, the function name will be used as the path.
250
+ In any case, all webhooks are registered under `/webhooks`.
251
+
252
+ Examples:
253
+ The default usage is to register a function as a webhook endpoint. The function name will be used as the path.
254
+ The server will be started automatically at exit (i.e. at the end of the script).
255
+
256
+ ```python
257
+ from huggingface_hub import webhook_endpoint, WebhookPayload
258
+
259
+ @webhook_endpoint
260
+ async def trigger_training(payload: WebhookPayload):
261
+ if payload.repo.type == "dataset" and payload.event.action == "update":
262
+ # Trigger a training job if a dataset is updated
263
+ ...
264
+
265
+ # Server is automatically started at the end of the script.
266
+ ```
267
+
268
+ Advanced usage: register a function as a webhook endpoint and start the server manually. This is useful if you
269
+ are running it in a notebook.
270
+
271
+ ```python
272
+ from huggingface_hub import webhook_endpoint, WebhookPayload
273
+
274
+ @webhook_endpoint
275
+ async def trigger_training(payload: WebhookPayload):
276
+ if payload.repo.type == "dataset" and payload.event.action == "update":
277
+ # Trigger a training job if a dataset is updated
278
+ ...
279
+
280
+ # Start the server manually
281
+ trigger_training.run()
282
+ ```
283
+ """
284
+ if callable(path):
285
+ # If path is a function, it means it was used as a decorator without arguments
286
+ return webhook_endpoint()(path)
287
+
288
+ @wraps(WebhooksServer.add_webhook)
289
+ def _inner(func: Callable) -> Callable:
290
+ app = _get_global_app()
291
+ app.add_webhook(path)(func)
292
+ if len(app.registered_webhooks) == 1:
293
+ # Register `app.run` to run at exit (only once)
294
+ atexit.register(app.run)
295
+
296
+ @wraps(app.run)
297
+ def _run_now():
298
+ # Run the app directly (without waiting atexit)
299
+ atexit.unregister(app.run)
300
+ app.run()
301
+
302
+ func.run = _run_now # type: ignore
303
+ return func
304
+
305
+ return _inner
306
+
307
+
308
+ def _get_global_app() -> WebhooksServer:
309
+ global _global_app
310
+ if _global_app is None:
311
+ _global_app = WebhooksServer()
312
+ return _global_app
313
+
314
+
315
+ def _warn_on_empty_secret(webhook_secret: Optional[str]) -> None:
316
+ if webhook_secret is None:
317
+ print("Webhook secret is not defined. This means your webhook endpoints will be open to everyone.")
318
+ print(
319
+ "To add a secret, set `WEBHOOK_SECRET` as environment variable or pass it at initialization: "
320
+ "\n\t`app = WebhooksServer(webhook_secret='my_secret', ...)`"
321
+ )
322
+ print(
323
+ "For more details about webhook secrets, please refer to"
324
+ " https://huggingface.co/docs/hub/webhooks#webhook-secret."
325
+ )
326
+ else:
327
+ print("Webhook secret is correctly defined.")
328
+
329
+
330
+ def _get_webhook_doc_url(webhook_name: str, webhook_path: str) -> str:
331
+ """Returns the anchor to a given webhook in the docs (experimental)"""
332
+ return "/docs#/default/" + webhook_name + webhook_path.replace("/", "_") + "_post"
333
+
334
+
335
+ def _wrap_webhook_to_check_secret(func: Callable, webhook_secret: str) -> Callable:
336
+ """Wraps a webhook function to check the webhook secret before calling the function.
337
+
338
+ This is a hacky way to add the `request` parameter to the function signature. Since FastAPI based itself on route
339
+ parameters to inject the values to the function, we need to hack the function signature to retrieve the `Request`
340
+ object (and hence the headers). A far cleaner solution would be to use a middleware. However, since
341
+ `fastapi==0.90.1`, a middleware cannot be added once the app has started. And since the FastAPI app is started by
342
+ Gradio internals (and not by us), we cannot add a middleware.
343
+
344
+ This method is called only when a secret has been defined by the user. If a request is sent without the
345
+ "x-webhook-secret", the function will return a 401 error (unauthorized). If the header is sent but is incorrect,
346
+ the function will return a 403 error (forbidden).
347
+
348
+ Inspired by https://stackoverflow.com/a/33112180.
349
+ """
350
+ initial_sig = inspect.signature(func)
351
+
352
+ @wraps(func)
353
+ async def _protected_func(request: Request, **kwargs):
354
+ request_secret = request.headers.get("x-webhook-secret")
355
+ if request_secret is None:
356
+ return JSONResponse({"error": "x-webhook-secret header not set."}, status_code=401)
357
+ if request_secret != webhook_secret:
358
+ return JSONResponse({"error": "Invalid webhook secret."}, status_code=403)
359
+
360
+ # Inject `request` in kwargs if required
361
+ if "request" in initial_sig.parameters:
362
+ kwargs["request"] = request
363
+
364
+ # Handle both sync and async routes
365
+ if inspect.iscoroutinefunction(func):
366
+ return await func(**kwargs)
367
+ else:
368
+ return func(**kwargs)
369
+
370
+ # Update signature to include request
371
+ if "request" not in initial_sig.parameters:
372
+ _protected_func.__signature__ = initial_sig.replace( # type: ignore
373
+ parameters=(
374
+ inspect.Parameter(name="request", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=Request),
375
+ )
376
+ + tuple(initial_sig.parameters.values())
377
+ )
378
+
379
+ # Return protected route
380
+ return _protected_func
env-llmeval/lib/python3.10/site-packages/huggingface_hub/fastai_utils.py ADDED
@@ -0,0 +1,425 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import os
3
+ from pathlib import Path
4
+ from pickle import DEFAULT_PROTOCOL, PicklingError
5
+ from typing import Any, Dict, List, Optional, Union
6
+
7
+ from packaging import version
8
+
9
+ from huggingface_hub import snapshot_download
10
+ from huggingface_hub.constants import CONFIG_NAME
11
+ from huggingface_hub.hf_api import HfApi
12
+ from huggingface_hub.utils import (
13
+ SoftTemporaryDirectory,
14
+ get_fastai_version,
15
+ get_fastcore_version,
16
+ get_python_version,
17
+ )
18
+
19
+ from .utils import logging, validate_hf_hub_args
20
+ from .utils._runtime import _PY_VERSION # noqa: F401 # for backward compatibility...
21
+
22
+
23
+ logger = logging.get_logger(__name__)
24
+
25
+
26
+ def _check_fastai_fastcore_versions(
27
+ fastai_min_version: str = "2.4",
28
+ fastcore_min_version: str = "1.3.27",
29
+ ):
30
+ """
31
+ Checks that the installed fastai and fastcore versions are compatible for pickle serialization.
32
+
33
+ Args:
34
+ fastai_min_version (`str`, *optional*):
35
+ The minimum fastai version supported.
36
+ fastcore_min_version (`str`, *optional*):
37
+ The minimum fastcore version supported.
38
+
39
+ <Tip>
40
+ Raises the following error:
41
+
42
+ - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
43
+ if the fastai or fastcore libraries are not available or are of an invalid version.
44
+
45
+ </Tip>
46
+ """
47
+
48
+ if (get_fastcore_version() or get_fastai_version()) == "N/A":
49
+ raise ImportError(
50
+ f"fastai>={fastai_min_version} and fastcore>={fastcore_min_version} are"
51
+ f" required. Currently using fastai=={get_fastai_version()} and"
52
+ f" fastcore=={get_fastcore_version()}."
53
+ )
54
+
55
+ current_fastai_version = version.Version(get_fastai_version())
56
+ current_fastcore_version = version.Version(get_fastcore_version())
57
+
58
+ if current_fastai_version < version.Version(fastai_min_version):
59
+ raise ImportError(
60
+ "`push_to_hub_fastai` and `from_pretrained_fastai` require a"
61
+ f" fastai>={fastai_min_version} version, but you are using fastai version"
62
+ f" {get_fastai_version()} which is incompatible. Upgrade with `pip install"
63
+ " fastai==2.5.6`."
64
+ )
65
+
66
+ if current_fastcore_version < version.Version(fastcore_min_version):
67
+ raise ImportError(
68
+ "`push_to_hub_fastai` and `from_pretrained_fastai` require a"
69
+ f" fastcore>={fastcore_min_version} version, but you are using fastcore"
70
+ f" version {get_fastcore_version()} which is incompatible. Upgrade with"
71
+ " `pip install fastcore==1.3.27`."
72
+ )
73
+
74
+
75
+ def _check_fastai_fastcore_pyproject_versions(
76
+ storage_folder: str,
77
+ fastai_min_version: str = "2.4",
78
+ fastcore_min_version: str = "1.3.27",
79
+ ):
80
+ """
81
+ Checks that the `pyproject.toml` file in the directory `storage_folder` has fastai and fastcore versions
82
+ that are compatible with `from_pretrained_fastai` and `push_to_hub_fastai`. If `pyproject.toml` does not exist
83
+ or does not contain versions for fastai and fastcore, then it logs a warning.
84
+
85
+ Args:
86
+ storage_folder (`str`):
87
+ Folder to look for the `pyproject.toml` file.
88
+ fastai_min_version (`str`, *optional*):
89
+ The minimum fastai version supported.
90
+ fastcore_min_version (`str`, *optional*):
91
+ The minimum fastcore version supported.
92
+
93
+ <Tip>
94
+ Raises the following errors:
95
+
96
+ - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
97
+ if the `toml` module is not installed.
98
+ - [`ImportError`](https://docs.python.org/3/library/exceptions.html#ImportError)
99
+ if the `pyproject.toml` indicates a lower than minimum supported version of fastai or fastcore.
100
+
101
+ </Tip>
102
+ """
103
+
104
+ try:
105
+ import toml
106
+ except ModuleNotFoundError:
107
+ raise ImportError(
108
+ "`push_to_hub_fastai` and `from_pretrained_fastai` require the toml module."
109
+ " Install it with `pip install toml`."
110
+ )
111
+
112
+ # Checks that a `pyproject.toml`, with `build-system` and `requires` sections, exists in the repository. If so, get a list of required packages.
113
+ if not os.path.isfile(f"{storage_folder}/pyproject.toml"):
114
+ logger.warning(
115
+ "There is no `pyproject.toml` in the repository that contains the fastai"
116
+ " `Learner`. The `pyproject.toml` would allow us to verify that your fastai"
117
+ " and fastcore versions are compatible with those of the model you want to"
118
+ " load."
119
+ )
120
+ return
121
+ pyproject_toml = toml.load(f"{storage_folder}/pyproject.toml")
122
+
123
+ if "build-system" not in pyproject_toml.keys():
124
+ logger.warning(
125
+ "There is no `build-system` section in the pyproject.toml of the repository"
126
+ " that contains the fastai `Learner`. The `build-system` would allow us to"
127
+ " verify that your fastai and fastcore versions are compatible with those"
128
+ " of the model you want to load."
129
+ )
130
+ return
131
+ build_system_toml = pyproject_toml["build-system"]
132
+
133
+ if "requires" not in build_system_toml.keys():
134
+ logger.warning(
135
+ "There is no `requires` section in the pyproject.toml of the repository"
136
+ " that contains the fastai `Learner`. The `requires` would allow us to"
137
+ " verify that your fastai and fastcore versions are compatible with those"
138
+ " of the model you want to load."
139
+ )
140
+ return
141
+ package_versions = build_system_toml["requires"]
142
+
143
+ # Extracts contains fastai and fastcore versions from `pyproject.toml` if available.
144
+ # If the package is specified but not the version (e.g. "fastai" instead of "fastai=2.4"), the default versions are the highest.
145
+ fastai_packages = [pck for pck in package_versions if pck.startswith("fastai")]
146
+ if len(fastai_packages) == 0:
147
+ logger.warning("The repository does not have a fastai version specified in the `pyproject.toml`.")
148
+ # fastai_version is an empty string if not specified
149
+ else:
150
+ fastai_version = str(fastai_packages[0]).partition("=")[2]
151
+ if fastai_version != "" and version.Version(fastai_version) < version.Version(fastai_min_version):
152
+ raise ImportError(
153
+ "`from_pretrained_fastai` requires"
154
+ f" fastai>={fastai_min_version} version but the model to load uses"
155
+ f" {fastai_version} which is incompatible."
156
+ )
157
+
158
+ fastcore_packages = [pck for pck in package_versions if pck.startswith("fastcore")]
159
+ if len(fastcore_packages) == 0:
160
+ logger.warning("The repository does not have a fastcore version specified in the `pyproject.toml`.")
161
+ # fastcore_version is an empty string if not specified
162
+ else:
163
+ fastcore_version = str(fastcore_packages[0]).partition("=")[2]
164
+ if fastcore_version != "" and version.Version(fastcore_version) < version.Version(fastcore_min_version):
165
+ raise ImportError(
166
+ "`from_pretrained_fastai` requires"
167
+ f" fastcore>={fastcore_min_version} version, but you are using fastcore"
168
+ f" version {fastcore_version} which is incompatible."
169
+ )
170
+
171
+
172
+ README_TEMPLATE = """---
173
+ tags:
174
+ - fastai
175
+ ---
176
+
177
+ # Amazing!
178
+
179
+ 🥳 Congratulations on hosting your fastai model on the Hugging Face Hub!
180
+
181
+ # Some next steps
182
+ 1. Fill out this model card with more information (see the template below and the [documentation here](https://huggingface.co/docs/hub/model-repos))!
183
+
184
+ 2. Create a demo in Gradio or Streamlit using 🤗 Spaces ([documentation here](https://huggingface.co/docs/hub/spaces)).
185
+
186
+ 3. Join the fastai community on the [Fastai Discord](https://discord.com/invite/YKrxeNn)!
187
+
188
+ Greetings fellow fastlearner 🤝! Don't forget to delete this content from your model card.
189
+
190
+
191
+ ---
192
+
193
+
194
+ # Model card
195
+
196
+ ## Model description
197
+ More information needed
198
+
199
+ ## Intended uses & limitations
200
+ More information needed
201
+
202
+ ## Training and evaluation data
203
+ More information needed
204
+ """
205
+
206
+ PYPROJECT_TEMPLATE = f"""[build-system]
207
+ requires = ["setuptools>=40.8.0", "wheel", "python={get_python_version()}", "fastai={get_fastai_version()}", "fastcore={get_fastcore_version()}"]
208
+ build-backend = "setuptools.build_meta:__legacy__"
209
+ """
210
+
211
+
212
+ def _create_model_card(repo_dir: Path):
213
+ """
214
+ Creates a model card for the repository.
215
+
216
+ Args:
217
+ repo_dir (`Path`):
218
+ Directory where model card is created.
219
+ """
220
+ readme_path = repo_dir / "README.md"
221
+
222
+ if not readme_path.exists():
223
+ with readme_path.open("w", encoding="utf-8") as f:
224
+ f.write(README_TEMPLATE)
225
+
226
+
227
+ def _create_model_pyproject(repo_dir: Path):
228
+ """
229
+ Creates a `pyproject.toml` for the repository.
230
+
231
+ Args:
232
+ repo_dir (`Path`):
233
+ Directory where `pyproject.toml` is created.
234
+ """
235
+ pyproject_path = repo_dir / "pyproject.toml"
236
+
237
+ if not pyproject_path.exists():
238
+ with pyproject_path.open("w", encoding="utf-8") as f:
239
+ f.write(PYPROJECT_TEMPLATE)
240
+
241
+
242
+ def _save_pretrained_fastai(
243
+ learner,
244
+ save_directory: Union[str, Path],
245
+ config: Optional[Dict[str, Any]] = None,
246
+ ):
247
+ """
248
+ Saves a fastai learner to `save_directory` in pickle format using the default pickle protocol for the version of python used.
249
+
250
+ Args:
251
+ learner (`Learner`):
252
+ The `fastai.Learner` you'd like to save.
253
+ save_directory (`str` or `Path`):
254
+ Specific directory in which you want to save the fastai learner.
255
+ config (`dict`, *optional*):
256
+ Configuration object. Will be uploaded as a .json file. Example: 'https://huggingface.co/espejelomar/fastai-pet-breeds-classification/blob/main/config.json'.
257
+
258
+ <Tip>
259
+
260
+ Raises the following error:
261
+
262
+ - [`RuntimeError`](https://docs.python.org/3/library/exceptions.html#RuntimeError)
263
+ if the config file provided is not a dictionary.
264
+
265
+ </Tip>
266
+ """
267
+ _check_fastai_fastcore_versions()
268
+
269
+ os.makedirs(save_directory, exist_ok=True)
270
+
271
+ # if the user provides config then we update it with the fastai and fastcore versions in CONFIG_TEMPLATE.
272
+ if config is not None:
273
+ if not isinstance(config, dict):
274
+ raise RuntimeError(f"Provided config should be a dict. Got: '{type(config)}'")
275
+ path = os.path.join(save_directory, CONFIG_NAME)
276
+ with open(path, "w") as f:
277
+ json.dump(config, f)
278
+
279
+ _create_model_card(Path(save_directory))
280
+ _create_model_pyproject(Path(save_directory))
281
+
282
+ # learner.export saves the model in `self.path`.
283
+ learner.path = Path(save_directory)
284
+ os.makedirs(save_directory, exist_ok=True)
285
+ try:
286
+ learner.export(
287
+ fname="model.pkl",
288
+ pickle_protocol=DEFAULT_PROTOCOL,
289
+ )
290
+ except PicklingError:
291
+ raise PicklingError(
292
+ "You are using a lambda function, i.e., an anonymous function. `pickle`"
293
+ " cannot pickle function objects and requires that all functions have"
294
+ " names. One possible solution is to name the function."
295
+ )
296
+
297
+
298
+ @validate_hf_hub_args
299
+ def from_pretrained_fastai(
300
+ repo_id: str,
301
+ revision: Optional[str] = None,
302
+ ):
303
+ """
304
+ Load pretrained fastai model from the Hub or from a local directory.
305
+
306
+ Args:
307
+ repo_id (`str`):
308
+ The location where the pickled fastai.Learner is. It can be either of the two:
309
+ - Hosted on the Hugging Face Hub. E.g.: 'espejelomar/fatai-pet-breeds-classification' or 'distilgpt2'.
310
+ You can add a `revision` by appending `@` at the end of `repo_id`. E.g.: `dbmdz/bert-base-german-cased@main`.
311
+ Revision is the specific model version to use. Since we use a git-based system for storing models and other
312
+ artifacts on the Hugging Face Hub, it can be a branch name, a tag name, or a commit id.
313
+ - Hosted locally. `repo_id` would be a directory containing the pickle and a pyproject.toml
314
+ indicating the fastai and fastcore versions used to build the `fastai.Learner`. E.g.: `./my_model_directory/`.
315
+ revision (`str`, *optional*):
316
+ Revision at which the repo's files are downloaded. See documentation of `snapshot_download`.
317
+
318
+ Returns:
319
+ The `fastai.Learner` model in the `repo_id` repo.
320
+ """
321
+ _check_fastai_fastcore_versions()
322
+
323
+ # Load the `repo_id` repo.
324
+ # `snapshot_download` returns the folder where the model was stored.
325
+ # `cache_dir` will be the default '/root/.cache/huggingface/hub'
326
+ if not os.path.isdir(repo_id):
327
+ storage_folder = snapshot_download(
328
+ repo_id=repo_id,
329
+ revision=revision,
330
+ library_name="fastai",
331
+ library_version=get_fastai_version(),
332
+ )
333
+ else:
334
+ storage_folder = repo_id
335
+
336
+ _check_fastai_fastcore_pyproject_versions(storage_folder)
337
+
338
+ from fastai.learner import load_learner # type: ignore
339
+
340
+ return load_learner(os.path.join(storage_folder, "model.pkl"))
341
+
342
+
343
+ @validate_hf_hub_args
344
+ def push_to_hub_fastai(
345
+ learner,
346
+ *,
347
+ repo_id: str,
348
+ commit_message: str = "Push FastAI model using huggingface_hub.",
349
+ private: bool = False,
350
+ token: Optional[str] = None,
351
+ config: Optional[dict] = None,
352
+ branch: Optional[str] = None,
353
+ create_pr: Optional[bool] = None,
354
+ allow_patterns: Optional[Union[List[str], str]] = None,
355
+ ignore_patterns: Optional[Union[List[str], str]] = None,
356
+ delete_patterns: Optional[Union[List[str], str]] = None,
357
+ api_endpoint: Optional[str] = None,
358
+ ):
359
+ """
360
+ Upload learner checkpoint files to the Hub.
361
+
362
+ Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use
363
+ `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more
364
+ details.
365
+
366
+ Args:
367
+ learner (`Learner`):
368
+ The `fastai.Learner' you'd like to push to the Hub.
369
+ repo_id (`str`):
370
+ The repository id for your model in Hub in the format of "namespace/repo_name". The namespace can be your individual account or an organization to which you have write access (for example, 'stanfordnlp/stanza-de').
371
+ commit_message (`str`, *optional*):
372
+ Message to commit while pushing. Will default to :obj:`"add model"`.
373
+ private (`bool`, *optional*, defaults to `False`):
374
+ Whether or not the repository created should be private.
375
+ token (`str`, *optional*):
376
+ The Hugging Face account token to use as HTTP bearer authorization for remote files. If :obj:`None`, the token will be asked by a prompt.
377
+ config (`dict`, *optional*):
378
+ Configuration object to be saved alongside the model weights.
379
+ branch (`str`, *optional*):
380
+ The git branch on which to push the model. This defaults to
381
+ the default branch as specified in your repository, which
382
+ defaults to `"main"`.
383
+ create_pr (`boolean`, *optional*):
384
+ Whether or not to create a Pull Request from `branch` with that commit.
385
+ Defaults to `False`.
386
+ api_endpoint (`str`, *optional*):
387
+ The API endpoint to use when pushing the model to the hub.
388
+ allow_patterns (`List[str]` or `str`, *optional*):
389
+ If provided, only files matching at least one pattern are pushed.
390
+ ignore_patterns (`List[str]` or `str`, *optional*):
391
+ If provided, files matching any of the patterns are not pushed.
392
+ delete_patterns (`List[str]` or `str`, *optional*):
393
+ If provided, remote files matching any of the patterns will be deleted from the repo.
394
+
395
+ Returns:
396
+ The url of the commit of your model in the given repository.
397
+
398
+ <Tip>
399
+
400
+ Raises the following error:
401
+
402
+ - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
403
+ if the user is not log on to the Hugging Face Hub.
404
+
405
+ </Tip>
406
+ """
407
+ _check_fastai_fastcore_versions()
408
+ api = HfApi(endpoint=api_endpoint)
409
+ repo_id = api.create_repo(repo_id=repo_id, token=token, private=private, exist_ok=True).repo_id
410
+
411
+ # Push the files to the repo in a single commit
412
+ with SoftTemporaryDirectory() as tmp:
413
+ saved_path = Path(tmp) / repo_id
414
+ _save_pretrained_fastai(learner, saved_path, config=config)
415
+ return api.upload_folder(
416
+ repo_id=repo_id,
417
+ token=token,
418
+ folder_path=saved_path,
419
+ commit_message=commit_message,
420
+ revision=branch,
421
+ create_pr=create_pr,
422
+ allow_patterns=allow_patterns,
423
+ ignore_patterns=ignore_patterns,
424
+ delete_patterns=delete_patterns,
425
+ )
env-llmeval/lib/python3.10/site-packages/huggingface_hub/hub_mixin.py ADDED
@@ -0,0 +1,704 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import inspect
2
+ import json
3
+ import os
4
+ from dataclasses import asdict, dataclass, is_dataclass
5
+ from pathlib import Path
6
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, TypeVar, Union, get_args
7
+
8
+ from .constants import CONFIG_NAME, PYTORCH_WEIGHTS_NAME, SAFETENSORS_SINGLE_FILE
9
+ from .file_download import hf_hub_download
10
+ from .hf_api import HfApi
11
+ from .repocard import ModelCard, ModelCardData
12
+ from .utils import (
13
+ EntryNotFoundError,
14
+ HfHubHTTPError,
15
+ SoftTemporaryDirectory,
16
+ is_jsonable,
17
+ is_safetensors_available,
18
+ is_torch_available,
19
+ logging,
20
+ validate_hf_hub_args,
21
+ )
22
+ from .utils._deprecation import _deprecate_arguments
23
+
24
+
25
+ if TYPE_CHECKING:
26
+ from _typeshed import DataclassInstance
27
+
28
+ if is_torch_available():
29
+ import torch # type: ignore
30
+
31
+ if is_safetensors_available():
32
+ from safetensors.torch import load_model as load_model_as_safetensor
33
+ from safetensors.torch import save_model as save_model_as_safetensor
34
+
35
+
36
+ logger = logging.get_logger(__name__)
37
+
38
+ # Generic variable that is either ModelHubMixin or a subclass thereof
39
+ T = TypeVar("T", bound="ModelHubMixin")
40
+
41
+ DEFAULT_MODEL_CARD = """
42
+ ---
43
+ # For reference on model card metadata, see the spec: https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1
44
+ # Doc / guide: https://huggingface.co/docs/hub/model-cards
45
+ {{ card_data }}
46
+ ---
47
+
48
+ This model has been pushed to the Hub using **{{ library_name }}**:
49
+ - Repo: {{ repo_url | default("[More Information Needed]", true) }}
50
+ - Docs: {{ docs_url | default("[More Information Needed]", true) }}
51
+ """
52
+
53
+
54
@dataclass
class MixinInfo:
    # Metadata about the library integrating `ModelHubMixin`. Populated once per
    # subclass in `ModelHubMixin.__init_subclass__` and consumed by
    # `ModelHubMixin.generate_model_card` (via `asdict`) to fill the model card.
    library_name: Optional[str] = None
    tags: Optional[List[str]] = None
    repo_url: Optional[str] = None
    docs_url: Optional[str] = None
60
+
61
+
62
class ModelHubMixin:
    """
    A generic mixin to integrate ANY machine learning framework with the Hub.

    To integrate your framework, your model class must inherit from this class. Custom logic for saving/loading models
    have to be overwritten in [`_from_pretrained`] and [`_save_pretrained`]. [`PyTorchModelHubMixin`] is a good example
    of mixin integration with the Hub. Check out our [integration guide](../guides/integrations) for more instructions.

    When inheriting from [`ModelHubMixin`], you can define class-level attributes. These attributes are not passed to
    `__init__` but to the class definition itself. This is useful to define metadata about the library integrating
    [`ModelHubMixin`].

    Args:
        library_name (`str`, *optional*):
            Name of the library integrating ModelHubMixin. Used to generate model card.
        tags (`List[str]`, *optional*):
            Tags to be added to the model card. Used to generate model card.
        repo_url (`str`, *optional*):
            URL of the library repository. Used to generate model card.
        docs_url (`str`, *optional*):
            URL of the library documentation. Used to generate model card.

    Example:

    ```python
    >>> from huggingface_hub import ModelHubMixin

    # Inherit from ModelHubMixin
    >>> class MyCustomModel(
    ...     ModelHubMixin,
    ...     library_name="my-library",
    ...     tags=["x-custom-tag"],
    ...     repo_url="https://github.com/huggingface/my-cool-library",
    ...     docs_url="https://huggingface.co/docs/my-cool-library",
    ...     # ^ optional metadata to generate model card
    ... ):
    ...     def __init__(self, size: int = 512, device: str = "cpu"):
    ...         # define how to initialize your model
    ...         super().__init__()
    ...         ...
    ...
    ...     def _save_pretrained(self, save_directory: Path) -> None:
    ...         # define how to serialize your model
    ...         ...
    ...
    ...     @classmethod
    ...     def from_pretrained(
    ...         cls: Type[T],
    ...         pretrained_model_name_or_path: Union[str, Path],
    ...         *,
    ...         force_download: bool = False,
    ...         resume_download: bool = False,
    ...         proxies: Optional[Dict] = None,
    ...         token: Optional[Union[str, bool]] = None,
    ...         cache_dir: Optional[Union[str, Path]] = None,
    ...         local_files_only: bool = False,
    ...         revision: Optional[str] = None,
    ...         **model_kwargs,
    ...     ) -> T:
    ...         # define how to deserialize your model
    ...         ...

    >>> model = MyCustomModel(size=256, device="gpu")

    # Save model weights to local directory
    >>> model.save_pretrained("my-awesome-model")

    # Push model weights to the Hub
    >>> model.push_to_hub("my-awesome-model")

    # Download and initialize weights from the Hub
    >>> reloaded_model = MyCustomModel.from_pretrained("username/my-awesome-model")
    >>> reloaded_model._hub_mixin_config
    {"size": 256, "device": "gpu"}

    # Model card has been correctly populated
    >>> from huggingface_hub import ModelCard
    >>> card = ModelCard.load("username/my-awesome-model")
    >>> card.data.tags
    ["x-custom-tag", "pytorch_model_hub_mixin", "model_hub_mixin"]
    >>> card.data.library_name
    "my-library"
    ```
    """

    _hub_mixin_config: Optional[Union[dict, "DataclassInstance"]] = None
    # ^ optional config attribute automatically set in `from_pretrained`
    _hub_mixin_info: MixinInfo
    # ^ information about the library integrating ModelHubMixin (used to generate model card)
    _hub_mixin_init_parameters: Dict[str, inspect.Parameter]
    _hub_mixin_jsonable_default_values: Dict[str, Any]
    _hub_mixin_inject_config: bool
    # ^ internal values to handle config

    def __init_subclass__(
        cls,
        *,
        library_name: Optional[str] = None,
        tags: Optional[List[str]] = None,
        repo_url: Optional[str] = None,
        docs_url: Optional[str] = None,
    ) -> None:
        """Inspect __init__ signature only once when subclassing + handle modelcard."""
        super().__init_subclass__()

        # Will be reused when creating modelcard. Work on a copy so that the list
        # passed by the caller (possibly shared between several subclasses) is
        # never mutated in place.
        tags = list(tags) if tags else []
        tags.append("model_hub_mixin")
        cls._hub_mixin_info = MixinInfo(
            library_name=library_name,
            tags=tags,
            repo_url=repo_url,
            docs_url=docs_url,
        )

        # Inspect __init__ signature to handle config
        cls._hub_mixin_init_parameters = dict(inspect.signature(cls.__init__).parameters)
        cls._hub_mixin_jsonable_default_values = {
            param.name: param.default
            for param in cls._hub_mixin_init_parameters.values()
            if param.default is not inspect.Parameter.empty and is_jsonable(param.default)
        }
        cls._hub_mixin_inject_config = "config" in inspect.signature(cls._from_pretrained).parameters

    def __new__(cls, *args, **kwargs) -> "ModelHubMixin":
        """Create a new instance of the class and handle config.

        3 cases:
        - If `self._hub_mixin_config` is already set, do nothing.
        - If `config` is passed as a dataclass, set it as `self._hub_mixin_config`.
        - Otherwise, build `self._hub_mixin_config` from default values and passed values.
        """
        instance = super().__new__(cls)

        # If `config` is already set, return early
        if instance._hub_mixin_config is not None:
            return instance

        # Infer passed values (positional args are matched against the __init__
        # signature, skipping `self`; explicit kwargs take precedence)
        passed_values = {
            **{
                key: value
                for key, value in zip(
                    # [1:] to skip `self` parameter
                    list(cls._hub_mixin_init_parameters)[1:],
                    args,
                )
            },
            **kwargs,
        }

        # If config passed as dataclass => set it and return early
        if is_dataclass(passed_values.get("config")):
            instance._hub_mixin_config = passed_values["config"]
            return instance

        # Otherwise, build config from default + passed values
        init_config = {
            # default values
            **cls._hub_mixin_jsonable_default_values,
            # passed values
            **{key: value for key, value in passed_values.items() if is_jsonable(value)},
        }
        init_config.pop("config", {})

        # Populate `init_config` with provided config (a plain dict overrides both
        # defaults and passed values)
        provided_config = passed_values.get("config")
        if isinstance(provided_config, dict):
            init_config.update(provided_config)

        # Set `config` attribute and return
        if init_config != {}:
            instance._hub_mixin_config = init_config
        return instance

    def save_pretrained(
        self,
        save_directory: Union[str, Path],
        *,
        config: Optional[Union[dict, "DataclassInstance"]] = None,
        repo_id: Optional[str] = None,
        push_to_hub: bool = False,
        **push_to_hub_kwargs,
    ) -> Optional[str]:
        """
        Save weights in local directory.

        Args:
            save_directory (`str` or `Path`):
                Path to directory in which the model weights and configuration will be saved.
            config (`dict` or `DataclassInstance`, *optional*):
                Model configuration specified as a key/value dictionary or a dataclass instance.
            push_to_hub (`bool`, *optional*, defaults to `False`):
                Whether or not to push your model to the Huggingface Hub after saving it.
            repo_id (`str`, *optional*):
                ID of your repository on the Hub. Used only if `push_to_hub=True`. Will default to the folder name if
                not provided.
            kwargs:
                Additional key word arguments passed along to the [`~ModelHubMixin.push_to_hub`] method.

        Returns:
            `str` or `None`: url of the commit on the Hub if `push_to_hub=True`, `None` otherwise.
        """
        save_directory = Path(save_directory)
        save_directory.mkdir(parents=True, exist_ok=True)

        # Remove config.json if already exists. After `_save_pretrained` we don't want to overwrite config.json
        # as it might have been saved by the custom `_save_pretrained` already. However we do want to overwrite
        # an existing config.json if it was not saved by `_save_pretrained`.
        config_path = save_directory / CONFIG_NAME
        config_path.unlink(missing_ok=True)

        # save model weights/files (framework-specific)
        self._save_pretrained(save_directory)

        # save config (if provided and if not serialized yet in `_save_pretrained`)
        if config is None:
            config = self._hub_mixin_config
        if config is not None:
            if is_dataclass(config):
                config = asdict(config)  # type: ignore[arg-type]
            if not config_path.exists():
                config_str = json.dumps(config, sort_keys=True, indent=2)
                config_path.write_text(config_str)

        # save model card (do not overwrite if it already exists)
        model_card_path = save_directory / "README.md"
        if not model_card_path.exists():
            self.generate_model_card().save(model_card_path)

        # push to the Hub if required
        if push_to_hub:
            kwargs = push_to_hub_kwargs.copy()  # soft-copy to avoid mutating input
            if config is not None:  # kwarg for `push_to_hub`
                kwargs["config"] = config
            if repo_id is None:
                repo_id = save_directory.name  # Defaults to `save_directory` name
            return self.push_to_hub(repo_id=repo_id, **kwargs)
        return None

    def _save_pretrained(self, save_directory: Path) -> None:
        """
        Overwrite this method in subclass to define how to save your model.
        Check out our [integration guide](../guides/integrations) for instructions.

        Args:
            save_directory (`str` or `Path`):
                Path to directory in which the model weights and configuration will be saved.
        """
        raise NotImplementedError

    @classmethod
    @validate_hf_hub_args
    def from_pretrained(
        cls: Type[T],
        pretrained_model_name_or_path: Union[str, Path],
        *,
        force_download: bool = False,
        resume_download: bool = False,
        proxies: Optional[Dict] = None,
        token: Optional[Union[str, bool]] = None,
        cache_dir: Optional[Union[str, Path]] = None,
        local_files_only: bool = False,
        revision: Optional[str] = None,
        **model_kwargs,
    ) -> T:
        """
        Download a model from the Huggingface Hub and instantiate it.

        Args:
            pretrained_model_name_or_path (`str`, `Path`):
                - Either the `model_id` (string) of a model hosted on the Hub, e.g. `bigscience/bloom`.
                - Or a path to a `directory` containing model weights saved using
                    [`~transformers.PreTrainedModel.save_pretrained`], e.g., `../path/to/my_model_directory/`.
            revision (`str`, *optional*):
                Revision of the model on the Hub. Can be a branch name, a git tag or any commit id.
                Defaults to the latest commit on `main` branch.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding
                the existing cache.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether to delete incompletely received files. Will attempt to resume the download if such a file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on every request.
            token (`str` or `bool`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            cache_dir (`str`, `Path`, *optional*):
                Path to the folder where cached files are stored.
            local_files_only (`bool`, *optional*, defaults to `False`):
                If `True`, avoid downloading the file and return the path to the local cached file if it exists.
            model_kwargs (`Dict`, *optional*):
                Additional kwargs to pass to the model during initialization.
        """
        model_id = str(pretrained_model_name_or_path)
        config_file: Optional[str] = None
        if os.path.isdir(model_id):
            if CONFIG_NAME in os.listdir(model_id):
                config_file = os.path.join(model_id, CONFIG_NAME)
            else:
                logger.warning(f"{CONFIG_NAME} not found in {Path(model_id).resolve()}")
        else:
            try:
                config_file = hf_hub_download(
                    repo_id=model_id,
                    filename=CONFIG_NAME,
                    revision=revision,
                    cache_dir=cache_dir,
                    force_download=force_download,
                    proxies=proxies,
                    resume_download=resume_download,
                    token=token,
                    local_files_only=local_files_only,
                )
            except HfHubHTTPError as e:
                logger.info(f"{CONFIG_NAME} not found on the HuggingFace Hub: {str(e)}")

        # Read config
        config = None
        if config_file is not None:
            with open(config_file, "r", encoding="utf-8") as f:
                config = json.load(f)

        # All config-dependent kwargs injection must be guarded: when no config file
        # was found, `config` is `None` and expressions such as `key in config` or
        # `config.items()` would raise a TypeError.
        if config is not None:
            # Populate model_kwargs from config
            for param in cls._hub_mixin_init_parameters.values():
                if param.name not in model_kwargs and param.name in config:
                    model_kwargs[param.name] = config[param.name]

            # Check if `config` argument was passed at init
            if "config" in cls._hub_mixin_init_parameters:
                # Check if `config` argument is a dataclass
                config_annotation = cls._hub_mixin_init_parameters["config"].annotation
                if config_annotation is inspect.Parameter.empty:
                    pass  # no annotation
                elif is_dataclass(config_annotation):
                    config = _load_dataclass(config_annotation, config)
                else:
                    # if Optional/Union annotation => check if a dataclass is in the Union
                    for _sub_annotation in get_args(config_annotation):
                        if is_dataclass(_sub_annotation):
                            config = _load_dataclass(_sub_annotation, config)
                            break

                # Forward config to model initialization
                model_kwargs["config"] = config

            # Inject config values if the class is a dataclass or if `**kwargs` are expected
            if is_dataclass(cls):
                for key in cls.__dataclass_fields__:
                    if key not in model_kwargs and key in config:
                        model_kwargs[key] = config[key]
            elif any(param.kind == inspect.Parameter.VAR_KEYWORD for param in cls._hub_mixin_init_parameters.values()):
                for key, value in config.items():
                    if key not in model_kwargs:
                        model_kwargs[key] = value

            # Finally, also inject if `_from_pretrained` expects it
            if cls._hub_mixin_inject_config:
                model_kwargs["config"] = config

        instance = cls._from_pretrained(
            model_id=str(model_id),
            revision=revision,
            cache_dir=cache_dir,
            force_download=force_download,
            proxies=proxies,
            resume_download=resume_download,
            local_files_only=local_files_only,
            token=token,
            **model_kwargs,
        )

        # Implicitly set the config as instance attribute if not already set by the class
        # This way `config` will be available when calling `save_pretrained` or `push_to_hub`.
        if config is not None and (getattr(instance, "_hub_mixin_config", None) in (None, {})):
            instance._hub_mixin_config = config

        return instance

    @classmethod
    def _from_pretrained(
        cls: Type[T],
        *,
        model_id: str,
        revision: Optional[str],
        cache_dir: Optional[Union[str, Path]],
        force_download: bool,
        proxies: Optional[Dict],
        resume_download: bool,
        local_files_only: bool,
        token: Optional[Union[str, bool]],
        **model_kwargs,
    ) -> T:
        """Overwrite this method in subclass to define how to load your model from pretrained.

        Use [`hf_hub_download`] or [`snapshot_download`] to download files from the Hub before loading them. Most
        args taken as input can be directly passed to those 2 methods. If needed, you can add more arguments to this
        method using "model_kwargs". For example [`PyTorchModelHubMixin._from_pretrained`] takes as input a `map_location`
        parameter to set on which device the model should be loaded.

        Check out our [integration guide](../guides/integrations) for more instructions.

        Args:
            model_id (`str`):
                ID of the model to load from the Huggingface Hub (e.g. `bigscience/bloom`).
            revision (`str`, *optional*):
                Revision of the model on the Hub. Can be a branch name, a git tag or any commit id. Defaults to the
                latest commit on `main` branch.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding
                the existing cache.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether to delete incompletely received files. Will attempt to resume the download if such a file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint (e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`).
            token (`str` or `bool`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            cache_dir (`str`, `Path`, *optional*):
                Path to the folder where cached files are stored.
            local_files_only (`bool`, *optional*, defaults to `False`):
                If `True`, avoid downloading the file and return the path to the local cached file if it exists.
            model_kwargs:
                Additional keyword arguments passed along to the [`~ModelHubMixin._from_pretrained`] method.
        """
        raise NotImplementedError

    @_deprecate_arguments(
        version="0.23.0",
        deprecated_args=["api_endpoint"],
        custom_message="Use `HF_ENDPOINT` environment variable instead.",
    )
    @validate_hf_hub_args
    def push_to_hub(
        self,
        repo_id: str,
        *,
        config: Optional[Union[dict, "DataclassInstance"]] = None,
        commit_message: str = "Push model using huggingface_hub.",
        private: bool = False,
        token: Optional[str] = None,
        branch: Optional[str] = None,
        create_pr: Optional[bool] = None,
        allow_patterns: Optional[Union[List[str], str]] = None,
        ignore_patterns: Optional[Union[List[str], str]] = None,
        delete_patterns: Optional[Union[List[str], str]] = None,
        # TODO: remove once deprecated
        api_endpoint: Optional[str] = None,
    ) -> str:
        """
        Upload model checkpoint to the Hub.

        Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use
        `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more
        details.

        Args:
            repo_id (`str`):
                ID of the repository to push to (example: `"username/my-model"`).
            config (`dict` or `DataclassInstance`, *optional*):
                Model configuration specified as a key/value dictionary or a dataclass instance.
            commit_message (`str`, *optional*):
                Message to commit while pushing.
            private (`bool`, *optional*, defaults to `False`):
                Whether the repository created should be private.
            api_endpoint (`str`, *optional*):
                The API endpoint to use when pushing the model to the hub.
            token (`str`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            branch (`str`, *optional*):
                The git branch on which to push the model. This defaults to `"main"`.
            create_pr (`boolean`, *optional*):
                Whether or not to create a Pull Request from `branch` with that commit. Defaults to `False`.
            allow_patterns (`List[str]` or `str`, *optional*):
                If provided, only files matching at least one pattern are pushed.
            ignore_patterns (`List[str]` or `str`, *optional*):
                If provided, files matching any of the patterns are not pushed.
            delete_patterns (`List[str]` or `str`, *optional*):
                If provided, remote files matching any of the patterns will be deleted from the repo.

        Returns:
            The url of the commit of your model in the given repository.
        """
        api = HfApi(endpoint=api_endpoint, token=token)
        repo_id = api.create_repo(repo_id=repo_id, private=private, exist_ok=True).repo_id

        # Push the files to the repo in a single commit
        with SoftTemporaryDirectory() as tmp:
            saved_path = Path(tmp) / repo_id
            self.save_pretrained(saved_path, config=config)
            return api.upload_folder(
                repo_id=repo_id,
                repo_type="model",
                folder_path=saved_path,
                commit_message=commit_message,
                revision=branch,
                create_pr=create_pr,
                allow_patterns=allow_patterns,
                ignore_patterns=ignore_patterns,
                delete_patterns=delete_patterns,
            )

    def generate_model_card(self, *args, **kwargs) -> ModelCard:
        """Generate a model card from the library metadata stored in `_hub_mixin_info`."""
        card = ModelCard.from_template(
            card_data=ModelCardData(**asdict(self._hub_mixin_info)),
            template_str=DEFAULT_MODEL_CARD,
        )
        return card
570
+
571
+
572
class PyTorchModelHubMixin(ModelHubMixin):
    """
    Implementation of [`ModelHubMixin`] to provide model Hub upload/download capabilities to PyTorch models. The model
    is set in evaluation mode by default using `model.eval()` (dropout modules are deactivated). To train the model,
    you should first set it back in training mode with `model.train()`.

    Example:

    ```python
    >>> import torch
    >>> import torch.nn as nn
    >>> from huggingface_hub import PyTorchModelHubMixin

    >>> class MyModel(
    ...     nn.Module,
    ...     PyTorchModelHubMixin,
    ...     library_name="keras-nlp",
    ...     repo_url="https://github.com/keras-team/keras-nlp",
    ...     docs_url="https://keras.io/keras_nlp/",
    ...     # ^ optional metadata to generate model card
    ... ):
    ...     def __init__(self, hidden_size: int = 512, vocab_size: int = 30000, output_size: int = 4):
    ...         super().__init__()
    ...         self.param = nn.Parameter(torch.rand(hidden_size, vocab_size))
    ...         self.linear = nn.Linear(output_size, vocab_size)

    ...     def forward(self, x):
    ...         return self.linear(x + self.param)
    >>> model = MyModel(hidden_size=256)

    # Save model weights to local directory
    >>> model.save_pretrained("my-awesome-model")

    # Push model weights to the Hub
    >>> model.push_to_hub("my-awesome-model")

    # Download and initialize weights from the Hub
    >>> model = MyModel.from_pretrained("username/my-awesome-model")
    >>> model.hidden_size
    256
    ```
    """

    def __init_subclass__(cls, *args, tags: Optional[List[str]] = None, **kwargs) -> None:
        # Copy before appending so that a caller-provided list (possibly shared
        # between several subclasses) is never mutated in place.
        tags = list(tags) if tags else []
        tags.append("pytorch_model_hub_mixin")
        kwargs["tags"] = tags
        return super().__init_subclass__(*args, **kwargs)

    def _save_pretrained(self, save_directory: Path) -> None:
        """Save weights from a Pytorch model to a local directory."""
        # Unwrap a potential distributed/data-parallel wrapper (exposing `.module`)
        # before serializing, so the saved state_dict has un-prefixed keys.
        model_to_save = self.module if hasattr(self, "module") else self  # type: ignore
        save_model_as_safetensor(model_to_save, str(save_directory / SAFETENSORS_SINGLE_FILE))

    @classmethod
    def _from_pretrained(
        cls,
        *,
        model_id: str,
        revision: Optional[str],
        cache_dir: Optional[Union[str, Path]],
        force_download: bool,
        proxies: Optional[Dict],
        resume_download: bool,
        local_files_only: bool,
        token: Union[str, bool, None],
        map_location: str = "cpu",
        strict: bool = False,
        **model_kwargs,
    ):
        """Load Pytorch pretrained weights and return the loaded model."""
        model = cls(**model_kwargs)
        if os.path.isdir(model_id):
            # Use the logger (not `print`) so library users can control verbosity.
            logger.info("Loading weights from local directory")
            model_file = os.path.join(model_id, SAFETENSORS_SINGLE_FILE)
            return cls._load_as_safetensor(model, model_file, map_location, strict)
        else:
            # Prefer the safetensors checkpoint; fall back to the legacy pickle
            # checkpoint only when no safetensors file exists in the repo.
            try:
                model_file = hf_hub_download(
                    repo_id=model_id,
                    filename=SAFETENSORS_SINGLE_FILE,
                    revision=revision,
                    cache_dir=cache_dir,
                    force_download=force_download,
                    proxies=proxies,
                    resume_download=resume_download,
                    token=token,
                    local_files_only=local_files_only,
                )
                return cls._load_as_safetensor(model, model_file, map_location, strict)
            except EntryNotFoundError:
                model_file = hf_hub_download(
                    repo_id=model_id,
                    filename=PYTORCH_WEIGHTS_NAME,
                    revision=revision,
                    cache_dir=cache_dir,
                    force_download=force_download,
                    proxies=proxies,
                    resume_download=resume_download,
                    token=token,
                    local_files_only=local_files_only,
                )
                return cls._load_as_pickle(model, model_file, map_location, strict)

    @classmethod
    def _load_as_pickle(cls, model: T, model_file: str, map_location: str, strict: bool) -> T:
        """Load a pickled state_dict into `model` and set it to eval mode."""
        # NOTE(security): `torch.load` unpickles arbitrary objects — only load
        # checkpoints from trusted sources.
        state_dict = torch.load(model_file, map_location=torch.device(map_location))
        model.load_state_dict(state_dict, strict=strict)  # type: ignore
        model.eval()  # type: ignore
        return model

    @classmethod
    def _load_as_safetensor(cls, model: T, model_file: str, map_location: str, strict: bool) -> T:
        """Load a safetensors checkpoint into `model`, then move it to `map_location`."""
        load_model_as_safetensor(model, model_file, strict=strict)  # type: ignore [arg-type]
        if map_location != "cpu":
            # TODO: remove this once https://github.com/huggingface/safetensors/pull/449 is merged.
            logger.warning(
                "Loading model weights on other devices than 'cpu' is not supported natively."
                " This means that the model is loaded on 'cpu' first and then copied to the device."
                " This leads to a slower loading time."
                " Support for loading directly on other devices is planned to be added in future releases."
                " See https://github.com/huggingface/huggingface_hub/pull/2086 for more details."
            )
            model.to(map_location)  # type: ignore [attr-defined]
        return model
697
+
698
+
699
+ def _load_dataclass(datacls: Type["DataclassInstance"], data: dict) -> "DataclassInstance":
700
+ """Load a dataclass instance from a dictionary.
701
+
702
+ Fields not expected by the dataclass are ignored.
703
+ """
704
+ return datacls(**{k: v for k, v in data.items() if k in datacls.__dataclass_fields__})
env-llmeval/lib/python3.10/site-packages/huggingface_hub/keras_mixin.py ADDED
@@ -0,0 +1,502 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections.abc as collections
2
+ import json
3
+ import os
4
+ import warnings
5
+ from functools import wraps
6
+ from pathlib import Path
7
+ from shutil import copytree
8
+ from typing import Any, Dict, List, Optional, Union
9
+
10
+ from huggingface_hub import ModelHubMixin, snapshot_download
11
+ from huggingface_hub.utils import (
12
+ get_tf_version,
13
+ is_graphviz_available,
14
+ is_pydot_available,
15
+ is_tf_available,
16
+ yaml_dump,
17
+ )
18
+
19
+ from .constants import CONFIG_NAME
20
+ from .hf_api import HfApi
21
+ from .utils import SoftTemporaryDirectory, logging, validate_hf_hub_args
22
+ from .utils._typing import CallableT
23
+
24
+
25
+ logger = logging.get_logger(__name__)
26
+
27
# `keras` stays `None` when TensorFlow is not installed; code below is expected
# to check availability (e.g. via `is_tf_available()`) before using it.
keras = None
if is_tf_available():
    # Depending on which version of TensorFlow is installed, we need to import
    # keras from the correct location.
    # See https://github.com/tensorflow/tensorflow/releases/tag/v2.16.1.
    # Note: saving a keras model only works with Keras<3.0.
    try:
        import tf_keras as keras  # type: ignore
    except ImportError:
        import tensorflow as tf  # type: ignore

        keras = tf.keras
39
+
40
+
41
def _requires_keras_2_model(fn: CallableT) -> CallableT:
    """Decorator that rejects Keras 3.x models before running the wrapped function.

    Saving through this module only works with Keras<3.0, so TF-facing entry
    points are guarded with this check.
    """

    @wraps(fn)
    def _wrapper(model, *args, **kwargs):
        # Keras 2.x models expose a `history` attribute while Keras 3.x models
        # do not — a hacky but cheap duck-typing check to tell them apart.
        if hasattr(model, "history"):
            return fn(model, *args, **kwargs)
        raise NotImplementedError(
            f"Cannot use '{fn.__name__}': Keras 3.x is not supported."
            " Please save models manually and upload them using `upload_folder` or `huggingface-cli upload`."
        )

    return _wrapper  # type: ignore [return-value]
53
+
54
+
55
+ def _flatten_dict(dictionary, parent_key=""):
56
+ """Flatten a nested dictionary.
57
+ Reference: https://stackoverflow.com/a/6027615/10319735
58
+
59
+ Args:
60
+ dictionary (`dict`):
61
+ The nested dictionary to be flattened.
62
+ parent_key (`str`):
63
+ The parent key to be prefixed to the children keys.
64
+ Necessary for recursing over the nested dictionary.
65
+
66
+ Returns:
67
+ The flattened dictionary.
68
+ """
69
+ items = []
70
+ for key, value in dictionary.items():
71
+ new_key = f"{parent_key}.{key}" if parent_key else key
72
+ if isinstance(value, collections.MutableMapping):
73
+ items.extend(
74
+ _flatten_dict(
75
+ value,
76
+ new_key,
77
+ ).items()
78
+ )
79
+ else:
80
+ items.append((new_key, value))
81
+ return dict(items)
82
+
83
+
84
+ def _create_hyperparameter_table(model):
85
+ """Parse hyperparameter dictionary into a markdown table."""
86
+ table = None
87
+ if model.optimizer is not None:
88
+ optimizer_params = model.optimizer.get_config()
89
+ # flatten the configuration
90
+ optimizer_params = _flatten_dict(optimizer_params)
91
+ optimizer_params["training_precision"] = keras.mixed_precision.global_policy().name
92
+ table = "| Hyperparameters | Value |\n| :-- | :-- |\n"
93
+ for key, value in optimizer_params.items():
94
+ table += f"| {key} | {value} |\n"
95
+ return table
96
+
97
+
98
def _plot_network(model, save_directory):
    """Render the model architecture to ``<save_directory>/model.png`` via keras."""
    # Fixed rendering options: top-to-bottom layout, no shape/dtype annotations.
    plot_options = {
        "show_shapes": False,
        "show_dtype": False,
        "show_layer_names": True,
        "rankdir": "TB",
        "expand_nested": False,
        "dpi": 96,
        "layer_range": None,
    }
    keras.utils.plot_model(model, to_file=f"{save_directory}/model.png", **plot_options)
110
+
111
+
112
def _create_model_card(
    model,
    repo_dir: Path,
    plot_model: bool = True,
    metadata: Optional[dict] = None,
):
    """
    Creates a model card for the repository.

    Do not overwrite an existing README.md file.
    """
    readme_path = repo_dir / "README.md"
    # Respect a user-provided model card: never clobber an existing README.
    if readme_path.exists():
        return

    hyperparameters = _create_hyperparameter_table(model)
    # Plotting requires both graphviz and pydot; silently skip when unavailable.
    if plot_model and is_graphviz_available() and is_pydot_available():
        _plot_network(model, repo_dir)

    metadata = {} if metadata is None else metadata
    metadata["library_name"] = "keras"

    # Assemble the card: YAML front matter followed by boilerplate sections.
    parts = [
        "---\n",
        yaml_dump(metadata, default_flow_style=False),
        "---\n",
        "\n## Model description\n\nMore information needed\n",
        "\n## Intended uses & limitations\n\nMore information needed\n",
        "\n## Training and evaluation data\n\nMore information needed\n",
    ]
    if hyperparameters is not None:
        parts.append("\n## Training procedure\n")
        parts.append("\n### Training hyperparameters\n")
        parts.append("\nThe following hyperparameters were used during training:\n\n")
        parts.append(hyperparameters)
        parts.append("\n")
    # Only embed the plot section if the image was actually produced above.
    if plot_model and os.path.exists(f"{repo_dir}/model.png"):
        parts.append("\n ## Model Plot\n")
        parts.append("\n<details>")
        parts.append("\n<summary>View Model Plot</summary>\n")
        parts.append("\n![Model Image](./model.png)\n")
        parts.append("\n</details>")

    readme_path.write_text("".join(parts))
154
+
155
+
156
@_requires_keras_2_model
def save_pretrained_keras(
    model,
    save_directory: Union[str, Path],
    config: Optional[Dict[str, Any]] = None,
    include_optimizer: bool = False,
    plot_model: bool = True,
    tags: Optional[Union[list, str]] = None,
    **model_save_kwargs,
):
    """
    Saves a Keras model to save_directory in SavedModel format. Use this if
    you're using the Functional or Sequential APIs.

    Args:
        model (`Keras.Model`):
            The [Keras
            model](https://www.tensorflow.org/api_docs/python/tf/keras/Model)
            you'd like to save. The model must be compiled and built.
        save_directory (`str` or `Path`):
            Specify directory in which you want to save the Keras model.
        config (`dict`, *optional*):
            Configuration object to be saved alongside the model weights.
        include_optimizer(`bool`, *optional*, defaults to `False`):
            Whether or not to include optimizer in serialization.
        plot_model (`bool`, *optional*, defaults to `True`):
            Setting this to `True` will plot the model and put it in the model
            card. Requires graphviz and pydot to be installed.
        tags (Union[`str`,`list`], *optional*):
            List of tags that are related to model or string of a single tag. See example tags
            [here](https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1).
        model_save_kwargs(`dict`, *optional*):
            model_save_kwargs will be passed to
            [`tf.keras.models.save_model()`](https://www.tensorflow.org/api_docs/python/tf/keras/models/save_model).
    """
    # `keras` is None when TensorFlow could not be imported at module load time.
    if keras is None:
        raise ImportError("Called a Tensorflow-specific function but could not import it.")

    if not model.built:
        raise ValueError("Model should be built before trying to save")

    save_directory = Path(save_directory)
    save_directory.mkdir(parents=True, exist_ok=True)

    # saving config
    if config:
        if not isinstance(config, dict):
            raise RuntimeError(f"Provided config to save_pretrained_keras should be a dict. Got: '{type(config)}'")

        with (save_directory / CONFIG_NAME).open("w") as f:
            json.dump(config, f)

    # Normalize `tags` into the list form expected by the model card metadata.
    metadata = {}
    if isinstance(tags, list):
        metadata["tags"] = tags
    elif isinstance(tags, str):
        metadata["tags"] = [tags]

    # `task_name` is a deprecated alias for `tags`; fold it into the tag list.
    task_name = model_save_kwargs.pop("task_name", None)
    if task_name is not None:
        warnings.warn(
            "`task_name` input argument is deprecated. Pass `tags` instead.",
            FutureWarning,
        )
        if "tags" in metadata:
            metadata["tags"].append(task_name)
        else:
            metadata["tags"] = [task_name]

    # Persist the training history (losses/metrics per epoch) when present.
    if model.history is not None:
        if model.history.history != {}:
            path = save_directory / "history.json"
            if path.exists():
                warnings.warn(
                    "`history.json` file already exists, it will be overwritten by the history of this version.",
                    UserWarning,
                )
            with path.open("w", encoding="utf-8") as f:
                json.dump(model.history.history, f, indent=2, sort_keys=True)

    # Write README.md (no-op if one already exists), then serialize the model.
    _create_model_card(model, save_directory, plot_model, metadata)
    keras.models.save_model(model, save_directory, include_optimizer=include_optimizer, **model_save_kwargs)
238
+
239
+
240
def from_pretrained_keras(*args, **kwargs) -> "KerasModelHubMixin":
    r"""
    Instantiate a pretrained Keras model from a pre-trained model from the Hub.
    The model is expected to be in `SavedModel` format.

    Args:
        pretrained_model_name_or_path (`str` or `os.PathLike`):
            Can be either:
                - A string, the `model id` of a pretrained model hosted inside a
                  model repo on huggingface.co. Valid model ids can be located
                  at the root-level, like `bert-base-uncased`, or namespaced
                  under a user or organization name, like
                  `dbmdz/bert-base-german-cased`.
                - You can add `revision` by appending `@` at the end of model_id
                  simply like this: `dbmdz/bert-base-german-cased@main` Revision
                  is the specific model version to use. It can be a branch name,
                  a tag name, or a commit id, since we use a git-based system
                  for storing models and other artifacts on huggingface.co, so
                  `revision` can be any identifier allowed by git.
                - A path to a `directory` containing model weights saved using
                  [`~transformers.PreTrainedModel.save_pretrained`], e.g.,
                  `./my_model_directory/`.
                - `None` if you are both providing the configuration and state
                  dictionary (resp. with keyword arguments `config` and
                  `state_dict`).
        force_download (`bool`, *optional*, defaults to `False`):
            Whether to force the (re-)download of the model weights and
            configuration files, overriding the cached versions if they exist.
        resume_download (`bool`, *optional*, defaults to `False`):
            Whether to delete incompletely received files. Will attempt to
            resume the download if such a file exists.
        proxies (`Dict[str, str]`, *optional*):
            A dictionary of proxy servers to use by protocol or endpoint, e.g.,
            `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The
            proxies are used on each request.
        token (`str` or `bool`, *optional*):
            The token to use as HTTP bearer authorization for remote files. If
            `True`, will use the token generated when running `transformers-cli
            login` (stored in `~/.huggingface`).
        cache_dir (`Union[str, os.PathLike]`, *optional*):
            Path to a directory in which a downloaded pretrained model
            configuration should be cached if the standard cache should not be
            used.
        local_files_only(`bool`, *optional*, defaults to `False`):
            Whether to only look at local files (i.e., do not try to download
            the model).
        model_kwargs (`Dict`, *optional*):
            model_kwargs will be passed to the model during initialization

    <Tip>

    Passing `token=True` is required when you want to use a private
    model.

    </Tip>
    """
    # Thin functional facade: delegate to the mixin so both APIs share a single
    # implementation (see `KerasModelHubMixin._from_pretrained`).
    return KerasModelHubMixin.from_pretrained(*args, **kwargs)
297
+
298
+
299
@validate_hf_hub_args
@_requires_keras_2_model
def push_to_hub_keras(
    model,
    repo_id: str,
    *,
    config: Optional[dict] = None,
    commit_message: str = "Push Keras model using huggingface_hub.",
    private: bool = False,
    api_endpoint: Optional[str] = None,
    token: Optional[str] = None,
    branch: Optional[str] = None,
    create_pr: Optional[bool] = None,
    allow_patterns: Optional[Union[List[str], str]] = None,
    ignore_patterns: Optional[Union[List[str], str]] = None,
    delete_patterns: Optional[Union[List[str], str]] = None,
    log_dir: Optional[str] = None,
    include_optimizer: bool = False,
    tags: Optional[Union[list, str]] = None,
    plot_model: bool = True,
    **model_save_kwargs,
):
    """
    Upload model checkpoint to the Hub.

    Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use
    `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more
    details.

    Args:
        model (`Keras.Model`):
            The [Keras model](`https://www.tensorflow.org/api_docs/python/tf/keras/Model`) you'd like to push to the
            Hub. The model must be compiled and built.
        repo_id (`str`):
            ID of the repository to push to (example: `"username/my-model"`).
        commit_message (`str`, *optional*, defaults to "Add Keras model"):
            Message to commit while pushing.
        private (`bool`, *optional*, defaults to `False`):
            Whether the repository created should be private.
        api_endpoint (`str`, *optional*):
            The API endpoint to use when pushing the model to the hub.
        token (`str`, *optional*):
            The token to use as HTTP bearer authorization for remote files. If
            not set, will use the token set when logging in with
            `huggingface-cli login` (stored in `~/.huggingface`).
        branch (`str`, *optional*):
            The git branch on which to push the model. This defaults to
            the default branch as specified in your repository, which
            defaults to `"main"`.
        create_pr (`boolean`, *optional*):
            Whether or not to create a Pull Request from `branch` with that commit.
            Defaults to `False`.
        config (`dict`, *optional*):
            Configuration object to be saved alongside the model weights.
        allow_patterns (`List[str]` or `str`, *optional*):
            If provided, only files matching at least one pattern are pushed.
        ignore_patterns (`List[str]` or `str`, *optional*):
            If provided, files matching any of the patterns are not pushed.
        delete_patterns (`List[str]` or `str`, *optional*):
            If provided, remote files matching any of the patterns will be deleted from the repo.
        log_dir (`str`, *optional*):
            TensorBoard logging directory to be pushed. The Hub automatically
            hosts and displays a TensorBoard instance if log files are included
            in the repository.
        include_optimizer (`bool`, *optional*, defaults to `False`):
            Whether or not to include optimizer during serialization.
        tags (Union[`list`, `str`], *optional*):
            List of tags that are related to model or string of a single tag. See example tags
            [here](https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1).
        plot_model (`bool`, *optional*, defaults to `True`):
            Setting this to `True` will plot the model and put it in the model
            card. Requires graphviz and pydot to be installed.
        model_save_kwargs(`dict`, *optional*):
            model_save_kwargs will be passed to
            [`tf.keras.models.save_model()`](https://www.tensorflow.org/api_docs/python/tf/keras/models/save_model).

    Returns:
        The url of the commit of your model in the given repository.
    """
    api = HfApi(endpoint=api_endpoint)
    repo_id = api.create_repo(repo_id=repo_id, token=token, private=private, exist_ok=True).repo_id

    # Push the files to the repo in a single commit
    with SoftTemporaryDirectory() as tmp:
        saved_path = Path(tmp) / repo_id
        save_pretrained_keras(
            model,
            saved_path,
            config=config,
            include_optimizer=include_optimizer,
            tags=tags,
            plot_model=plot_model,
            **model_save_kwargs,
        )

        # If `log_dir` provided, delete remote logs and upload new ones
        if log_dir is not None:
            # Normalize `delete_patterns` into a *fresh* list. Bug fix: the
            # previous implementation appended "logs/*" to the caller-provided
            # list in-place, mutating the caller's argument as a side effect.
            if delete_patterns is None:
                delete_patterns = []
            elif isinstance(delete_patterns, str):
                delete_patterns = [delete_patterns]  # convert single pattern to a list
            else:
                delete_patterns = list(delete_patterns)  # copy to avoid mutating caller's list
            delete_patterns.append("logs/*")  # delete remote logs so runs don't mix
            copytree(log_dir, saved_path / "logs")

        return api.upload_folder(
            repo_type="model",
            repo_id=repo_id,
            folder_path=saved_path,
            commit_message=commit_message,
            token=token,
            revision=branch,
            create_pr=create_pr,
            allow_patterns=allow_patterns,
            ignore_patterns=ignore_patterns,
            delete_patterns=delete_patterns,
        )
420
+
421
+
422
class KerasModelHubMixin(ModelHubMixin):
    """
    Implementation of [`ModelHubMixin`] to provide model Hub upload/download
    capabilities to Keras models.


    ```python
    >>> import tensorflow as tf
    >>> from huggingface_hub import KerasModelHubMixin


    >>> class MyModel(tf.keras.Model, KerasModelHubMixin):
    ...     def __init__(self, **kwargs):
    ...         super().__init__()
    ...         self.config = kwargs.pop("config", None)
    ...         self.dummy_inputs = ...
    ...         self.layer = ...

    ...     def call(self, *args):
    ...         return ...


    >>> # Initialize and compile the model as you normally would
    >>> model = MyModel()
    >>> model.compile(...)
    >>> # Build the graph by training it or passing dummy inputs
    >>> _ = model(model.dummy_inputs)
    >>> # Save model weights to local directory
    >>> model.save_pretrained("my-awesome-model")
    >>> # Push model weights to the Hub
    >>> model.push_to_hub("my-awesome-model")
    >>> # Download and initialize weights from the Hub
    >>> model = MyModel.from_pretrained("username/super-cool-model")
    ```
    """

    def _save_pretrained(self, save_directory):
        # Delegate to the functional API so both code paths stay in sync.
        save_pretrained_keras(self, save_directory)

    @classmethod
    def _from_pretrained(
        cls,
        model_id,
        revision,
        cache_dir,
        force_download,
        proxies,
        resume_download,
        local_files_only,
        token,
        config: Optional[Dict[str, Any]] = None,
        **model_kwargs,
    ):
        """Here we just call [`from_pretrained_keras`] function so both the mixin and
        functional APIs stay in sync.

        TODO - Some args above aren't used since we are calling
        snapshot_download instead of hf_hub_download.
        """
        # `keras` is None when TensorFlow could not be imported at module load time.
        if keras is None:
            raise ImportError("Called a TensorFlow-specific function but could not import it.")

        # Root is either a local filepath matching model_id or a cached snapshot
        if not os.path.isdir(model_id):
            storage_folder = snapshot_download(
                repo_id=model_id,
                revision=revision,
                cache_dir=cache_dir,
                library_name="keras",
                library_version=get_tf_version(),
            )
        else:
            storage_folder = model_id

        # TODO: change this in a future PR. We are not returning a KerasModelHubMixin instance here...
        model = keras.models.load_model(storage_folder)

        # For now, we add a new attribute, config, to store the config loaded from the hub/a local dir.
        model.config = config

        return model
env-llmeval/lib/python3.10/site-packages/huggingface_hub/repocard_data.py ADDED
@@ -0,0 +1,729 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import copy
2
+ from collections import defaultdict
3
+ from dataclasses import dataclass
4
+ from typing import Any, Dict, List, Optional, Tuple, Union
5
+
6
+ from huggingface_hub.utils import logging, yaml_dump
7
+
8
+
9
+ logger = logging.get_logger(__name__)
10
+
11
+
12
@dataclass
class EvalResult:
    """
    Flattened representation of individual evaluation results found in model-index of Model Cards.

    For more information on the model-index spec, see https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1.

    Args:
        task_type (`str`):
            The task identifier. Example: "image-classification".
        dataset_type (`str`):
            The dataset identifier. Example: "common_voice". Use dataset id from https://hf.co/datasets.
        dataset_name (`str`):
            A pretty name for the dataset. Example: "Common Voice (French)".
        metric_type (`str`):
            The metric identifier. Example: "wer". Use metric id from https://hf.co/metrics.
        metric_value (`Any`):
            The metric value. Example: 0.9 or "20.0 ± 1.2".
        task_name (`str`, *optional*):
            A pretty name for the task. Example: "Speech Recognition".
        dataset_config (`str`, *optional*):
            The name of the dataset configuration used in `load_dataset()`.
            Example: fr in `load_dataset("common_voice", "fr")`. See the `datasets` docs for more info:
            https://hf.co/docs/datasets/package_reference/loading_methods#datasets.load_dataset.name
        dataset_split (`str`, *optional*):
            The split used in `load_dataset()`. Example: "test".
        dataset_revision (`str`, *optional*):
            The revision (AKA Git Sha) of the dataset used in `load_dataset()`.
            Example: 5503434ddd753f426f4b38109466949a1217c2bb
        dataset_args (`Dict[str, Any]`, *optional*):
            The arguments passed during `Metric.compute()`. Example for `bleu`: `{"max_order": 4}`
        metric_name (`str`, *optional*):
            A pretty name for the metric. Example: "Test WER".
        metric_config (`str`, *optional*):
            The name of the metric configuration used in `load_metric()`.
            Example: bleurt-large-512 in `load_metric("bleurt", "bleurt-large-512")`.
            See the `datasets` docs for more info: https://huggingface.co/docs/datasets/v2.1.0/en/loading#load-configurations
        metric_args (`Dict[str, Any]`, *optional*):
            The arguments passed during `Metric.compute()`. Example for `bleu`: max_order: 4
        verified (`bool`, *optional*):
            Indicates whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not. Automatically computed by Hugging Face, do not set.
        verify_token (`str`, *optional*):
            A JSON Web Token that is used to verify whether the metrics originate from Hugging Face's [evaluation service](https://huggingface.co/spaces/autoevaluate/model-evaluator) or not.
        source_name (`str`, *optional*):
            The name of the source of the evaluation result. Example: "Open LLM Leaderboard".
        source_url (`str`, *optional*):
            The URL of the source of the evaluation result. Example: "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard".
    """

    # --- Required fields (declaration order defines the positional signature) ---

    # The task identifier, e.g. "automatic-speech-recognition".
    task_type: str

    # The dataset identifier, e.g. "common_voice" (id from https://hf.co/datasets).
    dataset_type: str

    # A pretty name for the dataset, e.g. "Common Voice (French)".
    dataset_name: str

    # The metric identifier, e.g. "wer" (id from https://hf.co/metrics).
    metric_type: str

    # Value of the metric, e.g. 20.0 or "20.0 ± 1.2".
    metric_value: Any

    # --- Optional fields ---

    # A pretty name for the task, e.g. "Speech Recognition".
    task_name: Optional[str] = None

    # The name of the dataset configuration used in `load_dataset()`,
    # e.g. fr in `load_dataset("common_voice", "fr")`.
    dataset_config: Optional[str] = None

    # The split used in `load_dataset()`, e.g. "test".
    dataset_split: Optional[str] = None

    # The revision (AKA Git Sha) of the dataset used in `load_dataset()`.
    dataset_revision: Optional[str] = None

    # The arguments passed during `Metric.compute()`, e.g. for `bleu`: max_order: 4.
    dataset_args: Optional[Dict[str, Any]] = None

    # A pretty name for the metric, e.g. "Test WER".
    metric_name: Optional[str] = None

    # The name of the metric configuration used in `load_metric()`,
    # e.g. bleurt-large-512 in `load_metric("bleurt", "bleurt-large-512")`.
    metric_config: Optional[str] = None

    # The arguments passed during `Metric.compute()`, e.g. for `bleu`: max_order: 4.
    metric_args: Optional[Dict[str, Any]] = None

    # Whether the metric originates from Hugging Face's evaluation service.
    # Automatically computed by Hugging Face, do not set.
    verified: Optional[bool] = None

    # JSON Web Token used to verify that the metric originates from
    # Hugging Face's evaluation service.
    verify_token: Optional[str] = None

    # The name of the source of the evaluation result, e.g. "Open LLM Leaderboard".
    source_name: Optional[str] = None

    # The URL of the source of the evaluation result.
    source_url: Optional[str] = None

    @property
    def unique_identifier(self) -> tuple:
        """Returns a tuple that uniquely identifies this evaluation."""
        return (
            self.task_type,
            self.dataset_type,
            self.dataset_config,
            self.dataset_split,
            self.dataset_revision,
        )

    def is_equal_except_value(self, other: "EvalResult") -> bool:
        """
        Return True if `self` and `other` describe exactly the same metric but with a
        different value.
        """
        # `metric_value` is the quantity being compared, and `verify_token` is
        # derived from `metric_value` by Hugging Face's evaluation service, so
        # neither participates in the comparison.
        skipped = ("metric_value", "verify_token")
        return all(
            getattr(self, name) == getattr(other, name)
            for name in self.__dict__
            if name not in skipped
        )

    def __post_init__(self) -> None:
        # A named source must always come with a URL pointing at it.
        if self.source_name is not None and self.source_url is None:
            raise ValueError("If `source_name` is provided, `source_url` must also be provided.")
162
+
163
+
164
@dataclass
class CardData:
    """Structure containing metadata from a RepoCard.

    [`CardData`] is the parent class of [`ModelCardData`] and [`DatasetCardData`].

    Metadata can be exported as a dictionary or YAML. Export can be customized to alter the representation of the data
    (example: flatten evaluation results). `CardData` behaves as a dictionary (can get, pop, set values) but do not
    inherit from `dict` to allow this export step.
    """

    def __init__(self, ignore_metadata_errors: bool = False, **kwargs):
        # Arbitrary metadata keys become plain instance attributes.
        self.__dict__.update(kwargs)

    def to_dict(self) -> Dict[str, Any]:
        """Converts CardData to a dict.

        Returns:
            `dict`: CardData represented as a dictionary ready to be dumped to a YAML
            block for inclusion in a README.md file.
        """
        # Deep-copy first so the `_to_dict` hook can mutate freely without
        # touching the live attributes; then strip keys whose value is None.
        exported = copy.deepcopy(self.__dict__)
        self._to_dict(exported)
        return _remove_none(exported)

    def _to_dict(self, data_dict):
        """Use this method in child classes to alter the dict representation of the data. Alter the dict in-place.

        Args:
            data_dict (`dict`): The raw dict representation of the card data.
        """
        # Intentionally a no-op in the base class.

    def to_yaml(self, line_break=None) -> str:
        """Dumps CardData to a YAML block for inclusion in a README.md file.

        Args:
            line_break (str, *optional*):
                The line break to use when dumping to yaml.

        Returns:
            `str`: CardData represented as a YAML block.
        """
        serialized = yaml_dump(self.to_dict(), sort_keys=False, line_break=line_break)
        return serialized.strip()

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return self.to_yaml()

    def get(self, key: str, default: Any = None) -> Any:
        """Get value for a given metadata key."""
        return self.__dict__.get(key, default)

    def pop(self, key: str, default: Any = None) -> Any:
        """Pop value for a given metadata key."""
        return self.__dict__.pop(key, default)

    def __getitem__(self, key: str) -> Any:
        """Get value for a given metadata key."""
        return self.__dict__[key]

    def __setitem__(self, key: str, value: Any) -> None:
        """Set value for a given metadata key."""
        self.__dict__[key] = value

    def __contains__(self, key: str) -> bool:
        """Check if a given metadata key is set."""
        return key in self.__dict__

    def __len__(self) -> int:
        """Return the number of metadata keys set."""
        return len(self.__dict__)
239
+
240
+
241
+ class ModelCardData(CardData):
242
+ """Model Card Metadata that is used by Hugging Face Hub when included at the top of your README.md
243
+
244
+ Args:
245
+ language (`Union[str, List[str]]`, *optional*):
246
+ Language of model's training data or metadata. It must be an ISO 639-1, 639-2 or
247
+ 639-3 code (two/three letters), or a special value like "code", "multilingual". Defaults to `None`.
248
+ license (`str`, *optional*):
249
+ License of this model. Example: apache-2.0 or any license from
250
+ https://huggingface.co/docs/hub/repositories-licenses. Defaults to None.
251
+ library_name (`str`, *optional*):
252
+ Name of library used by this model. Example: keras or any library from
253
+ https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/src/model-libraries.ts.
254
+ Defaults to None.
255
+ tags (`List[str]`, *optional*):
256
+ List of tags to add to your model that can be used when filtering on the Hugging
257
+ Face Hub. Defaults to None.
258
+ base_model (`str` or `List[str]`, *optional*):
259
+ The identifier of the base model from which the model derives. This is applicable for example if your model is a
260
+ fine-tune or adapter of an existing model. The value must be the ID of a model on the Hub (or a list of IDs
261
+ if your model derives from multiple models). Defaults to None.
262
+ datasets (`List[str]`, *optional*):
263
+ List of datasets that were used to train this model. Should be a dataset ID
264
+ found on https://hf.co/datasets. Defaults to None.
265
+ metrics (`List[str]`, *optional*):
266
+ List of metrics used to evaluate this model. Should be a metric name that can be found
267
+ at https://hf.co/metrics. Example: 'accuracy'. Defaults to None.
268
+ eval_results (`Union[List[EvalResult], EvalResult]`, *optional*):
269
+ List of `huggingface_hub.EvalResult` that define evaluation results of the model. If provided,
270
+ `model_name` is used to as a name on PapersWithCode's leaderboards. Defaults to `None`.
271
+ model_name (`str`, *optional*):
272
+ A name for this model. It is used along with
273
+ `eval_results` to construct the `model-index` within the card's metadata. The name
274
+ you supply here is what will be used on PapersWithCode's leaderboards. If None is provided
275
+ then the repo name is used as a default. Defaults to None.
276
+ ignore_metadata_errors (`str`):
277
+ If True, errors while parsing the metadata section will be ignored. Some information might be lost during
278
+ the process. Use it at your own risk.
279
+ kwargs (`dict`, *optional*):
280
+ Additional metadata that will be added to the model card. Defaults to None.
281
+
282
+ Example:
283
+ ```python
284
+ >>> from huggingface_hub import ModelCardData
285
+ >>> card_data = ModelCardData(
286
+ ... language="en",
287
+ ... license="mit",
288
+ ... library_name="timm",
289
+ ... tags=['image-classification', 'resnet'],
290
+ ... )
291
+ >>> card_data.to_dict()
292
+ {'language': 'en', 'license': 'mit', 'library_name': 'timm', 'tags': ['image-classification', 'resnet']}
293
+
294
+ ```
295
+ """
296
+
297
+ def __init__(
298
+ self,
299
+ *,
300
+ language: Optional[Union[str, List[str]]] = None,
301
+ license: Optional[str] = None,
302
+ library_name: Optional[str] = None,
303
+ tags: Optional[List[str]] = None,
304
+ base_model: Optional[Union[str, List[str]]] = None,
305
+ datasets: Optional[List[str]] = None,
306
+ metrics: Optional[List[str]] = None,
307
+ eval_results: Optional[List[EvalResult]] = None,
308
+ model_name: Optional[str] = None,
309
+ ignore_metadata_errors: bool = False,
310
+ **kwargs,
311
+ ):
312
+ self.language = language
313
+ self.license = license
314
+ self.library_name = library_name
315
+ self.tags = _to_unique_list(tags)
316
+ self.base_model = base_model
317
+ self.datasets = datasets
318
+ self.metrics = metrics
319
+ self.eval_results = eval_results
320
+ self.model_name = model_name
321
+
322
+ model_index = kwargs.pop("model-index", None)
323
+ if model_index:
324
+ try:
325
+ model_name, eval_results = model_index_to_eval_results(model_index)
326
+ self.model_name = model_name
327
+ self.eval_results = eval_results
328
+ except (KeyError, TypeError) as error:
329
+ if ignore_metadata_errors:
330
+ logger.warning("Invalid model-index. Not loading eval results into CardData.")
331
+ else:
332
+ raise ValueError(
333
+ f"Invalid `model_index` in metadata cannot be parsed: {error.__class__} {error}. Pass"
334
+ " `ignore_metadata_errors=True` to ignore this error while loading a Model Card. Warning:"
335
+ " some information will be lost. Use it at your own risk."
336
+ )
337
+
338
+ super().__init__(**kwargs)
339
+
340
+ if self.eval_results:
341
+ if type(self.eval_results) == EvalResult:
342
+ self.eval_results = [self.eval_results]
343
+ if self.model_name is None:
344
+ raise ValueError("Passing `eval_results` requires `model_name` to be set.")
345
+
346
+ def _to_dict(self, data_dict):
347
+ """Format the internal data dict. In this case, we convert eval results to a valid model index"""
348
+ if self.eval_results is not None:
349
+ data_dict["model-index"] = eval_results_to_model_index(self.model_name, self.eval_results)
350
+ del data_dict["eval_results"], data_dict["model_name"]
351
+
352
+
353
class DatasetCardData(CardData):
    """Dataset Card Metadata used by the Hugging Face Hub when included at the top of your README.md.

    Args:
        language (`List[str]`, *optional*):
            Language of the dataset's data or metadata. Must be an ISO 639-1, 639-2 or
            639-3 code (two/three letters), or a special value like "code" or "multilingual".
        license (`Union[str, List[str]]`, *optional*):
            License(s) of this dataset. Example: apache-2.0 or any license from
            https://huggingface.co/docs/hub/repositories-licenses.
        annotations_creators (`Union[str, List[str]]`, *optional*):
            How the annotations for the dataset were created.
            Options are: 'found', 'crowdsourced', 'expert-generated', 'machine-generated', 'no-annotation', 'other'.
        language_creators (`Union[str, List[str]]`, *optional*):
            How the text-based data in the dataset was created.
            Options are: 'found', 'crowdsourced', 'expert-generated', 'machine-generated', 'other'.
        multilinguality (`Union[str, List[str]]`, *optional*):
            Whether the dataset is multilingual.
            Options are: 'monolingual', 'multilingual', 'translation', 'other'.
        size_categories (`Union[str, List[str]]`, *optional*):
            The number of examples in the dataset. Options are: 'n<1K', '1K<n<10K', '10K<n<100K',
            '100K<n<1M', '1M<n<10M', '10M<n<100M', '100M<n<1B', '1B<n<10B', '10B<n<100B', '100B<n<1T', 'n>1T', and 'other'.
        source_datasets (`List[str]`, *optional*):
            Indicates whether the dataset is an original dataset or extended from another existing dataset.
            Options are: 'original' and 'extended'.
        task_categories (`Union[str, List[str]]`, *optional*):
            What categories of task does the dataset support?
        task_ids (`Union[str, List[str]]`, *optional*):
            What specific tasks does the dataset support?
        paperswithcode_id (`str`, *optional*):
            ID of the dataset on PapersWithCode.
        pretty_name (`str`, *optional*):
            A more human-readable name for the dataset. (e.g. "Cats vs. Dogs")
        train_eval_index (`Dict`, *optional*):
            A dictionary that describes the necessary spec for doing evaluation on the Hub.
            If not provided, it will be gathered from the 'train-eval-index' key of the kwargs.
        config_names (`Union[str, List[str]]`, *optional*):
            A list of the available dataset configs for the dataset.
    """

    def __init__(
        self,
        *,
        language: Optional[Union[str, List[str]]] = None,
        license: Optional[Union[str, List[str]]] = None,
        annotations_creators: Optional[Union[str, List[str]]] = None,
        language_creators: Optional[Union[str, List[str]]] = None,
        multilinguality: Optional[Union[str, List[str]]] = None,
        size_categories: Optional[Union[str, List[str]]] = None,
        source_datasets: Optional[List[str]] = None,
        task_categories: Optional[Union[str, List[str]]] = None,
        task_ids: Optional[Union[str, List[str]]] = None,
        paperswithcode_id: Optional[str] = None,
        pretty_name: Optional[str] = None,
        train_eval_index: Optional[Dict] = None,
        config_names: Optional[Union[str, List[str]]] = None,
        ignore_metadata_errors: bool = False,
        **kwargs,
    ):
        # Fields are stored as given; validation happens Hub-side.
        self.language = language
        self.license = license
        self.annotations_creators = annotations_creators
        self.language_creators = language_creators
        self.multilinguality = multilinguality
        self.size_categories = size_categories
        self.source_datasets = source_datasets
        self.task_categories = task_categories
        self.task_ids = task_ids
        self.paperswithcode_id = paperswithcode_id
        self.pretty_name = pretty_name
        self.config_names = config_names

        # 'train-eval-index' is the YAML spelling; fall back to it when the
        # Pythonic argument is not given (or falsy).
        # TODO - maybe handle this similarly to EvalResult?
        self.train_eval_index = train_eval_index or kwargs.pop("train-eval-index", None)
        super().__init__(**kwargs)

    def _to_dict(self, data_dict):
        """Rename the Pythonic `train_eval_index` key to its YAML form `train-eval-index`."""
        value = data_dict.pop("train_eval_index")
        data_dict["train-eval-index"] = value
431
+
432
+
433
class SpaceCardData(CardData):
    """Space Card Metadata that is used by Hugging Face Hub when included at the top of your README.md

    To get an exhaustive reference of Spaces configuration, please visit https://huggingface.co/docs/hub/spaces-config-reference#spaces-configuration-reference.

    Args:
        title (`str`, *optional*):
            Title of the Space.
        sdk (`str`, *optional*):
            SDK of the Space (one of `gradio`, `streamlit`, `docker`, or `static`).
        sdk_version (`str`, *optional*):
            Version of the used SDK (if Gradio/Streamlit sdk).
        python_version (`str`, *optional*):
            Python version used in the Space (if Gradio/Streamlit sdk).
        app_file (`str`, *optional*):
            Path to your main application file (which contains either gradio or streamlit Python code, or static html code).
            Path is relative to the root of the repository.
        app_port (`int`, *optional*):
            Port on which your application is running. Used only if sdk is `docker`.
        license (`str`, *optional*):
            License of this model. Example: apache-2.0 or any license from
            https://huggingface.co/docs/hub/repositories-licenses.
        duplicated_from (`str`, *optional*):
            ID of the original Space if this is a duplicated Space.
        models (`List[str]`, *optional*):
            List of models related to this Space. Should be a model ID found on https://hf.co/models.
        datasets (`List[str]`, *optional*):
            List of datasets related to this Space. Should be a dataset ID found on https://hf.co/datasets.
        tags (`List[str]`, *optional*):
            List of tags to add to your Space that can be used when filtering on the Hub.
        ignore_metadata_errors (`bool`):
            If True, errors while parsing the metadata section will be ignored. Some information might be lost during
            the process. Use it at your own risk.
        kwargs (`dict`, *optional*):
            Additional metadata that will be added to the space card.

    Example:
        ```python
        >>> from huggingface_hub import SpaceCardData
        >>> card_data = SpaceCardData(
        ...     title="Dreambooth Training",
        ...     license="mit",
        ...     sdk="gradio",
        ...     duplicated_from="multimodalart/dreambooth-training"
        ... )
        >>> card_data.to_dict()
        {'title': 'Dreambooth Training', 'sdk': 'gradio', 'license': 'mit', 'duplicated_from': 'multimodalart/dreambooth-training'}
        ```
    """

    def __init__(
        self,
        *,
        title: Optional[str] = None,
        sdk: Optional[str] = None,
        sdk_version: Optional[str] = None,
        python_version: Optional[str] = None,
        app_file: Optional[str] = None,
        app_port: Optional[int] = None,
        license: Optional[str] = None,
        duplicated_from: Optional[str] = None,
        models: Optional[List[str]] = None,
        datasets: Optional[List[str]] = None,
        tags: Optional[List[str]] = None,
        ignore_metadata_errors: bool = False,  # unused here; accepted for API consistency with other card data classes
        **kwargs,
    ):
        self.title = title
        self.sdk = sdk
        self.sdk_version = sdk_version
        self.python_version = python_version
        self.app_file = app_file
        self.app_port = app_port
        self.license = license
        self.duplicated_from = duplicated_from
        self.models = models
        self.datasets = datasets
        self.tags = _to_unique_list(tags)  # deduplicated, first-seen order kept
        super().__init__(**kwargs)
512
+
513
+
514
def model_index_to_eval_results(model_index: List[Dict[str, Any]]) -> Tuple[str, List[EvalResult]]:
    """Takes in a model index and returns the model name and a list of `huggingface_hub.EvalResult` objects.

    A detailed spec of the model index can be found here:
    https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1

    Args:
        model_index (`List[Dict[str, Any]]`):
            A model index data structure, likely coming from a README.md file on the
            Hugging Face Hub.

    Returns:
        model_name (`str`):
            The name of the model as found in the model index. This is used as the
            identifier for the model on leaderboards like PapersWithCode.
        eval_results (`List[EvalResult]`):
            A list of `huggingface_hub.EvalResult` objects containing the metrics
            reported in the provided model_index.

    Example:
        ```python
        >>> from huggingface_hub.repocard_data import model_index_to_eval_results
        >>> # Define a minimal model index
        >>> model_index = [
        ...     {
        ...         "name": "my-cool-model",
        ...         "results": [
        ...             {
        ...                 "task": {
        ...                     "type": "image-classification"
        ...                 },
        ...                 "dataset": {
        ...                     "type": "beans",
        ...                     "name": "Beans"
        ...                 },
        ...                 "metrics": [
        ...                     {
        ...                         "type": "accuracy",
        ...                         "value": 0.9
        ...                     }
        ...                 ]
        ...             }
        ...         ]
        ...     }
        ... ]
        >>> model_name, eval_results = model_index_to_eval_results(model_index)
        >>> model_name
        'my-cool-model'
        >>> eval_results[0].task_type
        'image-classification'
        >>> eval_results[0].metric_type
        'accuracy'

        ```
    """
    eval_results = []
    for entry in model_index:
        # Later entries overwrite `name`; the last one seen is returned.
        name = entry["name"]
        for result in entry["results"]:
            task = result["task"]          # "type" is required, "name" optional
            dataset = result["dataset"]    # "type" and "name" required, the rest optional
            source = result.get("source", {})

            # One EvalResult per reported metric, sharing the task/dataset metadata.
            for metric in result["metrics"]:
                eval_results.append(
                    EvalResult(
                        task_type=task["type"],  # Required
                        dataset_type=dataset["type"],  # Required
                        dataset_name=dataset["name"],  # Required
                        metric_type=metric["type"],  # Required
                        metric_value=metric["value"],  # Required
                        task_name=task.get("name"),
                        dataset_config=dataset.get("config"),
                        dataset_split=dataset.get("split"),
                        dataset_revision=dataset.get("revision"),
                        dataset_args=dataset.get("args"),
                        metric_name=metric.get("name"),
                        metric_args=metric.get("args"),
                        metric_config=metric.get("config"),
                        verified=metric.get("verified"),
                        verify_token=metric.get("verifyToken"),
                        source_name=source.get("name"),
                        source_url=source.get("url"),
                    )
                )
    return name, eval_results
616
+
617
+
618
+ def _remove_none(obj):
619
+ """
620
+ Recursively remove `None` values from a dict. Borrowed from: https://stackoverflow.com/a/20558778
621
+ """
622
+ if isinstance(obj, (list, tuple, set)):
623
+ return type(obj)(_remove_none(x) for x in obj if x is not None)
624
+ elif isinstance(obj, dict):
625
+ return type(obj)((_remove_none(k), _remove_none(v)) for k, v in obj.items() if k is not None and v is not None)
626
+ else:
627
+ return obj
628
+
629
+
630
def eval_results_to_model_index(model_name: str, eval_results: List[EvalResult]) -> List[Dict[str, Any]]:
    """Takes in given model name and list of `huggingface_hub.EvalResult` and returns a
    valid model-index that will be compatible with the format expected by the
    Hugging Face Hub.

    Args:
        model_name (`str`):
            Name of the model (ex. "my-cool-model"). This is used as the identifier
            for the model on leaderboards like PapersWithCode.
        eval_results (`List[EvalResult]`):
            List of `huggingface_hub.EvalResult` objects containing the metrics to be
            reported in the model-index.

    Returns:
        model_index (`List[Dict[str, Any]]`): The eval_results converted to a model-index.

    Example:
        ```python
        >>> from huggingface_hub.repocard_data import eval_results_to_model_index, EvalResult
        >>> # Define minimal eval_results
        >>> eval_results = [
        ...     EvalResult(
        ...         task_type="image-classification",  # Required
        ...         dataset_type="beans",  # Required
        ...         dataset_name="Beans",  # Required
        ...         metric_type="accuracy",  # Required
        ...         metric_value=0.9,  # Required
        ...     )
        ... ]
        >>> eval_results_to_model_index("my-cool-model", eval_results)
        [{'name': 'my-cool-model', 'results': [{'task': {'type': 'image-classification'}, 'dataset': {'name': 'Beans', 'type': 'beans'}, 'metrics': [{'type': 'accuracy', 'value': 0.9}]}]}]

        ```
    """
    # Metrics are reported on a unique task-and-dataset basis, so group the
    # EvalResults by that identity first.
    grouped: Dict[Any, List[EvalResult]] = defaultdict(list)
    for res in eval_results:
        grouped[res.unique_identifier].append(res)

    # Turn each group into one model-index result entry.
    results_data = []
    for group in grouped.values():
        head = group[0]  # task/dataset metadata is shared by the whole group
        entry = {
            "task": {
                "type": head.task_type,
                "name": head.task_name,
            },
            "dataset": {
                "name": head.dataset_name,
                "type": head.dataset_type,
                "config": head.dataset_config,
                "split": head.dataset_split,
                "revision": head.dataset_revision,
                "args": head.dataset_args,
            },
            "metrics": [
                {
                    "type": res.metric_type,
                    "value": res.metric_value,
                    "name": res.metric_name,
                    "config": res.metric_config,
                    "args": res.metric_args,
                    "verified": res.verified,
                    "verifyToken": res.verify_token,
                }
                for res in group
            ],
        }
        if head.source_url is not None:
            entry["source"] = {"url": head.source_url}
            if head.source_name is not None:
                entry["source"]["name"] = head.source_name
        results_data.append(entry)

    # TODO - Check if there cases where this list is longer than one?
    # The model index itself is a one-element list of dicts; strip all Nones.
    return _remove_none(
        [
            {
                "name": model_name,
                "results": results_data,
            }
        ]
    )
720
+
721
+
722
+ def _to_unique_list(tags: Optional[List[str]]) -> Optional[List[str]]:
723
+ if tags is None:
724
+ return tags
725
+ unique_tags = [] # make tags unique + keep order explicitly
726
+ for tag in tags:
727
+ if tag not in unique_tags:
728
+ unique_tags.append(tag)
729
+ return unique_tags
env-llmeval/lib/python3.10/site-packages/more_itertools/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ """More routines for operating on iterables, beyond itertools"""
2
+
3
+ from .more import * # noqa
4
+ from .recipes import * # noqa
5
+
6
+ __version__ = '10.2.0'
env-llmeval/lib/python3.10/site-packages/more_itertools/__init__.pyi ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ from .more import *
2
+ from .recipes import *
env-llmeval/lib/python3.10/site-packages/more_itertools/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (321 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/more_itertools/__pycache__/more.cpython-310.pyc ADDED
Binary file (133 kB). View file
 
env-llmeval/lib/python3.10/site-packages/more_itertools/__pycache__/recipes.cpython-310.pyc ADDED
Binary file (28.4 kB). View file
 
env-llmeval/lib/python3.10/site-packages/more_itertools/more.py ADDED
The diff for this file is too large to render. See raw diff
 
env-llmeval/lib/python3.10/site-packages/more_itertools/more.pyi ADDED
@@ -0,0 +1,695 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Stubs for more_itertools.more"""
2
+ from __future__ import annotations
3
+
4
+ from types import TracebackType
5
+ from typing import (
6
+ Any,
7
+ Callable,
8
+ Container,
9
+ ContextManager,
10
+ Generic,
11
+ Hashable,
12
+ Iterable,
13
+ Iterator,
14
+ overload,
15
+ Reversible,
16
+ Sequence,
17
+ Sized,
18
+ Type,
19
+ TypeVar,
20
+ type_check_only,
21
+ )
22
+ from typing_extensions import Protocol
23
+
24
+ # Type and type variable definitions
25
+ _T = TypeVar('_T')
26
+ _T1 = TypeVar('_T1')
27
+ _T2 = TypeVar('_T2')
28
+ _U = TypeVar('_U')
29
+ _V = TypeVar('_V')
30
+ _W = TypeVar('_W')
31
+ _T_co = TypeVar('_T_co', covariant=True)
32
+ _GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
33
+ _Raisable = BaseException | Type[BaseException]
34
+
35
+ @type_check_only
36
+ class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
37
+
38
+ @type_check_only
39
+ class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ...
40
+
41
+ @type_check_only
42
+ class _SupportsSlicing(Protocol[_T_co]):
43
+ def __getitem__(self, __k: slice) -> _T_co: ...
44
+
45
+ def chunked(
46
+ iterable: Iterable[_T], n: int | None, strict: bool = ...
47
+ ) -> Iterator[list[_T]]: ...
48
+ @overload
49
+ def first(iterable: Iterable[_T]) -> _T: ...
50
+ @overload
51
+ def first(iterable: Iterable[_T], default: _U) -> _T | _U: ...
52
+ @overload
53
+ def last(iterable: Iterable[_T]) -> _T: ...
54
+ @overload
55
+ def last(iterable: Iterable[_T], default: _U) -> _T | _U: ...
56
+ @overload
57
+ def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ...
58
+ @overload
59
+ def nth_or_last(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
60
+
61
+ class peekable(Generic[_T], Iterator[_T]):
62
+ def __init__(self, iterable: Iterable[_T]) -> None: ...
63
+ def __iter__(self) -> peekable[_T]: ...
64
+ def __bool__(self) -> bool: ...
65
+ @overload
66
+ def peek(self) -> _T: ...
67
+ @overload
68
+ def peek(self, default: _U) -> _T | _U: ...
69
+ def prepend(self, *items: _T) -> None: ...
70
+ def __next__(self) -> _T: ...
71
+ @overload
72
+ def __getitem__(self, index: int) -> _T: ...
73
+ @overload
74
+ def __getitem__(self, index: slice) -> list[_T]: ...
75
+
76
+ def consumer(func: _GenFn) -> _GenFn: ...
77
+ def ilen(iterable: Iterable[_T]) -> int: ...
78
+ def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
79
+ def with_iter(
80
+ context_manager: ContextManager[Iterable[_T]],
81
+ ) -> Iterator[_T]: ...
82
+ def one(
83
+ iterable: Iterable[_T],
84
+ too_short: _Raisable | None = ...,
85
+ too_long: _Raisable | None = ...,
86
+ ) -> _T: ...
87
+ def raise_(exception: _Raisable, *args: Any) -> None: ...
88
+ def strictly_n(
89
+ iterable: Iterable[_T],
90
+ n: int,
91
+ too_short: _GenFn | None = ...,
92
+ too_long: _GenFn | None = ...,
93
+ ) -> list[_T]: ...
94
+ def distinct_permutations(
95
+ iterable: Iterable[_T], r: int | None = ...
96
+ ) -> Iterator[tuple[_T, ...]]: ...
97
+ def intersperse(
98
+ e: _U, iterable: Iterable[_T], n: int = ...
99
+ ) -> Iterator[_T | _U]: ...
100
+ def unique_to_each(*iterables: Iterable[_T]) -> list[list[_T]]: ...
101
+ @overload
102
+ def windowed(
103
+ seq: Iterable[_T], n: int, *, step: int = ...
104
+ ) -> Iterator[tuple[_T | None, ...]]: ...
105
+ @overload
106
+ def windowed(
107
+ seq: Iterable[_T], n: int, fillvalue: _U, step: int = ...
108
+ ) -> Iterator[tuple[_T | _U, ...]]: ...
109
+ def substrings(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
110
+ def substrings_indexes(
111
+ seq: Sequence[_T], reverse: bool = ...
112
+ ) -> Iterator[tuple[Sequence[_T], int, int]]: ...
113
+
114
+ class bucket(Generic[_T, _U], Container[_U]):
115
+ def __init__(
116
+ self,
117
+ iterable: Iterable[_T],
118
+ key: Callable[[_T], _U],
119
+ validator: Callable[[_U], object] | None = ...,
120
+ ) -> None: ...
121
+ def __contains__(self, value: object) -> bool: ...
122
+ def __iter__(self) -> Iterator[_U]: ...
123
+ def __getitem__(self, value: object) -> Iterator[_T]: ...
124
+
125
+ def spy(
126
+ iterable: Iterable[_T], n: int = ...
127
+ ) -> tuple[list[_T], Iterator[_T]]: ...
128
+ def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ...
129
+ def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ...
130
+ def interleave_evenly(
131
+ iterables: list[Iterable[_T]], lengths: list[int] | None = ...
132
+ ) -> Iterator[_T]: ...
133
+ def collapse(
134
+ iterable: Iterable[Any],
135
+ base_type: type | None = ...,
136
+ levels: int | None = ...,
137
+ ) -> Iterator[Any]: ...
138
+ @overload
139
+ def side_effect(
140
+ func: Callable[[_T], object],
141
+ iterable: Iterable[_T],
142
+ chunk_size: None = ...,
143
+ before: Callable[[], object] | None = ...,
144
+ after: Callable[[], object] | None = ...,
145
+ ) -> Iterator[_T]: ...
146
+ @overload
147
+ def side_effect(
148
+ func: Callable[[list[_T]], object],
149
+ iterable: Iterable[_T],
150
+ chunk_size: int,
151
+ before: Callable[[], object] | None = ...,
152
+ after: Callable[[], object] | None = ...,
153
+ ) -> Iterator[_T]: ...
154
+ def sliced(
155
+ seq: _SupportsSlicing[_T], n: int, strict: bool = ...
156
+ ) -> Iterator[_T]: ...
157
+ def split_at(
158
+ iterable: Iterable[_T],
159
+ pred: Callable[[_T], object],
160
+ maxsplit: int = ...,
161
+ keep_separator: bool = ...,
162
+ ) -> Iterator[list[_T]]: ...
163
+ def split_before(
164
+ iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
165
+ ) -> Iterator[list[_T]]: ...
166
+ def split_after(
167
+ iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
168
+ ) -> Iterator[list[_T]]: ...
169
+ def split_when(
170
+ iterable: Iterable[_T],
171
+ pred: Callable[[_T, _T], object],
172
+ maxsplit: int = ...,
173
+ ) -> Iterator[list[_T]]: ...
174
+ def split_into(
175
+ iterable: Iterable[_T], sizes: Iterable[int | None]
176
+ ) -> Iterator[list[_T]]: ...
177
+ @overload
178
+ def padded(
179
+ iterable: Iterable[_T],
180
+ *,
181
+ n: int | None = ...,
182
+ next_multiple: bool = ...,
183
+ ) -> Iterator[_T | None]: ...
184
+ @overload
185
+ def padded(
186
+ iterable: Iterable[_T],
187
+ fillvalue: _U,
188
+ n: int | None = ...,
189
+ next_multiple: bool = ...,
190
+ ) -> Iterator[_T | _U]: ...
191
+ @overload
192
+ def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ...
193
+ @overload
194
+ def repeat_last(iterable: Iterable[_T], default: _U) -> Iterator[_T | _U]: ...
195
+ def distribute(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
196
+ @overload
197
+ def stagger(
198
+ iterable: Iterable[_T],
199
+ offsets: _SizedIterable[int] = ...,
200
+ longest: bool = ...,
201
+ ) -> Iterator[tuple[_T | None, ...]]: ...
202
+ @overload
203
+ def stagger(
204
+ iterable: Iterable[_T],
205
+ offsets: _SizedIterable[int] = ...,
206
+ longest: bool = ...,
207
+ fillvalue: _U = ...,
208
+ ) -> Iterator[tuple[_T | _U, ...]]: ...
209
+
210
+ class UnequalIterablesError(ValueError):
211
+ def __init__(self, details: tuple[int, int, int] | None = ...) -> None: ...
212
+
213
+ @overload
214
+ def zip_equal(__iter1: Iterable[_T1]) -> Iterator[tuple[_T1]]: ...
215
+ @overload
216
+ def zip_equal(
217
+ __iter1: Iterable[_T1], __iter2: Iterable[_T2]
218
+ ) -> Iterator[tuple[_T1, _T2]]: ...
219
+ @overload
220
+ def zip_equal(
221
+ __iter1: Iterable[_T],
222
+ __iter2: Iterable[_T],
223
+ __iter3: Iterable[_T],
224
+ *iterables: Iterable[_T],
225
+ ) -> Iterator[tuple[_T, ...]]: ...
226
+ @overload
227
+ def zip_offset(
228
+ __iter1: Iterable[_T1],
229
+ *,
230
+ offsets: _SizedIterable[int],
231
+ longest: bool = ...,
232
+ fillvalue: None = None,
233
+ ) -> Iterator[tuple[_T1 | None]]: ...
234
+ @overload
235
+ def zip_offset(
236
+ __iter1: Iterable[_T1],
237
+ __iter2: Iterable[_T2],
238
+ *,
239
+ offsets: _SizedIterable[int],
240
+ longest: bool = ...,
241
+ fillvalue: None = None,
242
+ ) -> Iterator[tuple[_T1 | None, _T2 | None]]: ...
243
+ @overload
244
+ def zip_offset(
245
+ __iter1: Iterable[_T],
246
+ __iter2: Iterable[_T],
247
+ __iter3: Iterable[_T],
248
+ *iterables: Iterable[_T],
249
+ offsets: _SizedIterable[int],
250
+ longest: bool = ...,
251
+ fillvalue: None = None,
252
+ ) -> Iterator[tuple[_T | None, ...]]: ...
253
+ @overload
254
+ def zip_offset(
255
+ __iter1: Iterable[_T1],
256
+ *,
257
+ offsets: _SizedIterable[int],
258
+ longest: bool = ...,
259
+ fillvalue: _U,
260
+ ) -> Iterator[tuple[_T1 | _U]]: ...
261
+ @overload
262
+ def zip_offset(
263
+ __iter1: Iterable[_T1],
264
+ __iter2: Iterable[_T2],
265
+ *,
266
+ offsets: _SizedIterable[int],
267
+ longest: bool = ...,
268
+ fillvalue: _U,
269
+ ) -> Iterator[tuple[_T1 | _U, _T2 | _U]]: ...
270
+ @overload
271
+ def zip_offset(
272
+ __iter1: Iterable[_T],
273
+ __iter2: Iterable[_T],
274
+ __iter3: Iterable[_T],
275
+ *iterables: Iterable[_T],
276
+ offsets: _SizedIterable[int],
277
+ longest: bool = ...,
278
+ fillvalue: _U,
279
+ ) -> Iterator[tuple[_T | _U, ...]]: ...
280
+ def sort_together(
281
+ iterables: Iterable[Iterable[_T]],
282
+ key_list: Iterable[int] = ...,
283
+ key: Callable[..., Any] | None = ...,
284
+ reverse: bool = ...,
285
+ ) -> list[tuple[_T, ...]]: ...
286
+ def unzip(iterable: Iterable[Sequence[_T]]) -> tuple[Iterator[_T], ...]: ...
287
+ def divide(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
288
+ def always_iterable(
289
+ obj: object,
290
+ base_type: type | tuple[type | tuple[Any, ...], ...] | None = ...,
291
+ ) -> Iterator[Any]: ...
292
+ def adjacent(
293
+ predicate: Callable[[_T], bool],
294
+ iterable: Iterable[_T],
295
+ distance: int = ...,
296
+ ) -> Iterator[tuple[bool, _T]]: ...
297
+ @overload
298
+ def groupby_transform(
299
+ iterable: Iterable[_T],
300
+ keyfunc: None = None,
301
+ valuefunc: None = None,
302
+ reducefunc: None = None,
303
+ ) -> Iterator[tuple[_T, Iterator[_T]]]: ...
304
+ @overload
305
+ def groupby_transform(
306
+ iterable: Iterable[_T],
307
+ keyfunc: Callable[[_T], _U],
308
+ valuefunc: None,
309
+ reducefunc: None,
310
+ ) -> Iterator[tuple[_U, Iterator[_T]]]: ...
311
+ @overload
312
+ def groupby_transform(
313
+ iterable: Iterable[_T],
314
+ keyfunc: None,
315
+ valuefunc: Callable[[_T], _V],
316
+ reducefunc: None,
317
+ ) -> Iterable[tuple[_T, Iterable[_V]]]: ...
318
+ @overload
319
+ def groupby_transform(
320
+ iterable: Iterable[_T],
321
+ keyfunc: Callable[[_T], _U],
322
+ valuefunc: Callable[[_T], _V],
323
+ reducefunc: None,
324
+ ) -> Iterable[tuple[_U, Iterator[_V]]]: ...
325
+ @overload
326
+ def groupby_transform(
327
+ iterable: Iterable[_T],
328
+ keyfunc: None,
329
+ valuefunc: None,
330
+ reducefunc: Callable[[Iterator[_T]], _W],
331
+ ) -> Iterable[tuple[_T, _W]]: ...
332
+ @overload
333
+ def groupby_transform(
334
+ iterable: Iterable[_T],
335
+ keyfunc: Callable[[_T], _U],
336
+ valuefunc: None,
337
+ reducefunc: Callable[[Iterator[_T]], _W],
338
+ ) -> Iterable[tuple[_U, _W]]: ...
339
+ @overload
340
+ def groupby_transform(
341
+ iterable: Iterable[_T],
342
+ keyfunc: None,
343
+ valuefunc: Callable[[_T], _V],
344
+ reducefunc: Callable[[Iterable[_V]], _W],
345
+ ) -> Iterable[tuple[_T, _W]]: ...
346
+ @overload
347
+ def groupby_transform(
348
+ iterable: Iterable[_T],
349
+ keyfunc: Callable[[_T], _U],
350
+ valuefunc: Callable[[_T], _V],
351
+ reducefunc: Callable[[Iterable[_V]], _W],
352
+ ) -> Iterable[tuple[_U, _W]]: ...
353
+
354
class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
    # Stub for more_itertools.numeric_range: a range-like immutable sequence
    # over arbitrary numeric types. _T is the element/start/stop type and
    # _U the step type (they may differ, e.g. datetime start with timedelta
    # step — presumably; confirm against the implementation).
    @overload
    def __init__(self, __stop: _T) -> None: ...
    @overload
    def __init__(self, __start: _T, __stop: _T) -> None: ...
    @overload
    def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ...
    def __bool__(self) -> bool: ...
    def __contains__(self, elem: object) -> bool: ...
    def __eq__(self, other: object) -> bool: ...
    @overload
    def __getitem__(self, key: int) -> _T: ...
    @overload
    def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[_T]: ...
    def __len__(self) -> int: ...
    def __reduce__(
        self,
    ) -> tuple[Type[numeric_range[_T, _U]], tuple[_T, _T, _U]]: ...
    def __repr__(self) -> str: ...
    def __reversed__(self) -> Iterator[_T]: ...
    def count(self, value: _T) -> int: ...
    # Signature intentionally narrower than Sequence.index, hence the ignore.
    def index(self, value: _T) -> int: ...  # type: ignore
378
+
379
+ def count_cycle(
380
+ iterable: Iterable[_T], n: int | None = ...
381
+ ) -> Iterable[tuple[int, _T]]: ...
382
+ def mark_ends(
383
+ iterable: Iterable[_T],
384
+ ) -> Iterable[tuple[bool, bool, _T]]: ...
385
+ def locate(
386
+ iterable: Iterable[_T],
387
+ pred: Callable[..., Any] = ...,
388
+ window_size: int | None = ...,
389
+ ) -> Iterator[int]: ...
390
+ def lstrip(
391
+ iterable: Iterable[_T], pred: Callable[[_T], object]
392
+ ) -> Iterator[_T]: ...
393
+ def rstrip(
394
+ iterable: Iterable[_T], pred: Callable[[_T], object]
395
+ ) -> Iterator[_T]: ...
396
+ def strip(
397
+ iterable: Iterable[_T], pred: Callable[[_T], object]
398
+ ) -> Iterator[_T]: ...
399
+
400
+ class islice_extended(Generic[_T], Iterator[_T]):
401
+ def __init__(self, iterable: Iterable[_T], *args: int | None) -> None: ...
402
+ def __iter__(self) -> islice_extended[_T]: ...
403
+ def __next__(self) -> _T: ...
404
+ def __getitem__(self, index: slice) -> islice_extended[_T]: ...
405
+
406
+ def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ...
407
+ def consecutive_groups(
408
+ iterable: Iterable[_T], ordering: Callable[[_T], int] = ...
409
+ ) -> Iterator[Iterator[_T]]: ...
410
+ @overload
411
+ def difference(
412
+ iterable: Iterable[_T],
413
+ func: Callable[[_T, _T], _U] = ...,
414
+ *,
415
+ initial: None = ...,
416
+ ) -> Iterator[_T | _U]: ...
417
+ @overload
418
+ def difference(
419
+ iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U
420
+ ) -> Iterator[_U]: ...
421
+
422
+ class SequenceView(Generic[_T], Sequence[_T]):
423
+ def __init__(self, target: Sequence[_T]) -> None: ...
424
+ @overload
425
+ def __getitem__(self, index: int) -> _T: ...
426
+ @overload
427
+ def __getitem__(self, index: slice) -> Sequence[_T]: ...
428
+ def __len__(self) -> int: ...
429
+
430
+ class seekable(Generic[_T], Iterator[_T]):
431
+ def __init__(
432
+ self, iterable: Iterable[_T], maxlen: int | None = ...
433
+ ) -> None: ...
434
+ def __iter__(self) -> seekable[_T]: ...
435
+ def __next__(self) -> _T: ...
436
+ def __bool__(self) -> bool: ...
437
+ @overload
438
+ def peek(self) -> _T: ...
439
+ @overload
440
+ def peek(self, default: _U) -> _T | _U: ...
441
+ def elements(self) -> SequenceView[_T]: ...
442
+ def seek(self, index: int) -> None: ...
443
+ def relative_seek(self, count: int) -> None: ...
444
+
445
+ class run_length:
446
+ @staticmethod
447
+ def encode(iterable: Iterable[_T]) -> Iterator[tuple[_T, int]]: ...
448
+ @staticmethod
449
+ def decode(iterable: Iterable[tuple[_T, int]]) -> Iterator[_T]: ...
450
+
451
+ def exactly_n(
452
+ iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
453
+ ) -> bool: ...
454
+ def circular_shifts(iterable: Iterable[_T]) -> list[tuple[_T, ...]]: ...
455
+ def make_decorator(
456
+ wrapping_func: Callable[..., _U], result_index: int = ...
457
+ ) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
458
+ @overload
459
+ def map_reduce(
460
+ iterable: Iterable[_T],
461
+ keyfunc: Callable[[_T], _U],
462
+ valuefunc: None = ...,
463
+ reducefunc: None = ...,
464
+ ) -> dict[_U, list[_T]]: ...
465
+ @overload
466
+ def map_reduce(
467
+ iterable: Iterable[_T],
468
+ keyfunc: Callable[[_T], _U],
469
+ valuefunc: Callable[[_T], _V],
470
+ reducefunc: None = ...,
471
+ ) -> dict[_U, list[_V]]: ...
472
+ @overload
473
+ def map_reduce(
474
+ iterable: Iterable[_T],
475
+ keyfunc: Callable[[_T], _U],
476
+ valuefunc: None = ...,
477
+ reducefunc: Callable[[list[_T]], _W] = ...,
478
+ ) -> dict[_U, _W]: ...
479
+ @overload
480
+ def map_reduce(
481
+ iterable: Iterable[_T],
482
+ keyfunc: Callable[[_T], _U],
483
+ valuefunc: Callable[[_T], _V],
484
+ reducefunc: Callable[[list[_V]], _W],
485
+ ) -> dict[_U, _W]: ...
486
+ def rlocate(
487
+ iterable: Iterable[_T],
488
+ pred: Callable[..., object] = ...,
489
+ window_size: int | None = ...,
490
+ ) -> Iterator[int]: ...
491
+ def replace(
492
+ iterable: Iterable[_T],
493
+ pred: Callable[..., object],
494
+ substitutes: Iterable[_U],
495
+ count: int | None = ...,
496
+ window_size: int = ...,
497
+ ) -> Iterator[_T | _U]: ...
498
+ def partitions(iterable: Iterable[_T]) -> Iterator[list[list[_T]]]: ...
499
+ def set_partitions(
500
+ iterable: Iterable[_T], k: int | None = ...
501
+ ) -> Iterator[list[list[_T]]]: ...
502
+
503
+ class time_limited(Generic[_T], Iterator[_T]):
504
+ def __init__(
505
+ self, limit_seconds: float, iterable: Iterable[_T]
506
+ ) -> None: ...
507
+ def __iter__(self) -> islice_extended[_T]: ...
508
+ def __next__(self) -> _T: ...
509
+
510
+ @overload
511
+ def only(
512
+ iterable: Iterable[_T], *, too_long: _Raisable | None = ...
513
+ ) -> _T | None: ...
514
+ @overload
515
+ def only(
516
+ iterable: Iterable[_T], default: _U, too_long: _Raisable | None = ...
517
+ ) -> _T | _U: ...
518
+ def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ...
519
+ def distinct_combinations(
520
+ iterable: Iterable[_T], r: int
521
+ ) -> Iterator[tuple[_T, ...]]: ...
522
+ def filter_except(
523
+ validator: Callable[[Any], object],
524
+ iterable: Iterable[_T],
525
+ *exceptions: Type[BaseException],
526
+ ) -> Iterator[_T]: ...
527
+ def map_except(
528
+ function: Callable[[Any], _U],
529
+ iterable: Iterable[_T],
530
+ *exceptions: Type[BaseException],
531
+ ) -> Iterator[_U]: ...
532
+ def map_if(
533
+ iterable: Iterable[Any],
534
+ pred: Callable[[Any], bool],
535
+ func: Callable[[Any], Any],
536
+ func_else: Callable[[Any], Any] | None = ...,
537
+ ) -> Iterator[Any]: ...
538
+ def sample(
539
+ iterable: Iterable[_T],
540
+ k: int,
541
+ weights: Iterable[float] | None = ...,
542
+ ) -> list[_T]: ...
543
+ def is_sorted(
544
+ iterable: Iterable[_T],
545
+ key: Callable[[_T], _U] | None = ...,
546
+ reverse: bool = False,
547
+ strict: bool = False,
548
+ ) -> bool: ...
549
+
550
+ class AbortThread(BaseException):
551
+ pass
552
+
553
+ class callback_iter(Generic[_T], Iterator[_T]):
554
+ def __init__(
555
+ self,
556
+ func: Callable[..., Any],
557
+ callback_kwd: str = ...,
558
+ wait_seconds: float = ...,
559
+ ) -> None: ...
560
+ def __enter__(self) -> callback_iter[_T]: ...
561
+ def __exit__(
562
+ self,
563
+ exc_type: Type[BaseException] | None,
564
+ exc_value: BaseException | None,
565
+ traceback: TracebackType | None,
566
+ ) -> bool | None: ...
567
+ def __iter__(self) -> callback_iter[_T]: ...
568
+ def __next__(self) -> _T: ...
569
+ def _reader(self) -> Iterator[_T]: ...
570
+ @property
571
+ def done(self) -> bool: ...
572
+ @property
573
+ def result(self) -> Any: ...
574
+
575
+ def windowed_complete(
576
+ iterable: Iterable[_T], n: int
577
+ ) -> Iterator[tuple[_T, ...]]: ...
578
+ def all_unique(
579
+ iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
580
+ ) -> bool: ...
581
+ def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ...
582
+ def nth_combination_with_replacement(
583
+ iterable: Iterable[_T], r: int, index: int
584
+ ) -> tuple[_T, ...]: ...
585
+ def nth_permutation(
586
+ iterable: Iterable[_T], r: int, index: int
587
+ ) -> tuple[_T, ...]: ...
588
+ def value_chain(*args: _T | Iterable[_T]) -> Iterable[_T]: ...
589
+ def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
590
+ def combination_index(
591
+ element: Iterable[_T], iterable: Iterable[_T]
592
+ ) -> int: ...
593
+ def combination_with_replacement_index(
594
+ element: Iterable[_T], iterable: Iterable[_T]
595
+ ) -> int: ...
596
+ def permutation_index(
597
+ element: Iterable[_T], iterable: Iterable[_T]
598
+ ) -> int: ...
599
+ def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ...
600
+
601
+ class countable(Generic[_T], Iterator[_T]):
602
+ def __init__(self, iterable: Iterable[_T]) -> None: ...
603
+ def __iter__(self) -> countable[_T]: ...
604
+ def __next__(self) -> _T: ...
605
+
606
+ def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
607
+ def zip_broadcast(
608
+ *objects: _T | Iterable[_T],
609
+ scalar_types: type | tuple[type | tuple[Any, ...], ...] | None = ...,
610
+ strict: bool = ...,
611
+ ) -> Iterable[tuple[_T, ...]]: ...
612
+ def unique_in_window(
613
+ iterable: Iterable[_T], n: int, key: Callable[[_T], _U] | None = ...
614
+ ) -> Iterator[_T]: ...
615
+ def duplicates_everseen(
616
+ iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
617
+ ) -> Iterator[_T]: ...
618
+ def duplicates_justseen(
619
+ iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
620
+ ) -> Iterator[_T]: ...
621
+ def classify_unique(
622
+ iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
623
+ ) -> Iterator[tuple[_T, bool, bool]]: ...
624
+
625
+ class _SupportsLessThan(Protocol):
626
+ def __lt__(self, __other: Any) -> bool: ...
627
+
628
+ _SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan)
629
+
630
+ @overload
631
+ def minmax(
632
+ iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None
633
+ ) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
634
+ @overload
635
+ def minmax(
636
+ iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan]
637
+ ) -> tuple[_T, _T]: ...
638
+ @overload
639
+ def minmax(
640
+ iterable_or_value: Iterable[_SupportsLessThanT],
641
+ *,
642
+ key: None = None,
643
+ default: _U,
644
+ ) -> _U | tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
645
+ @overload
646
+ def minmax(
647
+ iterable_or_value: Iterable[_T],
648
+ *,
649
+ key: Callable[[_T], _SupportsLessThan],
650
+ default: _U,
651
+ ) -> _U | tuple[_T, _T]: ...
652
+ @overload
653
+ def minmax(
654
+ iterable_or_value: _SupportsLessThanT,
655
+ __other: _SupportsLessThanT,
656
+ *others: _SupportsLessThanT,
657
+ ) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
658
+ @overload
659
+ def minmax(
660
+ iterable_or_value: _T,
661
+ __other: _T,
662
+ *others: _T,
663
+ key: Callable[[_T], _SupportsLessThan],
664
+ ) -> tuple[_T, _T]: ...
665
+ def longest_common_prefix(
666
+ iterables: Iterable[Iterable[_T]],
667
+ ) -> Iterator[_T]: ...
668
+ def iequals(*iterables: Iterable[Any]) -> bool: ...
669
+ def constrained_batches(
670
+ iterable: Iterable[_T],
671
+ max_size: int,
672
+ max_count: int | None = ...,
673
+ get_len: Callable[[_T], object] = ...,
674
+ strict: bool = ...,
675
+ ) -> Iterator[tuple[_T]]: ...
676
+ def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
677
+ def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
678
+ def takewhile_inclusive(
679
+ predicate: Callable[[_T], bool], iterable: Iterable[_T]
680
+ ) -> Iterator[_T]: ...
681
+ def outer_product(
682
+ func: Callable[[_T, _U], _V],
683
+ xs: Iterable[_T],
684
+ ys: Iterable[_U],
685
+ *args: Any,
686
+ **kwargs: Any,
687
+ ) -> Iterator[tuple[_V, ...]]: ...
688
+ def iter_suppress(
689
+ iterable: Iterable[_T],
690
+ *exceptions: Type[BaseException],
691
+ ) -> Iterator[_T]: ...
692
+ def filter_map(
693
+ func: Callable[[_T], _V | None],
694
+ iterable: Iterable[_T],
695
+ ) -> Iterator[_V]: ...
env-llmeval/lib/python3.10/site-packages/more_itertools/py.typed ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/more_itertools/recipes.py ADDED
@@ -0,0 +1,1012 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Imported from the recipes section of the itertools documentation.
2
+
3
+ All functions taken from the recipes section of the itertools library docs
4
+ [1]_.
5
+ Some backward-compatible usability improvements have been made.
6
+
7
+ .. [1] http://docs.python.org/library/itertools.html#recipes
8
+
9
+ """
10
+ import math
11
+ import operator
12
+
13
+ from collections import deque
14
+ from collections.abc import Sized
15
+ from functools import partial, reduce
16
+ from itertools import (
17
+ chain,
18
+ combinations,
19
+ compress,
20
+ count,
21
+ cycle,
22
+ groupby,
23
+ islice,
24
+ product,
25
+ repeat,
26
+ starmap,
27
+ tee,
28
+ zip_longest,
29
+ )
30
+ from random import randrange, sample, choice
31
+ from sys import hexversion
32
+
33
+ __all__ = [
34
+ 'all_equal',
35
+ 'batched',
36
+ 'before_and_after',
37
+ 'consume',
38
+ 'convolve',
39
+ 'dotproduct',
40
+ 'first_true',
41
+ 'factor',
42
+ 'flatten',
43
+ 'grouper',
44
+ 'iter_except',
45
+ 'iter_index',
46
+ 'matmul',
47
+ 'ncycles',
48
+ 'nth',
49
+ 'nth_combination',
50
+ 'padnone',
51
+ 'pad_none',
52
+ 'pairwise',
53
+ 'partition',
54
+ 'polynomial_eval',
55
+ 'polynomial_from_roots',
56
+ 'polynomial_derivative',
57
+ 'powerset',
58
+ 'prepend',
59
+ 'quantify',
60
+ 'reshape',
61
+ 'random_combination_with_replacement',
62
+ 'random_combination',
63
+ 'random_permutation',
64
+ 'random_product',
65
+ 'repeatfunc',
66
+ 'roundrobin',
67
+ 'sieve',
68
+ 'sliding_window',
69
+ 'subslices',
70
+ 'sum_of_squares',
71
+ 'tabulate',
72
+ 'tail',
73
+ 'take',
74
+ 'totient',
75
+ 'transpose',
76
+ 'triplewise',
77
+ 'unique_everseen',
78
+ 'unique_justseen',
79
+ ]
80
+
81
+ _marker = object()
82
+
83
+
84
+ # zip with strict is available for Python 3.10+
85
+ try:
86
+ zip(strict=True)
87
+ except TypeError:
88
+ _zip_strict = zip
89
+ else:
90
+ _zip_strict = partial(zip, strict=True)
91
+
92
+ # math.sumprod is available for Python 3.12+
93
+ _sumprod = getattr(math, 'sumprod', lambda x, y: dotproduct(x, y))
94
+
95
+
96
def take(n, iterable):
    """Return the first *n* items of *iterable* as a list.

    >>> take(3, range(10))
    [0, 1, 2]

    If the iterable holds fewer than *n* items, all of them are returned.

    >>> take(10, range(3))
    [0, 1, 2]

    """
    # islice stops early on short inputs, so no length check is needed.
    prefix = islice(iterable, n)
    return list(prefix)
110
+
111
+
112
def tabulate(function, start=0):
    """Yield ``function(start)``, ``function(start + 1)``, ... indefinitely.

    *function* should accept a single integer argument; *start* defaults
    to 0 and is incremented on every step.

    >>> square = lambda x: x ** 2
    >>> iterator = tabulate(square, -3)
    >>> take(4, iterator)
    [9, 4, 1, 0]

    """
    integers = count(start)
    return map(function, integers)
128
+
129
+
130
def tail(n, iterable):
    """Return an iterator over the last *n* items of *iterable*.

    >>> t = tail(3, 'ABCDEFG')
    >>> list(t)
    ['E', 'F', 'G']

    """
    # A sized container lets us jump straight to the final elements with
    # islice.  Otherwise a bounded deque retains only the last *n* items.
    # Non-iterable inputs raise TypeError from islice/deque either way.
    if isinstance(iterable, Sized):
        start = len(iterable) - n
        yield from islice(iterable, start if start > 0 else 0, None)
    else:
        yield from deque(iterable, maxlen=n)
146
+
147
+
148
def consume(iterator, n=None):
    """Advance *iterator* by *n* steps; if *n* is ``None``, exhaust it.

    >>> i = (x for x in range(10))
    >>> next(i)
    0
    >>> consume(i, 3)
    >>> next(i)
    4

    If fewer than *n* items remain, the whole iterator is consumed.

    >>> i = (x for x in range(3))
    >>> consume(i, 5)
    >>> next(i)
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    StopIteration

    """
    # Both branches consume at C speed rather than in a Python-level loop.
    if n is not None:
        # Advance to the empty slice that starts at position *n*.
        next(islice(iterator, n, n), None)
    else:
        # A zero-length deque discards everything it is fed.
        deque(iterator, maxlen=0)
186
+
187
+
188
def nth(iterable, n, default=None):
    """Return the *n*-th item of *iterable*, or *default* if it is too short.

    >>> l = range(10)
    >>> nth(l, 3)
    3
    >>> nth(l, 20, "zebra")
    'zebra'

    """
    remainder = islice(iterable, n, None)
    return next(remainder, default)
199
+
200
+
201
def all_equal(iterable):
    """Return ``True`` if every element of *iterable* equals the others.

    >>> all_equal('aaaa')
    True
    >>> all_equal('aaab')
    False

    An empty iterable is considered all-equal.
    """
    # groupby collapses runs of equal items; more than one group means at
    # least two distinct values were seen.
    groups = groupby(iterable)
    if next(groups, None) is None:
        return True
    return next(groups, None) is None
213
+
214
+
215
def quantify(iterable, pred=bool):
    """Return how many times the predicate is true.

    >>> quantify([True, False, True])
    2

    *pred* defaults to :func:`bool`, so by default this counts the truthy
    elements of *iterable*.
    """
    # map + sum run the counting loop at C speed.
    return sum(map(pred, iterable))
223
+
224
+
225
def pad_none(iterable):
    """Yield the elements of *iterable*, then yield ``None`` indefinitely.

    >>> take(5, pad_none(range(3)))
    [0, 1, 2, None, None]

    Useful for emulating the behavior of the built-in :func:`map` function.

    See also :func:`padded`.

    """
    nones = repeat(None)
    return chain(iterable, nones)


# Backward-compatible alias for the old spelling.
padnone = pad_none
240
+
241
+
242
def ncycles(iterable, n):
    """Yield the elements of *iterable* repeated *n* times over.

    >>> list(ncycles(["a", "b"], 3))
    ['a', 'b', 'a', 'b', 'a', 'b']

    """
    # Materialize once so the input may be a one-shot iterator.
    saved = tuple(iterable)
    return chain.from_iterable(repeat(saved, n))
250
+
251
+
252
def dotproduct(vec1, vec2):
    """Return the dot product of the two iterables.

    >>> dotproduct([10, 10], [20, 20])
    400

    Iteration stops with the shorter of the two inputs.
    """
    return sum(a * b for a, b in zip(vec1, vec2))
260
+
261
+
262
def flatten(listOfLists):
    """Return an iterator flattening one level of nesting in a list of lists.

    >>> list(flatten([[0, 1], [2, 3]]))
    [0, 1, 2, 3]

    See also :func:`collapse`, which can flatten multiple levels of nesting.

    """
    return (item for sublist in listOfLists for item in sublist)
272
+
273
+
274
def repeatfunc(func, times=None, *args):
    """Call *func* with *args* repeatedly, yielding each result.

    If *times* is given, stop after that many calls:

    >>> from operator import add
    >>> times = 4
    >>> args = 3, 5
    >>> list(repeatfunc(add, times, *args))
    [8, 8, 8, 8]

    If *times* is ``None``, the iterable never terminates:

    >>> from random import randrange
    >>> times = None
    >>> args = 1, 11
    >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
    [2, 4, 8, 1, 8, 4]

    """
    # repeat() with no count is infinite; with a count it is bounded.
    arg_stream = repeat(args) if times is None else repeat(args, times)
    return starmap(func, arg_stream)
299
+
300
+
301
+ def _pairwise(iterable):
302
+ """Returns an iterator of paired items, overlapping, from the original
303
+
304
+ >>> take(4, pairwise(count()))
305
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
306
+
307
+ On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
308
+
309
+ """
310
+ a, b = tee(iterable)
311
+ next(b, None)
312
+ return zip(a, b)
313
+
314
+
315
# Prefer the C implementation of pairwise added in Python 3.10; fall back
# to the pure-Python version on older interpreters.  The wrapper function
# (rather than a bare alias) keeps the name's module and doc attributes
# under this package's control.
try:
    from itertools import pairwise as itertools_pairwise
except ImportError:
    pairwise = _pairwise
else:

    def pairwise(iterable):
        return itertools_pairwise(iterable)

    pairwise.__doc__ = _pairwise.__doc__
325
+
326
+
327
class UnequalIterablesError(ValueError):
    """Raised when iterables that must match in length do not.

    *details*, when given, is a ``(first_length, index, length)`` triple
    identifying the first mismatching iterable.
    """

    def __init__(self, details=None):
        msg = 'Iterables have different lengths'
        if details is not None:
            first_len, index, length = details
            msg += ': index 0 has length {}; index {} has length {}'.format(
                first_len, index, length
            )
        super().__init__(msg)
336
+
337
+
338
def _zip_equal_generator(iterables):
    # Lock-step iteration that verifies all iterables have equal length:
    # zip_longest pads exhausted inputs with the unique ``_marker`` sentinel,
    # so observing the sentinel means at least one iterable ran out before
    # the others.  Used by _zip_equal for inputs without len().
    for combo in zip_longest(*iterables, fillvalue=_marker):
        for val in combo:
            if val is _marker:
                raise UnequalIterablesError()
        yield combo
344
+
345
+
346
def _zip_equal(*iterables):
    # zip() that raises UnequalIterablesError when lengths differ.
    # Fast path: if every input supports len(), compare sizes up front and
    # delegate to the built-in zip.
    # Check whether the iterables are all the same size.
    try:
        first_size = len(iterables[0])
        for i, it in enumerate(iterables[1:], 1):
            size = len(it)
            if size != first_size:
                raise UnequalIterablesError(details=(first_size, i, size))
        # All sizes are equal, we can use the built-in zip.
        return zip(*iterables)
    # If any one of the iterables didn't have a length, start reading
    # them until one runs out.  (UnequalIterablesError is a ValueError,
    # so it is not swallowed by this except clause.)
    except TypeError:
        return _zip_equal_generator(iterables)
360
+
361
+
362
def grouper(iterable, n, incomplete='fill', fillvalue=None):
    """Group elements from *iterable* into fixed-length groups of length *n*.

    >>> list(grouper('ABCDEF', 3))
    [('A', 'B', 'C'), ('D', 'E', 'F')]

    *incomplete* controls the final, short group when the input length is
    not a multiple of *n*:

    * ``'fill'`` (default) — pad the last group with *fillvalue*:

      >>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x'))
      [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]

    * ``'ignore'`` — drop the last group:

      >>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x'))
      [('A', 'B', 'C'), ('D', 'E', 'F')]

    * ``'strict'`` — raise a ``ValueError`` subclass:

      >>> it = grouper('ABCDEFG', 3, incomplete='strict')
      >>> list(it)  # doctest: +IGNORE_EXCEPTION_DETAIL
      Traceback (most recent call last):
      ...
      UnequalIterablesError

    """
    # n references to one shared iterator make each zip step take n items.
    cursors = [iter(iterable)] * n
    if incomplete == 'fill':
        return zip_longest(*cursors, fillvalue=fillvalue)
    elif incomplete == 'strict':
        return _zip_equal(*cursors)
    elif incomplete == 'ignore':
        return zip(*cursors)
    raise ValueError('Expected fill, strict, or ignore')
400
+
401
+
402
def roundrobin(*iterables):
    """Yields an item from each iterable, alternating between them.

    >>> list(roundrobin('ABC', 'D', 'EF'))
    ['A', 'D', 'E', 'B', 'F', 'C']

    This function produces the same output as :func:`interleave_longest`, but
    may perform better for some inputs (in particular when the number of
    iterables is small).

    """
    # Recipe credited to George Sakkis.
    # Fix: the original bound the loop variable to the name ``next``,
    # shadowing the builtin inside this function body.
    pending = len(iterables)
    nexts = cycle(iter(it).__next__ for it in iterables)
    while pending:
        try:
            for get_next in nexts:
                yield get_next()
        except StopIteration:
            # One iterable is exhausted; rebuild the cycle without it.
            pending -= 1
            nexts = cycle(islice(nexts, pending))
423
+
424
+
425
def partition(pred, iterable):
    """
    Returns a 2-tuple of iterables derived from the input iterable.
    The first yields the items that have ``pred(item) == False``.
    The second yields the items that have ``pred(item) == True``.

    >>> is_odd = lambda x: x % 2 != 0
    >>> iterable = range(10)
    >>> even_items, odd_items = partition(is_odd, iterable)
    >>> list(even_items), list(odd_items)
    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])

    If *pred* is None, :func:`bool` is used.

    >>> iterable = [0, 1, False, True, '', ' ']
    >>> false_items, true_items = partition(None, iterable)
    >>> list(false_items), list(true_items)
    ([0, False, ''], [1, True, ' '])

    """
    if pred is None:
        pred = bool

    # Three synchronized copies of the input: one to probe the predicate,
    # two to feed the false/true output streams.  The predicate results are
    # themselves tee'd so each output selects against its own copy.
    source_false, source_true, probe = tee(iterable, 3)
    flags_false, flags_true = tee(pred(item) for item in probe)
    false_items = compress(source_false, (not flag for flag in flags_false))
    true_items = compress(source_true, flags_true)
    return false_items, true_items
451
+
452
+
453
def powerset(iterable):
    """Yields all possible subsets of the iterable.

    >>> list(powerset([1, 2, 3]))
    [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]

    :func:`powerset` will operate on iterables that aren't :class:`set`
    instances, so repeated elements in the input will produce repeated
    elements in the output.

    >>> seq = [1, 1, 0]
    >>> list(powerset(seq))
    [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]

    Use :func:`unique_everseen` on the input to avoid generating duplicates.
    """
    pool = list(iterable)
    by_size = (combinations(pool, size) for size in range(len(pool) + 1))
    return chain.from_iterable(by_size)
474
+
475
+
476
def unique_everseen(iterable, key=None):
    """
    Yield unique elements, preserving order.

    >>> list(unique_everseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D']
    >>> list(unique_everseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'D']

    Sequences with a mix of hashable and unhashable items can be used.
    The function will be slower (i.e., `O(n^2)`) for unhashable items.

    Remember that ``list`` objects are unhashable - you can use the *key*
    parameter to transform the list to a tuple (which is hashable) to
    avoid a slowdown.

    >>> iterable = ([1, 2], [2, 3], [1, 2])
    >>> list(unique_everseen(iterable)) # Slow
    [[1, 2], [2, 3]]
    >>> list(unique_everseen(iterable, key=tuple)) # Faster
    [[1, 2], [2, 3]]

    Similarly, you may want to convert unhashable ``set`` objects with
    ``key=frozenset``. For ``dict`` objects,
    ``key=lambda x: frozenset(x.items())`` can be used.

    """
    # Hashable keys are tracked in a set (O(1) membership); keys that raise
    # TypeError on the set lookup fall back to a list (O(n) membership).
    hashable_seen = set()
    unhashable_seen = []

    for element in iterable:
        k = element if key is None else key(element)
        try:
            is_new = k not in hashable_seen
            if is_new:
                hashable_seen.add(k)
        except TypeError:
            is_new = k not in unhashable_seen
            if is_new:
                unhashable_seen.append(k)
        if is_new:
            yield element
519
+
520
+
521
def unique_justseen(iterable, key=None):
    """Yields elements in order, ignoring serial duplicates

    >>> list(unique_justseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D', 'A', 'B']
    >>> list(unique_justseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'A', 'D']

    """
    # Without a key, groupby's group keys are the elements themselves.
    if key is None:
        return (group_key for group_key, _ in groupby(iterable))

    # With a key, emit the first original element of each run.
    return (next(group) for _, group in groupby(iterable, key))
534
+
535
+
536
def iter_except(func, exception, first=None):
    """Yields results from a function repeatedly until an exception is raised.

    Converts a call-until-exception interface to an iterator interface.
    Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
    to end the loop.

    >>> l = [0, 1, 2]
    >>> list(iter_except(l.pop, IndexError))
    [2, 1, 0]

    Multiple exceptions can be specified as a stopping condition:

    >>> l = [1, 2, 3, '...', 4, 5, 6]
    >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
    [7, 6, 5]

    If *first* is given, it is called (and its result yielded) before the
    main loop begins.
    """
    try:
        if first is not None:
            yield first()
        while True:
            yield func()
    except exception:
        # The designated exception is the normal termination signal.
        return
565
+
566
+
567
def first_true(iterable, default=None, pred=None):
    """
    Returns the first true value in the iterable.

    If no true value is found, returns *default*

    If *pred* is not None, returns the first item for which
    ``pred(item) == True`` .

    >>> first_true(range(10))
    1
    >>> first_true(range(10), pred=lambda x: x > 5)
    6
    >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
    'missing'

    """
    # filter(None, ...) keeps truthy items; otherwise it applies *pred*.
    for item in filter(pred, iterable):
        return item
    return default
585
+
586
+
587
def random_product(*args, repeat=1):
    """Draw an item at random from each of the input iterables.

    >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
    ('c', 3, 'Z')

    If *repeat* is provided as a keyword argument, that many items will be
    drawn from each iterable.

    >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
    ('a', 2, 'd', 3)

    This is equivalent to taking a random selection from
    ``itertools.product(*args, repeat=repeat)``.

    """
    # Materialize each input once; choice() needs a sequence.
    pools = [tuple(pool) for pool in args] * repeat
    return tuple(choice(pool) for pool in pools)
605
+
606
+
607
def random_permutation(iterable, r=None):
    """Return a random *r* length permutation of the elements in *iterable*.

    If *r* is not specified or is ``None``, then *r* defaults to the length of
    *iterable*.

    >>> random_permutation(range(5)) # doctest:+SKIP
    (3, 4, 0, 1, 2)

    This is equivalent to taking a random selection from
    ``itertools.permutations(iterable, r)``.

    """
    pool = tuple(iterable)
    r = len(pool) if r is None else r
    # sample() draws without replacement, which is exactly a permutation.
    return tuple(sample(pool, r))
623
+
624
+
625
def random_combination(iterable, r):
    """Return a random *r* length subsequence of the elements in *iterable*.

    >>> random_combination(range(5), 3) # doctest:+SKIP
    (2, 3, 4)

    This is equivalent to taking a random selection from
    ``itertools.combinations(iterable, r)``.

    """
    pool = tuple(iterable)
    n = len(pool)
    # Sorting the sampled indices preserves the pool's original order,
    # matching what itertools.combinations would emit.
    indices = sorted(sample(range(n), r))
    return tuple(pool[i] for i in indices)
639
+
640
+
641
def random_combination_with_replacement(iterable, r):
    """Return a random *r* length subsequence of elements in *iterable*,
    allowing individual elements to be repeated.

    >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
    (0, 0, 1, 2, 2)

    This equivalent to taking a random selection from
    ``itertools.combinations_with_replacement(iterable, r)``.

    """
    items = tuple(iterable)
    size = len(items)
    # Draw r positions independently (replacement allowed) and sort them
    # so the output respects the pool's ordering.
    positions = sorted(randrange(size) for _ in range(r))
    return tuple(items[pos] for pos in positions)
656
+
657
+
658
def nth_combination(iterable, r, index):
    """Equivalent to ``list(combinations(iterable, r))[index]``.

    The subsequences of *iterable* that are of length *r* can be ordered
    lexicographically. :func:`nth_combination` computes the subsequence at
    sort position *index* directly, without computing the previous
    subsequences.

    >>> nth_combination(range(5), 3, 5)
    (0, 3, 4)

    ``ValueError`` will be raised If *r* is negative or greater than the length
    of *iterable*.
    ``IndexError`` will be raised if the given *index* is invalid.
    """
    pool = tuple(iterable)
    n = len(pool)
    if (r < 0) or (r > n):
        raise ValueError

    # Compute c = C(n, r) with the multiplicative formula, iterating over
    # the smaller of r and n - r to keep the loop short.
    c = 1
    k = min(r, n - r)
    for i in range(1, k + 1):
        c = c * (n - k + i) // i

    # Negative indexes count back from the last combination, mirroring
    # ordinary sequence indexing.
    if index < 0:
        index += c

    if (index < 0) or (index >= c):
        raise IndexError

    # Unrank the combination: at each step c is the number of combinations
    # that start with the current candidate element; skip whole groups of
    # combinations until *index* falls inside the current group.
    result = []
    while r:
        c, n, r = c * r // n, n - 1, r - 1
        while index >= c:
            index -= c
            c, n = c * (n - r) // n, n - 1
        result.append(pool[-1 - n])

    return tuple(result)
698
+
699
+
700
def prepend(value, iterator):
    """Yield *value* first, then every element of *iterator*.

    >>> value = '0'
    >>> iterator = ['1', '2', '3']
    >>> list(prepend(value, iterator))
    ['0', '1', '2', '3']

    To prepend multiple values, see :func:`itertools.chain`
    or :func:`value_chain`.

    """
    # A one-element tuple in front of the stream; chain keeps it lazy.
    return chain((value,), iterator)
713
+
714
+
715
def convolve(signal, kernel):
    """Convolve the iterable *signal* with the iterable *kernel*.

    >>> signal = (1, 2, 3, 4, 5)
    >>> kernel = [3, 2, 1]
    >>> list(convolve(signal, kernel))
    [3, 8, 14, 20, 26, 14, 5]

    Note: the input arguments are not interchangeable, as the *kernel*
    is immediately consumed and stored.

    """
    # This implementation intentionally doesn't match the one in the
    # itertools documentation.  The kernel is reversed once up front so
    # each output is a straight dot product with the sliding window.
    taps = tuple(kernel)[::-1]
    width = len(taps)
    # Zero-filled window; maxlen makes append() drop the oldest sample.
    window = deque([0] * width, maxlen=width)
    # Padding with width-1 trailing zeros flushes the tail of the signal.
    for sample_value in chain(signal, repeat(0, width - 1)):
        window.append(sample_value)
        yield _sumprod(taps, window)
735
+
736
+
737
def before_and_after(predicate, it):
    """A variant of :func:`takewhile` giving complete access to the
    remainder of the iterator.

    >>> it = iter('ABCdEfGhI')
    >>> all_upper, remainder = before_and_after(str.isupper, it)
    >>> ''.join(all_upper)
    'ABC'
    >>> ''.join(remainder)  # takewhile() would lose the 'd'
    'dEfGhI'

    Note that the first iterator must be fully consumed before the second
    iterator can generate valid results.
    """
    stream = iter(it)
    # Holds the single element that failed the predicate, so the second
    # iterator can re-emit it instead of dropping it.
    holdover = []

    def leading():
        for item in stream:
            if not predicate(item):
                holdover.append(item)
                return
            yield item

    # Note: this is different from itertools recipes to allow nesting
    # before_and_after remainders into before_and_after again. See tests
    # for an example.
    trailing = chain(holdover, stream)

    return leading(), trailing
768
+
769
+
770
+ def triplewise(iterable):
771
+ """Return overlapping triplets from *iterable*.
772
+
773
+ >>> list(triplewise('ABCDE'))
774
+ [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
775
+
776
+ """
777
+ for (a, _), (b, c) in pairwise(pairwise(iterable)):
778
+ yield a, b, c
779
+
780
+
781
def sliding_window(iterable, n):
    """Return a sliding window of width *n* over *iterable*.

    >>> list(sliding_window(range(6), 4))
    [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]

    If *iterable* has fewer than *n* items, then nothing is yielded:

    >>> list(sliding_window(range(3), 4))
    []

    For a variant with more features, see :func:`windowed`.
    """
    stream = iter(iterable)
    # Pre-load n-1 elements; each subsequent element completes a window,
    # and maxlen evicts the oldest element automatically.
    buf = deque(maxlen=n)
    buf.extend(islice(stream, n - 1))
    for item in stream:
        buf.append(item)
        yield tuple(buf)
799
+
800
+
801
def subslices(iterable):
    """Return all contiguous non-empty subslices of *iterable*.

    >>> list(subslices('ABC'))
    [['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']]

    This is similar to :func:`substrings`, but emits items in a different
    order.
    """
    seq = list(iterable)
    # Every (start, stop) pair with start < stop names one non-empty slice;
    # combinations() enumerates them in lexicographic order.
    return (
        seq[start:stop]
        for start, stop in combinations(range(len(seq) + 1), 2)
    )
813
+
814
+
815
def polynomial_from_roots(roots):
    """Compute a polynomial's coefficients from its roots.

    >>> roots = [5, -4, 3]  # (x - 5) * (x + 4) * (x - 3)
    >>> polynomial_from_roots(roots)  # x^3 - 4 * x^2 - 17 * x + 60
    [1, -4, -17, 60]
    """
    # Each root r contributes the monomial (x - r), i.e. coefficients
    # (1, -r); repeated convolution multiplies the monomials together.
    monomials = ((1, -root) for root in roots)
    return list(reduce(convolve, monomials, [1]))
824
+
825
+
826
def iter_index(iterable, value, start=0, stop=None):
    """Yield the index of each place in *iterable* that *value* occurs,
    beginning with index *start* and ending before index *stop*.

    See :func:`locate` for a more general means of finding the indexes
    associated with particular values.

    >>> list(iter_index('AABCADEAF', 'A'))
    [0, 1, 4, 7]
    >>> list(iter_index('AABCADEAF', 'A', 1))  # start index is inclusive
    [1, 4, 7]
    >>> list(iter_index('AABCADEAF', 'A', 1, 7))  # stop index is not inclusive
    [1, 4]
    """
    index_method = getattr(iterable, 'index', None)
    if index_method is None:
        # Slow path for general iterables: scan the requested slice and
        # compare each element (identity first, then equality).
        for position, element in enumerate(islice(iterable, start, stop), start):
            if element is value or element == value:
                yield position
    else:
        # Fast path for sequences: delegate the search to the sequence's
        # own index(), resuming just past each hit until it raises.
        if stop is None:
            stop = len(iterable)
        position = start - 1
        try:
            while True:
                position = index_method(value, position + 1, stop)
                yield position
        except ValueError:
            # index() raises ValueError when no further occurrence exists.
            pass
856
+
857
+
858
def sieve(n):
    """Yield the primes less than n.

    >>> list(sieve(30))
    [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
    """
    # Odd-only sieve of Eratosthenes over a bytearray: data[i] == 1 means
    # i is still a prime candidate.  The (0, 1) pattern pre-clears all
    # even indices, so 2 is the only even prime and is emitted up front.
    if n > 2:
        yield 2
    start = 3
    data = bytearray((0, 1)) * (n // 2)
    limit = math.isqrt(n) + 1
    for p in iter_index(data, 1, start, limit):
        # Every still-marked index below p*p is already known prime.
        yield from iter_index(data, 1, start, p * p)
        # Strike out odd multiples of p, starting at p*p (step 2p skips
        # the even multiples, which were never marked).
        data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
        start = p * p
    yield from iter_index(data, 1, start)
874
+
875
+
876
+ def _batched(iterable, n, *, strict=False):
877
+ """Batch data into tuples of length *n*. If the number of items in
878
+ *iterable* is not divisible by *n*:
879
+ * The last batch will be shorter if *strict* is ``False``.
880
+ * :exc:`ValueError` will be raised if *strict* is ``True``.
881
+
882
+ >>> list(batched('ABCDEFG', 3))
883
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
884
+
885
+ On Python 3.13 and above, this is an alias for :func:`itertools.batched`.
886
+ """
887
+ if n < 1:
888
+ raise ValueError('n must be at least one')
889
+ it = iter(iterable)
890
+ while batch := tuple(islice(it, n)):
891
+ if strict and len(batch) != n:
892
+ raise ValueError('batched(): incomplete batch')
893
+ yield batch
894
+
895
+
896
# Prefer the C implementation when itertools.batched supports the
# ``strict`` keyword (added in 3.13; 0x30D00A2 is the 3.13.0a2 hexversion).
if hexversion >= 0x30D00A2:
    from itertools import batched as itertools_batched

    def batched(iterable, n, *, strict=False):
        # Thin wrapper so the public name keeps a Python-level signature.
        return itertools_batched(iterable, n, strict=strict)

else:
    # Fall back to the pure-Python implementation above.
    batched = _batched

# Either way, expose the canonical docstring on the public name.
batched.__doc__ = _batched.__doc__
906
+
907
+
908
def transpose(it):
    """Swap the rows and columns of the input matrix.

    >>> list(transpose([(1, 2, 3), (11, 22, 33)]))
    [(1, 11), (2, 22), (3, 33)]

    The caller should ensure that the dimensions of the input are compatible.
    If the input is empty, no output will be produced.
    """
    # Unpack the rows into a strict zip; presumably _zip_strict is
    # zip(..., strict=True) so ragged rows raise — confirm its definition
    # earlier in this module.
    return _zip_strict(*it)
918
+
919
+
920
def reshape(matrix, cols):
    """Reshape the 2-D input *matrix* to have a column count given by *cols*.

    >>> matrix = [(0, 1), (2, 3), (4, 5)]
    >>> cols = 3
    >>> list(reshape(matrix, cols))
    [(0, 1, 2), (3, 4, 5)]
    """
    # Flatten the rows in order, then regroup into tuples of length *cols*.
    return batched(chain.from_iterable(matrix), cols)
929
+
930
+
931
def matmul(m1, m2):
    """Multiply two matrices.

    >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
    [(49, 80), (41, 60)]

    The caller should ensure that the dimensions of the input matrices are
    compatible with each other.
    """
    # product(m1, transpose(m2)) pairs every row of m1 with every column
    # of m2 in row-major order; each dot product is one output entry, and
    # batched() regroups the flat stream into rows of width n.
    n = len(m2[0])
    return batched(starmap(_sumprod, product(m1, transpose(m2))), n)
942
+
943
+
944
def factor(n):
    """Yield the prime factors of n.

    >>> list(factor(360))
    [2, 2, 2, 3, 3, 5]
    """
    # Trial division by primes up to sqrt(n); any remainder > 1 after the
    # loop is itself prime.
    for p in sieve(math.isqrt(n) + 1):
        while n % p == 0:
            yield p
            n //= p
        if n == 1:
            return
    if n > 1:
        yield n
958
+
959
+
960
def polynomial_eval(coefficients, x):
    """Evaluate a polynomial at a specific value.

    Example: evaluating x^3 - 4 * x^2 - 17 * x + 60 at x = 2.5:

    >>> coefficients = [1, -4, -17, 60]
    >>> x = 2.5
    >>> polynomial_eval(coefficients, x)
    8.125
    """
    count = len(coefficients)
    if not count:
        # Multiplying by zero keeps the result in x's type (e.g. float).
        return x * 0
    # Coefficients are highest-degree first, so pair them with descending
    # powers of x and take the dot product.
    powers = (x**exponent for exponent in reversed(range(count)))
    return _sumprod(coefficients, powers)
975
+
976
+
977
def sum_of_squares(it):
    """Return the sum of the squares of the input values.

    >>> sum_of_squares([10, 20, 30])
    1400
    """
    # tee() duplicates the stream so _sumprod can take the dot product of
    # the input with itself, even for one-shot iterators.
    return _sumprod(*tee(it))
984
+
985
+
986
def polynomial_derivative(coefficients):
    """Compute the first derivative of a polynomial.

    Example: evaluating the derivative of x^3 - 4 * x^2 - 17 * x + 60

    >>> coefficients = [1, -4, -17, 60]
    >>> derivative_coefficients = polynomial_derivative(coefficients)
    >>> derivative_coefficients
    [3, -8, -17]
    """
    # Power rule: d/dx (c * x^p) = c * p * x^(p-1).  Coefficients are
    # highest-degree first, so the exponents run n-1 down to 1; zip stops
    # there, dropping the constant term's derivative.
    degree = len(coefficients)
    return [
        coeff * power
        for coeff, power in zip(coefficients, range(degree - 1, 0, -1))
    ]
999
+
1000
+
1001
def totient(n):
    """Return the count of natural numbers up to *n* that are coprime with *n*.

    >>> totient(9)
    6
    >>> totient(12)
    4
    """
    # Euler's product formula: phi(n) = n * prod(1 - 1/p) over the distinct
    # prime factors p.  factor() yields primes in nondecreasing order, so
    # unique_justseen() suffices to deduplicate them; dividing by p before
    # multiplying keeps everything in exact integer arithmetic.
    for p in unique_justseen(factor(n)):
        n = n // p * (p - 1)

    return n
env-llmeval/lib/python3.10/site-packages/more_itertools/recipes.pyi ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Stubs for more_itertools.recipes"""
2
+ from __future__ import annotations
3
+
4
+ from typing import (
5
+ Any,
6
+ Callable,
7
+ Iterable,
8
+ Iterator,
9
+ overload,
10
+ Sequence,
11
+ Type,
12
+ TypeVar,
13
+ )
14
+
15
+ # Type and type variable definitions
16
+ _T = TypeVar('_T')
17
+ _T1 = TypeVar('_T1')
18
+ _T2 = TypeVar('_T2')
19
+ _U = TypeVar('_U')
20
+
21
+ def take(n: int, iterable: Iterable[_T]) -> list[_T]: ...
22
+ def tabulate(
23
+ function: Callable[[int], _T], start: int = ...
24
+ ) -> Iterator[_T]: ...
25
+ def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
26
+ def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
27
+ @overload
28
+ def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
29
+ @overload
30
+ def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
31
+ def all_equal(iterable: Iterable[_T]) -> bool: ...
32
+ def quantify(
33
+ iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
34
+ ) -> int: ...
35
+ def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
36
+ def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
37
+ def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
38
+ def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ...
39
+ def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
40
+ def repeatfunc(
41
+ func: Callable[..., _U], times: int | None = ..., *args: Any
42
+ ) -> Iterator[_U]: ...
43
+ def pairwise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ...
44
+ def grouper(
45
+ iterable: Iterable[_T],
46
+ n: int,
47
+ incomplete: str = ...,
48
+ fillvalue: _U = ...,
49
+ ) -> Iterator[tuple[_T | _U, ...]]: ...
50
+ def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
51
+ def partition(
52
+ pred: Callable[[_T], object] | None, iterable: Iterable[_T]
53
+ ) -> tuple[Iterator[_T], Iterator[_T]]: ...
54
+ def powerset(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
55
+ def unique_everseen(
56
+ iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
57
+ ) -> Iterator[_T]: ...
58
+ def unique_justseen(
59
+ iterable: Iterable[_T], key: Callable[[_T], object] | None = ...
60
+ ) -> Iterator[_T]: ...
61
+ @overload
62
+ def iter_except(
63
+ func: Callable[[], _T],
64
+ exception: Type[BaseException] | tuple[Type[BaseException], ...],
65
+ first: None = ...,
66
+ ) -> Iterator[_T]: ...
67
+ @overload
68
+ def iter_except(
69
+ func: Callable[[], _T],
70
+ exception: Type[BaseException] | tuple[Type[BaseException], ...],
71
+ first: Callable[[], _U],
72
+ ) -> Iterator[_T | _U]: ...
73
+ @overload
74
+ def first_true(
75
+ iterable: Iterable[_T], *, pred: Callable[[_T], object] | None = ...
76
+ ) -> _T | None: ...
77
+ @overload
78
+ def first_true(
79
+ iterable: Iterable[_T],
80
+ default: _U,
81
+ pred: Callable[[_T], object] | None = ...,
82
+ ) -> _T | _U: ...
83
+ def random_product(
84
+ *args: Iterable[_T], repeat: int = ...
85
+ ) -> tuple[_T, ...]: ...
86
+ def random_permutation(
87
+ iterable: Iterable[_T], r: int | None = ...
88
+ ) -> tuple[_T, ...]: ...
89
+ def random_combination(iterable: Iterable[_T], r: int) -> tuple[_T, ...]: ...
90
+ def random_combination_with_replacement(
91
+ iterable: Iterable[_T], r: int
92
+ ) -> tuple[_T, ...]: ...
93
+ def nth_combination(
94
+ iterable: Iterable[_T], r: int, index: int
95
+ ) -> tuple[_T, ...]: ...
96
+ def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[_T | _U]: ...
97
+ def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
98
+ def before_and_after(
99
+ predicate: Callable[[_T], bool], it: Iterable[_T]
100
+ ) -> tuple[Iterator[_T], Iterator[_T]]: ...
101
+ def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: ...
102
+ def sliding_window(
103
+ iterable: Iterable[_T], n: int
104
+ ) -> Iterator[tuple[_T, ...]]: ...
105
+ def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
106
+ def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
107
+ def iter_index(
108
+ iterable: Iterable[_T],
109
+ value: Any,
110
+ start: int | None = ...,
111
+ stop: int | None = ...,
112
+ ) -> Iterator[int]: ...
113
+ def sieve(n: int) -> Iterator[int]: ...
114
+ def batched(
115
+ iterable: Iterable[_T], n: int, *, strict: bool = False
116
+ ) -> Iterator[tuple[_T]]: ...
117
+ def transpose(
118
+ it: Iterable[Iterable[_T]],
119
+ ) -> Iterator[tuple[_T, ...]]: ...
120
+ def reshape(
121
+ matrix: Iterable[Iterable[_T]], cols: int
122
+ ) -> Iterator[tuple[_T, ...]]: ...
123
+ def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
124
+ def factor(n: int) -> Iterator[int]: ...
125
+ def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
126
+ def sum_of_squares(it: Iterable[_T]) -> _T: ...
127
+ def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...
128
+ def totient(n: int) -> int: ...
env-llmeval/lib/python3.10/site-packages/multidict/__init__.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Multidict implementation.
2
+
3
+ HTTP Headers and URL query string require specific data structure:
4
+ multidict. It behaves mostly like a dict but it can have
5
+ several values for the same key.
6
+ """
7
+
8
+ from ._abc import MultiMapping, MutableMultiMapping
9
+ from ._compat import USE_EXTENSIONS
10
+
11
+ __all__ = (
12
+ "MultiMapping",
13
+ "MutableMultiMapping",
14
+ "MultiDictProxy",
15
+ "CIMultiDictProxy",
16
+ "MultiDict",
17
+ "CIMultiDict",
18
+ "upstr",
19
+ "istr",
20
+ "getversion",
21
+ )
22
+
23
+ __version__ = "6.0.5"
24
+
25
+
26
+ try:
27
+ if not USE_EXTENSIONS:
28
+ raise ImportError
29
+ from ._multidict import (
30
+ CIMultiDict,
31
+ CIMultiDictProxy,
32
+ MultiDict,
33
+ MultiDictProxy,
34
+ getversion,
35
+ istr,
36
+ )
37
+ except ImportError: # pragma: no cover
38
+ from ._multidict_py import (
39
+ CIMultiDict,
40
+ CIMultiDictProxy,
41
+ MultiDict,
42
+ MultiDictProxy,
43
+ getversion,
44
+ istr,
45
+ )
46
+
47
+
48
+ upstr = istr
env-llmeval/lib/python3.10/site-packages/multidict/__init__.pyi ADDED
@@ -0,0 +1,152 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Type stubs for the multidict package (both the C-extension and the
# pure-Python implementations share this interface).
import abc
from typing import (
    Generic,
    Iterable,
    Iterator,
    Mapping,
    MutableMapping,
    TypeVar,
    overload,
)

# Case-insensitive string subclass used for header-style keys.
class istr(str): ...

# Backward-compatibility alias for istr.
upstr = istr

# Keys may be plain str or istr.
_S = str | istr

_T = TypeVar("_T")

_T_co = TypeVar("_T_co", covariant=True)

# Default-value type variable for the get*/pop* overloads.
_D = TypeVar("_D")

class MultiMapping(Mapping[_S, _T_co]):
    @overload
    @abc.abstractmethod
    def getall(self, key: _S) -> list[_T_co]: ...
    @overload
    @abc.abstractmethod
    def getall(self, key: _S, default: _D) -> list[_T_co] | _D: ...
    @overload
    @abc.abstractmethod
    def getone(self, key: _S) -> _T_co: ...
    @overload
    @abc.abstractmethod
    def getone(self, key: _S, default: _D) -> _T_co | _D: ...

# Anything accepted by the MultiDict constructors and extend(): mappings,
# dicts, other multidicts, or iterables of key/value pairs.
_Arg = (
    Mapping[str, _T]
    | Mapping[istr, _T]
    | dict[str, _T]
    | dict[istr, _T]
    | MultiMapping[_T]
    | Iterable[tuple[str, _T]]
    | Iterable[tuple[istr, _T]]
)

class MutableMultiMapping(MultiMapping[_T], MutableMapping[_S, _T], Generic[_T]):
    @abc.abstractmethod
    def add(self, key: _S, value: _T) -> None: ...
    @abc.abstractmethod
    def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
    @overload
    @abc.abstractmethod
    def popone(self, key: _S) -> _T: ...
    @overload
    @abc.abstractmethod
    def popone(self, key: _S, default: _D) -> _T | _D: ...
    @overload
    @abc.abstractmethod
    def popall(self, key: _S) -> list[_T]: ...
    @overload
    @abc.abstractmethod
    def popall(self, key: _S, default: _D) -> list[_T] | _D: ...

# Concrete mutable multidict (case-sensitive keys).
class MultiDict(MutableMultiMapping[_T], Generic[_T]):
    def __init__(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
    def copy(self) -> MultiDict[_T]: ...
    def __getitem__(self, k: _S) -> _T: ...
    def __setitem__(self, k: _S, v: _T) -> None: ...
    def __delitem__(self, v: _S) -> None: ...
    def __iter__(self) -> Iterator[_S]: ...
    def __len__(self) -> int: ...
    @overload
    def getall(self, key: _S) -> list[_T]: ...
    @overload
    def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
    @overload
    def getone(self, key: _S) -> _T: ...
    @overload
    def getone(self, key: _S, default: _D) -> _T | _D: ...
    def add(self, key: _S, value: _T) -> None: ...
    def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
    @overload
    def popone(self, key: _S) -> _T: ...
    @overload
    def popone(self, key: _S, default: _D) -> _T | _D: ...
    @overload
    def popall(self, key: _S) -> list[_T]: ...
    @overload
    def popall(self, key: _S, default: _D) -> list[_T] | _D: ...

# Case-insensitive variant; same interface as MultiDict.
class CIMultiDict(MutableMultiMapping[_T], Generic[_T]):
    def __init__(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
    def copy(self) -> CIMultiDict[_T]: ...
    def __getitem__(self, k: _S) -> _T: ...
    def __setitem__(self, k: _S, v: _T) -> None: ...
    def __delitem__(self, v: _S) -> None: ...
    def __iter__(self) -> Iterator[_S]: ...
    def __len__(self) -> int: ...
    @overload
    def getall(self, key: _S) -> list[_T]: ...
    @overload
    def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
    @overload
    def getone(self, key: _S) -> _T: ...
    @overload
    def getone(self, key: _S, default: _D) -> _T | _D: ...
    def add(self, key: _S, value: _T) -> None: ...
    def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
    @overload
    def popone(self, key: _S) -> _T: ...
    @overload
    def popone(self, key: _S, default: _D) -> _T | _D: ...
    @overload
    def popall(self, key: _S) -> list[_T]: ...
    @overload
    def popall(self, key: _S, default: _D) -> list[_T] | _D: ...

# Read-only view over a MultiDict; copy() returns a mutable MultiDict.
class MultiDictProxy(MultiMapping[_T], Generic[_T]):
    def __init__(self, arg: MultiMapping[_T] | MutableMultiMapping[_T]) -> None: ...
    def copy(self) -> MultiDict[_T]: ...
    def __getitem__(self, k: _S) -> _T: ...
    def __iter__(self) -> Iterator[_S]: ...
    def __len__(self) -> int: ...
    @overload
    def getall(self, key: _S) -> list[_T]: ...
    @overload
    def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
    @overload
    def getone(self, key: _S) -> _T: ...
    @overload
    def getone(self, key: _S, default: _D) -> _T | _D: ...

# Read-only view over a CIMultiDict.
class CIMultiDictProxy(MultiMapping[_T], Generic[_T]):
    def __init__(self, arg: MultiMapping[_T] | MutableMultiMapping[_T]) -> None: ...
    def __getitem__(self, k: _S) -> _T: ...
    def __iter__(self) -> Iterator[_S]: ...
    def __len__(self) -> int: ...
    @overload
    def getall(self, key: _S) -> list[_T]: ...
    @overload
    def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
    @overload
    def getone(self, key: _S) -> _T: ...
    @overload
    def getone(self, key: _S, default: _D) -> _T | _D: ...
    def copy(self) -> CIMultiDict[_T]: ...

# Returns an integer that changes whenever the multidict is mutated.
def getversion(
    md: MultiDict[_T] | CIMultiDict[_T] | MultiDictProxy[_T] | CIMultiDictProxy[_T],
) -> int: ...
env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (857 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_abc.cpython-310.pyc ADDED
Binary file (1.94 kB). View file
 
env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_compat.cpython-310.pyc ADDED
Binary file (466 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_multidict_base.cpython-310.pyc ADDED
Binary file (3.26 kB). View file
 
env-llmeval/lib/python3.10/site-packages/multidict/__pycache__/_multidict_py.cpython-310.pyc ADDED
Binary file (16.4 kB). View file
 
env-llmeval/lib/python3.10/site-packages/multidict/_abc.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ import sys
3
+ import types
4
+ from collections.abc import Mapping, MutableMapping
5
+
6
+
7
class _TypingMeta(abc.ABCMeta):
    # A fake metaclass to satisfy typing deps in runtime
    # basically MultiMapping[str] and other generic-like type instantiations
    # are emulated.
    # Note: real type hints are provided by __init__.pyi stub file
    if sys.version_info >= (3, 9):

        def __getitem__(self, key):
            # Produce a real parameterized alias (e.g. MultiMapping[str]).
            return types.GenericAlias(self, key)

    else:

        def __getitem__(self, key):
            # Older Pythons: subscription is a no-op returning the class.
            return self
21
+
22
+
23
class MultiMapping(Mapping, metaclass=_TypingMeta):
    """Read-only mapping interface that can hold several values per key."""

    @abc.abstractmethod
    def getall(self, key, default=None):
        # Abstract placeholder; concrete implementations return a list of
        # all values stored under *key*.
        raise KeyError

    @abc.abstractmethod
    def getone(self, key, default=None):
        # Abstract placeholder; concrete implementations return the first
        # value stored under *key*.
        raise KeyError
31
+
32
+
33
class MutableMultiMapping(MultiMapping, MutableMapping):
    """Mutable companion of MultiMapping: adds add/extend/popone/popall."""

    @abc.abstractmethod
    def add(self, key, value):
        # Append *value* under *key* without replacing existing values.
        raise NotImplementedError

    @abc.abstractmethod
    def extend(self, *args, **kwargs):
        # Bulk add() from a mapping, pair-iterable, or keyword arguments.
        raise NotImplementedError

    @abc.abstractmethod
    def popone(self, key, default=None):
        # Remove and return the first value stored under *key*.
        raise KeyError

    @abc.abstractmethod
    def popall(self, key, default=None):
        # Remove and return the list of all values stored under *key*.
        raise KeyError
env-llmeval/lib/python3.10/site-packages/multidict/_compat.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import platform

# Escape hatch: set MULTIDICT_NO_EXTENSIONS to force the pure-Python code.
NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS"))

# C extensions are not built/used on PyPy.
PYPY = platform.python_implementation() == "PyPy"

USE_EXTENSIONS = not NO_EXTENSIONS and not PYPY

# Probe the compiled module; fall back cleanly if it is missing or broken.
if USE_EXTENSIONS:
    try:
        from . import _multidict  # noqa
    except ImportError:
        USE_EXTENSIONS = False