applied-ai-018 commited on
Commit
846f311
·
verified ·
1 Parent(s): 54444cc

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. llmeval-env/lib/python3.10/site-packages/async_timeout-4.0.3.dist-info/METADATA +131 -0
  2. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-310.pyc +0 -0
  3. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-310.pyc +0 -0
  4. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_common.cpython-310.pyc +0 -0
  5. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_templating.cpython-310.pyc +0 -0
  6. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_types.cpython-310.pyc +0 -0
  7. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-310.pyc +0 -0
  8. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-310.pyc +0 -0
  9. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-310.pyc +0 -0
  10. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-310.pyc +0 -0
  11. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-310.pyc +0 -0
  12. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-310.pyc +0 -0
  13. llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-310.pyc +0 -0
  14. llmeval-env/lib/python3.10/site-packages/huggingface_hub/templates/datasetcard_template.md +143 -0
  15. llmeval-env/lib/python3.10/site-packages/huggingface_hub/templates/modelcard_template.md +200 -0
  16. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  17. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-310.pyc +0 -0
  18. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-310.pyc +0 -0
  19. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_errors.cpython-310.pyc +0 -0
  20. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_git_credential.cpython-310.pyc +0 -0
  21. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-310.pyc +0 -0
  22. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_pagination.cpython-310.pyc +0 -0
  23. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-310.pyc +0 -0
  24. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-310.pyc +0 -0
  25. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-310.pyc +0 -0
  26. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-310.pyc +0 -0
  27. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/logging.cpython-310.pyc +0 -0
  28. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_cache_assets.py +135 -0
  29. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_datetime.py +62 -0
  30. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_hf_folder.py +96 -0
  31. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_runtime.py +382 -0
  32. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_safetensors.py +111 -0
  33. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_typing.py +50 -0
  34. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/logging.py +182 -0
  35. llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/sha.py +29 -0
  36. llmeval-env/lib/python3.10/site-packages/nltk/lm/__pycache__/__init__.cpython-310.pyc +0 -0
  37. llmeval-env/lib/python3.10/site-packages/nltk/lm/__pycache__/util.cpython-310.pyc +0 -0
  38. llmeval-env/lib/python3.10/site-packages/nltk/lm/__pycache__/vocabulary.cpython-310.pyc +0 -0
  39. llmeval-env/lib/python3.10/site-packages/scipy/__config__.py +161 -0
  40. llmeval-env/lib/python3.10/site-packages/scipy/__init__.py +141 -0
  41. llmeval-env/lib/python3.10/site-packages/scipy/_distributor_init.py +18 -0
  42. llmeval-env/lib/python3.10/site-packages/scipy/cluster/__init__.py +31 -0
  43. llmeval-env/lib/python3.10/site-packages/scipy/cluster/_hierarchy.cpython-310-x86_64-linux-gnu.so +0 -0
  44. llmeval-env/lib/python3.10/site-packages/scipy/cluster/_optimal_leaf_ordering.cpython-310-x86_64-linux-gnu.so +0 -0
  45. llmeval-env/lib/python3.10/site-packages/scipy/cluster/_vq.cpython-310-x86_64-linux-gnu.so +0 -0
  46. llmeval-env/lib/python3.10/site-packages/scipy/cluster/hierarchy.py +0 -0
  47. llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__init__.py +0 -0
  48. llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  49. llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__pycache__/test_disjoint_set.cpython-310.pyc +0 -0
  50. llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__pycache__/test_hierarchy.cpython-310.pyc +0 -0
llmeval-env/lib/python3.10/site-packages/async_timeout-4.0.3.dist-info/METADATA ADDED
@@ -0,0 +1,131 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: async-timeout
3
+ Version: 4.0.3
4
+ Summary: Timeout context manager for asyncio programs
5
+ Home-page: https://github.com/aio-libs/async-timeout
6
+ Author: Andrew Svetlov <[email protected]>
7
+ Author-email: [email protected]
8
+ License: Apache 2
9
+ Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
10
+ Project-URL: CI: GitHub Actions, https://github.com/aio-libs/async-timeout/actions
11
+ Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/async-timeout
12
+ Project-URL: GitHub: issues, https://github.com/aio-libs/async-timeout/issues
13
+ Project-URL: GitHub: repo, https://github.com/aio-libs/async-timeout
14
+ Classifier: Development Status :: 5 - Production/Stable
15
+ Classifier: Topic :: Software Development :: Libraries
16
+ Classifier: Framework :: AsyncIO
17
+ Classifier: Intended Audience :: Developers
18
+ Classifier: License :: OSI Approved :: Apache Software License
19
+ Classifier: Programming Language :: Python
20
+ Classifier: Programming Language :: Python :: 3
21
+ Classifier: Programming Language :: Python :: 3 :: Only
22
+ Classifier: Programming Language :: Python :: 3.7
23
+ Classifier: Programming Language :: Python :: 3.8
24
+ Classifier: Programming Language :: Python :: 3.9
25
+ Classifier: Programming Language :: Python :: 3.10
26
+ Classifier: Programming Language :: Python :: 3.11
27
+ Requires-Python: >=3.7
28
+ Description-Content-Type: text/x-rst
29
+ License-File: LICENSE
30
+ Requires-Dist: typing-extensions >=3.6.5 ; python_version < "3.8"
31
+
32
+ async-timeout
33
+ =============
34
+ .. image:: https://travis-ci.com/aio-libs/async-timeout.svg?branch=master
35
+ :target: https://travis-ci.com/aio-libs/async-timeout
36
+ .. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg
37
+ :target: https://codecov.io/gh/aio-libs/async-timeout
38
+ .. image:: https://img.shields.io/pypi/v/async-timeout.svg
39
+ :target: https://pypi.python.org/pypi/async-timeout
40
+ .. image:: https://badges.gitter.im/Join%20Chat.svg
41
+ :target: https://gitter.im/aio-libs/Lobby
42
+ :alt: Chat on Gitter
43
+
44
+ asyncio-compatible timeout context manager.
45
+
46
+
47
+ Usage example
48
+ -------------
49
+
50
+
51
+ The context manager is useful in cases when you want to apply timeout
52
+ logic around block of code or in cases when ``asyncio.wait_for()`` is
53
+ not suitable. Also it's much faster than ``asyncio.wait_for()``
54
+ because ``timeout`` doesn't create a new task.
55
+
56
+ The ``timeout(delay, *, loop=None)`` call returns a context manager
57
+ that cancels a block on *timeout* expiring::
58
+
59
+ from async_timeout import timeout
60
+ async with timeout(1.5):
61
+ await inner()
62
+
63
+ 1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing
64
+ happens.
65
+ 2. Otherwise ``inner()`` is cancelled internally by sending
66
+ ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is
67
+ raised outside of context manager scope.
68
+
69
+ *timeout* parameter could be ``None`` for skipping timeout functionality.
70
+
71
+
72
+ Alternatively, ``timeout_at(when)`` can be used for scheduling
73
+ at the absolute time::
74
+
75
+ loop = asyncio.get_event_loop()
76
+ now = loop.time()
77
+
78
+ async with timeout_at(now + 1.5):
79
+ await inner()
80
+
81
+
82
+ Please note: it is not POSIX time but a time with
83
+ undefined starting base, e.g. the time of the system power on.
84
+
85
+
86
+ Context manager has ``.expired`` property for check if timeout happens
87
+ exactly in context manager::
88
+
89
+ async with timeout(1.5) as cm:
90
+ await inner()
91
+ print(cm.expired)
92
+
93
+ The property is ``True`` if ``inner()`` execution is cancelled by
94
+ timeout context manager.
95
+
96
+ If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired``
97
+ is ``False``.
98
+
99
+ The scheduled deadline time is available as ``.deadline`` property::
100
+
101
+ async with timeout(1.5) as cm:
102
+ cm.deadline
103
+
104
+ Not finished yet timeout can be rescheduled by ``shift_by()``
105
+ or ``shift_to()`` methods::
106
+
107
+ async with timeout(1.5) as cm:
108
+ cm.shift(1) # add another second on waiting
109
+ cm.update(loop.time() + 5) # reschedule to now+5 seconds
110
+
111
+ Rescheduling is forbidden if the timeout is expired or after exit from ``async with``
112
+ code block.
113
+
114
+
115
+ Installation
116
+ ------------
117
+
118
+ ::
119
+
120
+ $ pip install async-timeout
121
+
122
+ The library is Python 3 only!
123
+
124
+
125
+
126
+ Authors and License
127
+ -------------------
128
+
129
+ The module is written by Andrew Svetlov.
130
+
131
+ It's *Apache 2* licensed and freely available.
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (198 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_client.cpython-310.pyc ADDED
Binary file (98.7 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_common.cpython-310.pyc ADDED
Binary file (12.3 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_templating.cpython-310.pyc ADDED
Binary file (3.72 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/__pycache__/_types.cpython-310.pyc ADDED
Binary file (1.51 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/base.cpython-310.pyc ADDED
Binary file (4.61 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-310.pyc ADDED
Binary file (7.05 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-310.pyc ADDED
Binary file (724 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-310.pyc ADDED
Binary file (1.42 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-310.pyc ADDED
Binary file (1.62 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-310.pyc ADDED
Binary file (983 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-310.pyc ADDED
Binary file (1.43 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/templates/datasetcard_template.md ADDED
@@ -0,0 +1,143 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ # For reference on dataset card metadata, see the spec: https://github.com/huggingface/hub-docs/blob/main/datasetcard.md?plain=1
3
+ # Doc / guide: https://huggingface.co/docs/hub/datasets-cards
4
+ {{ card_data }}
5
+ ---
6
+
7
+ # Dataset Card for {{ pretty_name | default("Dataset Name", true) }}
8
+
9
+ <!-- Provide a quick summary of the dataset. -->
10
+
11
+ {{ dataset_summary | default("", true) }}
12
+
13
+ ## Dataset Details
14
+
15
+ ### Dataset Description
16
+
17
+ <!-- Provide a longer summary of what this dataset is. -->
18
+
19
+ {{ dataset_description | default("", true) }}
20
+
21
+ - **Curated by:** {{ curators | default("[More Information Needed]", true)}}
22
+ - **Funded by [optional]:** {{ funded_by | default("[More Information Needed]", true)}}
23
+ - **Shared by [optional]:** {{ shared_by | default("[More Information Needed]", true)}}
24
+ - **Language(s) (NLP):** {{ language | default("[More Information Needed]", true)}}
25
+ - **License:** {{ license | default("[More Information Needed]", true)}}
26
+
27
+ ### Dataset Sources [optional]
28
+
29
+ <!-- Provide the basic links for the dataset. -->
30
+
31
+ - **Repository:** {{ repo | default("[More Information Needed]", true)}}
32
+ - **Paper [optional]:** {{ paper | default("[More Information Needed]", true)}}
33
+ - **Demo [optional]:** {{ demo | default("[More Information Needed]", true)}}
34
+
35
+ ## Uses
36
+
37
+ <!-- Address questions around how the dataset is intended to be used. -->
38
+
39
+ ### Direct Use
40
+
41
+ <!-- This section describes suitable use cases for the dataset. -->
42
+
43
+ {{ direct_use | default("[More Information Needed]", true)}}
44
+
45
+ ### Out-of-Scope Use
46
+
47
+ <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
48
+
49
+ {{ out_of_scope_use | default("[More Information Needed]", true)}}
50
+
51
+ ## Dataset Structure
52
+
53
+ <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
54
+
55
+ {{ dataset_structure | default("[More Information Needed]", true)}}
56
+
57
+ ## Dataset Creation
58
+
59
+ ### Curation Rationale
60
+
61
+ <!-- Motivation for the creation of this dataset. -->
62
+
63
+ {{ curation_rationale_section | default("[More Information Needed]", true)}}
64
+
65
+ ### Source Data
66
+
67
+ <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
68
+
69
+ #### Data Collection and Processing
70
+
71
+ <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
72
+
73
+ {{ data_collection_and_processing_section | default("[More Information Needed]", true)}}
74
+
75
+ #### Who are the source data producers?
76
+
77
+ <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
78
+
79
+ {{ source_data_producers_section | default("[More Information Needed]", true)}}
80
+
81
+ ### Annotations [optional]
82
+
83
+ <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
84
+
85
+ #### Annotation process
86
+
87
+ <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
88
+
89
+ {{ annotation_process_section | default("[More Information Needed]", true)}}
90
+
91
+ #### Who are the annotators?
92
+
93
+ <!-- This section describes the people or systems who created the annotations. -->
94
+
95
+ {{ who_are_annotators_section | default("[More Information Needed]", true)}}
96
+
97
+ #### Personal and Sensitive Information
98
+
99
+ <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
100
+
101
+ {{ personal_and_sensitive_information | default("[More Information Needed]", true)}}
102
+
103
+ ## Bias, Risks, and Limitations
104
+
105
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
106
+
107
+ {{ bias_risks_limitations | default("[More Information Needed]", true)}}
108
+
109
+ ### Recommendations
110
+
111
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
112
+
113
+ {{ bias_recommendations | default("Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.", true)}}
114
+
115
+ ## Citation [optional]
116
+
117
+ <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
118
+
119
+ **BibTeX:**
120
+
121
+ {{ citation_bibtex | default("[More Information Needed]", true)}}
122
+
123
+ **APA:**
124
+
125
+ {{ citation_apa | default("[More Information Needed]", true)}}
126
+
127
+ ## Glossary [optional]
128
+
129
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
130
+
131
+ {{ glossary | default("[More Information Needed]", true)}}
132
+
133
+ ## More Information [optional]
134
+
135
+ {{ more_information | default("[More Information Needed]", true)}}
136
+
137
+ ## Dataset Card Authors [optional]
138
+
139
+ {{ dataset_card_authors | default("[More Information Needed]", true)}}
140
+
141
+ ## Dataset Card Contact
142
+
143
+ {{ dataset_card_contact | default("[More Information Needed]", true)}}
llmeval-env/lib/python3.10/site-packages/huggingface_hub/templates/modelcard_template.md ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ # For reference on model card metadata, see the spec: https://github.com/huggingface/hub-docs/blob/main/modelcard.md?plain=1
3
+ # Doc / guide: https://huggingface.co/docs/hub/model-cards
4
+ {{ card_data }}
5
+ ---
6
+
7
+ # Model Card for {{ model_id | default("Model ID", true) }}
8
+
9
+ <!-- Provide a quick summary of what the model is/does. -->
10
+
11
+ {{ model_summary | default("", true) }}
12
+
13
+ ## Model Details
14
+
15
+ ### Model Description
16
+
17
+ <!-- Provide a longer summary of what this model is. -->
18
+
19
+ {{ model_description | default("", true) }}
20
+
21
+ - **Developed by:** {{ developers | default("[More Information Needed]", true)}}
22
+ - **Funded by [optional]:** {{ funded_by | default("[More Information Needed]", true)}}
23
+ - **Shared by [optional]:** {{ shared_by | default("[More Information Needed]", true)}}
24
+ - **Model type:** {{ model_type | default("[More Information Needed]", true)}}
25
+ - **Language(s) (NLP):** {{ language | default("[More Information Needed]", true)}}
26
+ - **License:** {{ license | default("[More Information Needed]", true)}}
27
+ - **Finetuned from model [optional]:** {{ base_model | default("[More Information Needed]", true)}}
28
+
29
+ ### Model Sources [optional]
30
+
31
+ <!-- Provide the basic links for the model. -->
32
+
33
+ - **Repository:** {{ repo | default("[More Information Needed]", true)}}
34
+ - **Paper [optional]:** {{ paper | default("[More Information Needed]", true)}}
35
+ - **Demo [optional]:** {{ demo | default("[More Information Needed]", true)}}
36
+
37
+ ## Uses
38
+
39
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
40
+
41
+ ### Direct Use
42
+
43
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
44
+
45
+ {{ direct_use | default("[More Information Needed]", true)}}
46
+
47
+ ### Downstream Use [optional]
48
+
49
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
50
+
51
+ {{ downstream_use | default("[More Information Needed]", true)}}
52
+
53
+ ### Out-of-Scope Use
54
+
55
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
56
+
57
+ {{ out_of_scope_use | default("[More Information Needed]", true)}}
58
+
59
+ ## Bias, Risks, and Limitations
60
+
61
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
62
+
63
+ {{ bias_risks_limitations | default("[More Information Needed]", true)}}
64
+
65
+ ### Recommendations
66
+
67
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
68
+
69
+ {{ bias_recommendations | default("Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.", true)}}
70
+
71
+ ## How to Get Started with the Model
72
+
73
+ Use the code below to get started with the model.
74
+
75
+ {{ get_started_code | default("[More Information Needed]", true)}}
76
+
77
+ ## Training Details
78
+
79
+ ### Training Data
80
+
81
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
82
+
83
+ {{ training_data | default("[More Information Needed]", true)}}
84
+
85
+ ### Training Procedure
86
+
87
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
88
+
89
+ #### Preprocessing [optional]
90
+
91
+ {{ preprocessing | default("[More Information Needed]", true)}}
92
+
93
+
94
+ #### Training Hyperparameters
95
+
96
+ - **Training regime:** {{ training_regime | default("[More Information Needed]", true)}} <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
97
+
98
+ #### Speeds, Sizes, Times [optional]
99
+
100
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
101
+
102
+ {{ speeds_sizes_times | default("[More Information Needed]", true)}}
103
+
104
+ ## Evaluation
105
+
106
+ <!-- This section describes the evaluation protocols and provides the results. -->
107
+
108
+ ### Testing Data, Factors & Metrics
109
+
110
+ #### Testing Data
111
+
112
+ <!-- This should link to a Dataset Card if possible. -->
113
+
114
+ {{ testing_data | default("[More Information Needed]", true)}}
115
+
116
+ #### Factors
117
+
118
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
119
+
120
+ {{ testing_factors | default("[More Information Needed]", true)}}
121
+
122
+ #### Metrics
123
+
124
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
125
+
126
+ {{ testing_metrics | default("[More Information Needed]", true)}}
127
+
128
+ ### Results
129
+
130
+ {{ results | default("[More Information Needed]", true)}}
131
+
132
+ #### Summary
133
+
134
+ {{ results_summary | default("", true) }}
135
+
136
+ ## Model Examination [optional]
137
+
138
+ <!-- Relevant interpretability work for the model goes here -->
139
+
140
+ {{ model_examination | default("[More Information Needed]", true)}}
141
+
142
+ ## Environmental Impact
143
+
144
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
145
+
146
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
147
+
148
+ - **Hardware Type:** {{ hardware_type | default("[More Information Needed]", true)}}
149
+ - **Hours used:** {{ hours_used | default("[More Information Needed]", true)}}
150
+ - **Cloud Provider:** {{ cloud_provider | default("[More Information Needed]", true)}}
151
+ - **Compute Region:** {{ cloud_region | default("[More Information Needed]", true)}}
152
+ - **Carbon Emitted:** {{ co2_emitted | default("[More Information Needed]", true)}}
153
+
154
+ ## Technical Specifications [optional]
155
+
156
+ ### Model Architecture and Objective
157
+
158
+ {{ model_specs | default("[More Information Needed]", true)}}
159
+
160
+ ### Compute Infrastructure
161
+
162
+ {{ compute_infrastructure | default("[More Information Needed]", true)}}
163
+
164
+ #### Hardware
165
+
166
+ {{ hardware_requirements | default("[More Information Needed]", true)}}
167
+
168
+ #### Software
169
+
170
+ {{ software | default("[More Information Needed]", true)}}
171
+
172
+ ## Citation [optional]
173
+
174
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
175
+
176
+ **BibTeX:**
177
+
178
+ {{ citation_bibtex | default("[More Information Needed]", true)}}
179
+
180
+ **APA:**
181
+
182
+ {{ citation_apa | default("[More Information Needed]", true)}}
183
+
184
+ ## Glossary [optional]
185
+
186
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
187
+
188
+ {{ glossary | default("[More Information Needed]", true)}}
189
+
190
+ ## More Information [optional]
191
+
192
+ {{ more_information | default("[More Information Needed]", true)}}
193
+
194
+ ## Model Card Authors [optional]
195
+
196
+ {{ model_card_authors | default("[More Information Needed]", true)}}
197
+
198
+ ## Model Card Contact
199
+
200
+ {{ model_card_contact | default("[More Information Needed]", true)}}
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (3.67 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_chunk_utils.cpython-310.pyc ADDED
Binary file (1.73 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_datetime.cpython-310.pyc ADDED
Binary file (1.75 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_errors.cpython-310.pyc ADDED
Binary file (13.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_git_credential.cpython-310.pyc ADDED
Binary file (4.13 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_http.cpython-310.pyc ADDED
Binary file (11.3 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_pagination.cpython-310.pyc ADDED
Binary file (1.44 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_subprocess.cpython-310.pyc ADDED
Binary file (3.94 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_telemetry.cpython-310.pyc ADDED
Binary file (4.41 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/_typing.cpython-310.pyc ADDED
Binary file (1.73 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-310.pyc ADDED
Binary file (428 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/__pycache__/logging.cpython-310.pyc ADDED
Binary file (4.65 kB). View file
 
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_cache_assets.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2019-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ from pathlib import Path
16
+ from typing import Union
17
+
18
+ from ..constants import HF_ASSETS_CACHE
19
+
20
+
21
+ def cached_assets_path(
22
+ library_name: str,
23
+ namespace: str = "default",
24
+ subfolder: str = "default",
25
+ *,
26
+ assets_dir: Union[str, Path, None] = None,
27
+ ):
28
+ """Return a folder path to cache arbitrary files.
29
+
30
+ `huggingface_hub` provides a canonical folder path to store assets. This is the
31
+ recommended way to integrate cache in a downstream library as it will benefit from
32
+ the builtins tools to scan and delete the cache properly.
33
+
34
+ The distinction is made between files cached from the Hub and assets. Files from the
35
+ Hub are cached in a git-aware manner and entirely managed by `huggingface_hub`. See
36
+ [related documentation](https://huggingface.co/docs/huggingface_hub/how-to-cache).
37
+ All other files that a downstream library caches are considered to be "assets"
38
+ (files downloaded from external sources, extracted from a .tar archive, preprocessed
39
+ for training,...).
40
+
41
+ Once the folder path is generated, it is guaranteed to exist and to be a directory.
42
+ The path is based on 3 levels of depth: the library name, a namespace and a
43
+ subfolder. Those 3 levels grants flexibility while allowing `huggingface_hub` to
44
+ expect folders when scanning/deleting parts of the assets cache. Within a library,
45
+ it is expected that all namespaces share the same subset of subfolder names but this
46
+ is not a mandatory rule. The downstream library has then full control on which file
47
+ structure to adopt within its cache. Namespace and subfolder are optional (would
48
+ default to a `"default/"` subfolder) but library name is mandatory as we want every
49
+ downstream library to manage its own cache.
50
+
51
+ Expected tree:
52
+ ```text
53
+ assets/
54
+ └── datasets/
55
+ │ ├── SQuAD/
56
+ │ │ ├── downloaded/
57
+ │ │ ├── extracted/
58
+ │ │ └── processed/
59
+ │ ├── Helsinki-NLP--tatoeba_mt/
60
+ │ ├── downloaded/
61
+ │ ├── extracted/
62
+ │ └── processed/
63
+ └── transformers/
64
+ ├── default/
65
+ │ ├── something/
66
+ ├── bert-base-cased/
67
+ │ ├── default/
68
+ │ └── training/
69
+ hub/
70
+ └── models--julien-c--EsperBERTo-small/
71
+ ├── blobs/
72
+ │ ├── (...)
73
+ │ ├── (...)
74
+ ├── refs/
75
+ │ └── (...)
76
+ └── [ 128] snapshots/
77
+ ├── 2439f60ef33a0d46d85da5001d52aeda5b00ce9f/
78
+ │ ├── (...)
79
+ └── bbc77c8132af1cc5cf678da3f1ddf2de43606d48/
80
+ └── (...)
81
+ ```
82
+
83
+
84
+ Args:
85
+ library_name (`str`):
86
+ Name of the library that will manage the cache folder. Example: `"dataset"`.
87
+ namespace (`str`, *optional*, defaults to "default"):
88
+ Namespace to which the data belongs. Example: `"SQuAD"`.
89
+ subfolder (`str`, *optional*, defaults to "default"):
90
+ Subfolder in which the data will be stored. Example: `extracted`.
91
+ assets_dir (`str`, `Path`, *optional*):
92
+ Path to the folder where assets are cached. This must not be the same folder
93
+ where Hub files are cached. Defaults to `HF_HOME / "assets"` if not provided.
94
+ Can also be set with `HF_ASSETS_CACHE` environment variable.
95
+
96
+ Returns:
97
+ Path to the cache folder (`Path`).
98
+
99
+ Example:
100
+ ```py
101
+ >>> from huggingface_hub import cached_assets_path
102
+
103
+ >>> cached_assets_path(library_name="datasets", namespace="SQuAD", subfolder="download")
104
+ PosixPath('/home/wauplin/.cache/huggingface/extra/datasets/SQuAD/download')
105
+
106
+ >>> cached_assets_path(library_name="datasets", namespace="SQuAD", subfolder="extracted")
107
+ PosixPath('/home/wauplin/.cache/huggingface/extra/datasets/SQuAD/extracted')
108
+
109
+ >>> cached_assets_path(library_name="datasets", namespace="Helsinki-NLP/tatoeba_mt")
110
+ PosixPath('/home/wauplin/.cache/huggingface/extra/datasets/Helsinki-NLP--tatoeba_mt/default')
111
+
112
+ >>> cached_assets_path(library_name="datasets", assets_dir="/tmp/tmp123456")
113
+ PosixPath('/tmp/tmp123456/datasets/default/default')
114
+ ```
115
+ """
116
+ # Resolve assets_dir
117
+ if assets_dir is None:
118
+ assets_dir = HF_ASSETS_CACHE
119
+ assets_dir = Path(assets_dir).expanduser().resolve()
120
+
121
+ # Avoid names that could create path issues
122
+ for part in (" ", "/", "\\"):
123
+ library_name = library_name.replace(part, "--")
124
+ namespace = namespace.replace(part, "--")
125
+ subfolder = subfolder.replace(part, "--")
126
+
127
+ # Path to subfolder is created
128
+ path = assets_dir / library_name / namespace / subfolder
129
+ try:
130
+ path.mkdir(exist_ok=True, parents=True)
131
+ except (FileExistsError, NotADirectoryError):
132
+ raise ValueError(f"Corrupted assets folder: cannot create directory because of an existing file ({path}).")
133
+
134
+ # Return
135
+ return path
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_datetime.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Contains utilities to handle datetimes in Huggingface Hub."""
16
+
17
+ from datetime import datetime, timezone
18
+
19
+
20
def parse_datetime(date_string: str) -> datetime:
    """
    Parse a `date_string` returned by the Hub server into a timezone-aware datetime.

    This is a deliberately weak parser: it supports a single, fixed format
    ('%Y-%m-%dT%H:%M:%S.%fZ') that the server is expected to always use. Relying
    only on the standard library avoids an external dependency (python-dateutil).
    See https://github.com/huggingface/huggingface_hub/pull/999 for the rationale.

    Example:
    ```py
    > parse_datetime('2022-08-19T07:19:38.123Z')
    datetime.datetime(2022, 8, 19, 7, 19, 38, 123000, tzinfo=timezone.utc)
    ```

    Args:
        date_string (`str`):
            Datetime string following the '%Y-%m-%dT%H:%M:%S.%fZ' pattern.

    Returns:
        A timezone-aware `datetime` object (UTC).

    Raises:
        `ValueError`: If `date_string` does not match the expected pattern.
    """
    # A 30-character string means nanosecond precision (9 fractional digits).
    # `%f` only handles up to microseconds, so drop the last 3 digits first.
    if len(date_string) == 30:
        date_string = date_string[:-4] + "Z"
    # The trailing "Z" means "UTC"; strptime does not attach a timezone, so we
    # set it explicitly afterwards. Taken from https://stackoverflow.com/a/3168394.
    try:
        parsed = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S.%fZ")
    except ValueError as err:
        raise ValueError(
            f"Cannot parse '{date_string}' as a datetime. Date string is expected to"
            " follow '%Y-%m-%dT%H:%M:%S.%fZ' pattern."
        ) from err
    return parsed.replace(tzinfo=timezone.utc)
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_hf_folder.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Contain helper class to retrieve/store token from/to local cache."""
16
+
17
+ import warnings
18
+ from pathlib import Path
19
+ from typing import Optional
20
+
21
+ from .. import constants
22
+ from ._token import get_token
23
+
24
+
25
class HfFolder:
    """Legacy helper to read/write the user token from/to the local token file.

    Kept for backward compatibility; `huggingface_hub.login` / `get_token` /
    `logout` are the preferred entry points (see TODO markers below).
    """

    # Current token file location, configurable via `HF_HOME` (see constants).
    path_token = Path(constants.HF_TOKEN_PATH)
    # Private attribute. Will be removed in v0.15
    _old_path_token = Path(constants._OLD_HF_TOKEN_PATH)

    # TODO: deprecate when adapted in transformers/datasets/gradio
    # @_deprecate_method(version="1.0", message="Use `huggingface_hub.login` instead.")
    @classmethod
    def save_token(cls, token: str) -> None:
        """
        Save token, creating folder as needed.

        Token is saved in the huggingface home folder. You can configure it by setting
        the `HF_HOME` environment variable.

        Args:
            token (`str`):
                The token to save to the [`HfFolder`]
        """
        cls.path_token.parent.mkdir(parents=True, exist_ok=True)
        cls.path_token.write_text(token)

    # TODO: deprecate when adapted in transformers/datasets/gradio
    # @_deprecate_method(version="1.0", message="Use `huggingface_hub.get_token` instead.")
    @classmethod
    def get_token(cls) -> Optional[str]:
        """
        Get token or None if not existent.

        This method is deprecated in favor of [`huggingface_hub.get_token`] but is kept for backward compatibility.
        Its behavior is the same as [`huggingface_hub.get_token`].

        Returns:
            `str` or `None`: The token, `None` if it doesn't exist.
        """
        # 0. Check if token exist in old path but not new location
        # Best-effort migration: any failure (e.g. PermissionError) is ignored so
        # that reading the token never raises because of the legacy file.
        try:
            cls._copy_to_new_path_and_warn()
        except Exception:  # if not possible (e.g. PermissionError), do not raise
            pass

        return get_token()

    # TODO: deprecate when adapted in transformers/datasets/gradio
    # @_deprecate_method(version="1.0", message="Use `huggingface_hub.logout` instead.")
    @classmethod
    def delete_token(cls) -> None:
        """
        Deletes the token from storage. Does not fail if token does not exist.
        """
        # Remove both the current and the legacy token files; each deletion is
        # independent so a missing file never aborts the other removal.
        try:
            cls.path_token.unlink()
        except FileNotFoundError:
            pass

        try:
            cls._old_path_token.unlink()
        except FileNotFoundError:
            pass

    @classmethod
    def _copy_to_new_path_and_warn(cls):
        # Migrate the token from the legacy path to the new one (only when the
        # new file does not exist yet) and tell the user about it.
        if cls._old_path_token.exists() and not cls.path_token.exists():
            cls.save_token(cls._old_path_token.read_text())
            warnings.warn(
                f"A token has been found in `{cls._old_path_token}`. This is the old"
                " path where tokens were stored. The new location is"
                f" `{cls.path_token}` which is configurable using `HF_HOME` environment"
                " variable. Your token has been copied to this new location. You can"
                " now safely delete the old token file manually or use"
                " `huggingface-cli logout`."
            )
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_runtime.py ADDED
@@ -0,0 +1,382 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Check presence of installed packages at runtime."""
16
+
17
+ import importlib.metadata
18
+ import platform
19
+ import sys
20
+ import warnings
21
+ from typing import Any, Dict
22
+
23
+ from .. import __version__, constants
24
+
25
+
26
# Interpreter version, e.g. "3.10.12" (a local "+" build suffix is stripped).
_PY_VERSION: str = sys.version.split()[0].rstrip("+")

# Maps each candidate name to the version of the first matching installed
# distribution, or "N/A" when none is found. Populated once at import time by
# the loop below.
_package_versions = {}

# Candidate name -> distribution names to probe. Some candidates (tensorflow)
# are published under several distribution names.
# NOTE(review): most values are sets but "tensorflow" is a tuple — both iterate
# fine, though the inconsistency looks accidental.
_CANDIDATES = {
    "aiohttp": {"aiohttp"},
    "fastai": {"fastai"},
    "fastapi": {"fastapi"},
    "fastcore": {"fastcore"},
    "gradio": {"gradio"},
    "graphviz": {"graphviz"},
    "hf_transfer": {"hf_transfer"},
    "jinja": {"Jinja2"},
    "keras": {"keras"},
    "minijinja": {"minijinja"},
    "numpy": {"numpy"},
    "pillow": {"Pillow"},
    "pydantic": {"pydantic"},
    "pydot": {"pydot"},
    "safetensors": {"safetensors"},
    "tensorboard": {"tensorboardX"},
    "tensorflow": (
        "tensorflow",
        "tensorflow-cpu",
        "tensorflow-gpu",
        "tf-nightly",
        "tf-nightly-cpu",
        "tf-nightly-gpu",
        "intel-tensorflow",
        "intel-tensorflow-avx512",
        "tensorflow-rocm",
        "tensorflow-macos",
    ),
    "torch": {"torch"},
}

# Check once at runtime
for candidate_name, package_names in _CANDIDATES.items():
    _package_versions[candidate_name] = "N/A"
    for name in package_names:
        try:
            _package_versions[candidate_name] = importlib.metadata.version(name)
            break  # first installed distribution wins
        except importlib.metadata.PackageNotFoundError:
            pass
71
+
72
+
73
def _get_version(package_name: str) -> str:
    """Return the version detected at import time for `package_name`, or "N/A"."""
    return _package_versions.get(package_name, "N/A")


def is_package_available(package_name: str) -> bool:
    """Return `True` if `package_name` was found installed at import time."""
    version = _get_version(package_name)
    return version != "N/A"
79
+
80
+
81
# Python
def get_python_version() -> str:
    # Cached at import time; see `_PY_VERSION` above.
    return _PY_VERSION


# Huggingface Hub
def get_hf_hub_version() -> str:
    # Version of the `huggingface_hub` package itself.
    return __version__
89
+
90
+
91
# The helpers below are thin wrappers around `is_package_available` /
# `_get_version`: one `is_*_available()` / `get_*_version()` pair per optional
# dependency probed in `_CANDIDATES`.

# aiohttp
def is_aiohttp_available() -> bool:
    return is_package_available("aiohttp")


def get_aiohttp_version() -> str:
    return _get_version("aiohttp")


# FastAI
def is_fastai_available() -> bool:
    return is_package_available("fastai")


def get_fastai_version() -> str:
    return _get_version("fastai")


# FastAPI
def is_fastapi_available() -> bool:
    return is_package_available("fastapi")


def get_fastapi_version() -> str:
    return _get_version("fastapi")


# Fastcore
def is_fastcore_available() -> bool:
    return is_package_available("fastcore")


def get_fastcore_version() -> str:
    return _get_version("fastcore")


# Gradio (original comment said "FastAI" — copy-paste slip)
def is_gradio_available() -> bool:
    return is_package_available("gradio")


def get_gradio_version() -> str:
    return _get_version("gradio")


# Graphviz
def is_graphviz_available() -> bool:
    return is_package_available("graphviz")


def get_graphviz_version() -> str:
    return _get_version("graphviz")


# hf_transfer
def is_hf_transfer_available() -> bool:
    return is_package_available("hf_transfer")


def get_hf_transfer_version() -> str:
    return _get_version("hf_transfer")


# keras
def is_keras_available() -> bool:
    return is_package_available("keras")


def get_keras_version() -> str:
    return _get_version("keras")


# Minijinja
def is_minijinja_available() -> bool:
    return is_package_available("minijinja")


def get_minijinja_version() -> str:
    return _get_version("minijinja")


# Numpy
def is_numpy_available() -> bool:
    return is_package_available("numpy")


def get_numpy_version() -> str:
    return _get_version("numpy")


# Jinja ("jinja" resolves to the `Jinja2` distribution, see `_CANDIDATES`)
def is_jinja_available() -> bool:
    return is_package_available("jinja")


def get_jinja_version() -> str:
    return _get_version("jinja")


# Pillow ("pillow" resolves to the `Pillow` distribution, see `_CANDIDATES`)
def is_pillow_available() -> bool:
    return is_package_available("pillow")


def get_pillow_version() -> str:
    return _get_version("pillow")
197
+
198
+
199
# Pydantic
def is_pydantic_available() -> bool:
    """Return `True` if pydantic is installed AND actually importable.

    Returns:
        `bool`: `False` when pydantic is absent or broken (a warning is emitted
        in the latter case), `True` otherwise.
    """
    if not is_package_available("pydantic"):
        return False
    # For Pydantic, we add an extra check to test whether it is correctly installed or not. If both pydantic 2.x and
    # typing_extensions<=4.5.0 are installed, then pydantic will fail at import time. This should not happen when
    # it is installed with `pip install huggingface_hub[inference]` but it can happen when it is installed manually
    # by the user in an environment that we don't control.
    #
    # Usually we won't need to do this kind of check on optional dependencies. However, pydantic is a special case
    # as it is automatically imported when doing `from huggingface_hub import ...` even if the user doesn't use it.
    #
    # See https://github.com/huggingface/huggingface_hub/pull/1829 for more details.
    try:
        from pydantic import validator  # noqa: F401
    except ImportError as e:
        # Example: "ImportError: cannot import name 'TypeAliasType' from 'typing_extensions'"
        # Bug fix: the message was previously a plain (non-f) string containing a
        # literal "{e}" placeholder and the exception was never captured. Bind the
        # exception and interpolate it so the warning is actionable.
        warnings.warn(
            "Pydantic is installed but cannot be imported. Please check your installation. `huggingface_hub` will "
            f"default to not using Pydantic. Error message: '{e}'"
        )
        return False
    return True
222
+
223
+
224
def get_pydantic_version() -> str:
    return _get_version("pydantic")


# Pydot
def is_pydot_available() -> bool:
    return is_package_available("pydot")


def get_pydot_version() -> str:
    return _get_version("pydot")


# Tensorboard ("tensorboard" resolves to the `tensorboardX` distribution, see `_CANDIDATES`)
def is_tensorboard_available() -> bool:
    return is_package_available("tensorboard")


def get_tensorboard_version() -> str:
    return _get_version("tensorboard")


# Tensorflow (matches any of the tensorflow distribution variants, see `_CANDIDATES`)
def is_tf_available() -> bool:
    return is_package_available("tensorflow")


def get_tf_version() -> str:
    return _get_version("tensorflow")


# Torch
def is_torch_available() -> bool:
    return is_package_available("torch")


def get_torch_version() -> str:
    return _get_version("torch")


# Safetensors
def is_safetensors_available() -> bool:
    return is_package_available("safetensors")
267
+
268
+
269
# Shell-related helpers
try:
    # Set to `True` if script is running in a Google Colab notebook.
    # If running in Google Colab, git credential store is set globally which makes the
    # warning disappear. See https://github.com/huggingface/huggingface_hub/issues/1043
    #
    # Taken from https://stackoverflow.com/a/63519730.
    # `get_ipython` is only defined when running inside an IPython shell; in a
    # plain interpreter the NameError below is raised and we fall back to False.
    _is_google_colab = "google.colab" in str(get_ipython())  # type: ignore # noqa: F821
except NameError:
    _is_google_colab = False
279
+
280
+
281
def is_notebook() -> bool:
    """Return `True` if code is executed in a notebook (Jupyter, Colab, QTconsole).

    Taken from https://stackoverflow.com/a/39662359.
    Adapted to make it work with Google colab as well.
    """
    # `get_ipython` only exists inside an IPython shell; a plain interpreter
    # raises NameError, which means we are not in a notebook.
    try:
        ipython_class = get_ipython().__class__  # type: ignore # noqa: F821
    except NameError:
        return False  # Probably standard Python interpreter
    # Jupyter notebook, Google colab and qtconsole all expose a shell class
    # deriving from ZMQInteractiveShell somewhere in the MRO.
    return any(ancestor.__name__ == "ZMQInteractiveShell" for ancestor in ipython_class.__mro__)
295
+
296
+
297
def is_google_colab() -> bool:
    """Return `True` if code is executed in a Google colab.

    Taken from https://stackoverflow.com/a/63519730.
    """
    # Computed once at import time (see the module-level try/except above).
    return _is_google_colab
303
+
304
+
305
def dump_environment_info() -> Dict[str, Any]:
    """Dump information about the machine to help debugging issues.

    Prints a GitHub-issue-ready bullet list and also returns the collected
    key/value pairs.

    Similar helper exist in:
    - `datasets` (https://github.com/huggingface/datasets/blob/main/src/datasets/commands/env.py)
    - `diffusers` (https://github.com/huggingface/diffusers/blob/main/src/diffusers/commands/env.py)
    - `transformers` (https://github.com/huggingface/transformers/blob/main/src/transformers/commands/env.py)

    Returns:
        `Dict[str, Any]`: mapping from human-readable property name to its value.
    """
    # Imported lazily to avoid a circular import at module load time.
    from huggingface_hub import get_token, whoami
    from huggingface_hub.utils import list_credential_helpers

    token = get_token()

    # Generic machine info
    info: Dict[str, Any] = {
        "huggingface_hub version": get_hf_hub_version(),
        "Platform": platform.platform(),
        "Python version": get_python_version(),
    }

    # Interpreter info
    # `get_ipython` is undefined outside IPython -> NameError branch.
    try:
        shell_class = get_ipython().__class__  # type: ignore # noqa: F821
        info["Running in iPython ?"] = "Yes"
        info["iPython shell"] = shell_class.__name__
    except NameError:
        info["Running in iPython ?"] = "No"
    info["Running in notebook ?"] = "Yes" if is_notebook() else "No"
    info["Running in Google Colab ?"] = "Yes" if is_google_colab() else "No"

    # Login info
    info["Token path ?"] = constants.HF_TOKEN_PATH
    info["Has saved token ?"] = token is not None
    if token is not None:
        # Best effort: network call may fail (offline, invalid token) — skip silently.
        try:
            info["Who am I ?"] = whoami()["name"]
        except Exception:
            pass

    # Best effort: `git` may be missing — skip silently.
    try:
        info["Configured git credential helpers"] = ", ".join(list_credential_helpers())
    except Exception:
        pass

    # Installed dependencies (versions are "N/A" when not installed)
    info["FastAI"] = get_fastai_version()
    info["Tensorflow"] = get_tf_version()
    info["Torch"] = get_torch_version()
    info["Jinja2"] = get_jinja_version()
    info["Graphviz"] = get_graphviz_version()
    info["keras"] = get_keras_version()
    info["Pydot"] = get_pydot_version()
    info["Pillow"] = get_pillow_version()
    info["hf_transfer"] = get_hf_transfer_version()
    info["gradio"] = get_gradio_version()
    info["tensorboard"] = get_tensorboard_version()
    info["numpy"] = get_numpy_version()
    info["pydantic"] = get_pydantic_version()
    info["aiohttp"] = get_aiohttp_version()

    # Environment variables
    info["ENDPOINT"] = constants.ENDPOINT
    info["HF_HUB_CACHE"] = constants.HF_HUB_CACHE
    info["HF_ASSETS_CACHE"] = constants.HF_ASSETS_CACHE
    info["HF_TOKEN_PATH"] = constants.HF_TOKEN_PATH
    info["HF_HUB_OFFLINE"] = constants.HF_HUB_OFFLINE
    info["HF_HUB_DISABLE_TELEMETRY"] = constants.HF_HUB_DISABLE_TELEMETRY
    info["HF_HUB_DISABLE_PROGRESS_BARS"] = constants.HF_HUB_DISABLE_PROGRESS_BARS
    info["HF_HUB_DISABLE_SYMLINKS_WARNING"] = constants.HF_HUB_DISABLE_SYMLINKS_WARNING
    info["HF_HUB_DISABLE_EXPERIMENTAL_WARNING"] = constants.HF_HUB_DISABLE_EXPERIMENTAL_WARNING
    info["HF_HUB_DISABLE_IMPLICIT_TOKEN"] = constants.HF_HUB_DISABLE_IMPLICIT_TOKEN
    info["HF_HUB_ENABLE_HF_TRANSFER"] = constants.HF_HUB_ENABLE_HF_TRANSFER
    info["HF_HUB_ETAG_TIMEOUT"] = constants.HF_HUB_ETAG_TIMEOUT
    info["HF_HUB_DOWNLOAD_TIMEOUT"] = constants.HF_HUB_DOWNLOAD_TIMEOUT

    print("\nCopy-and-paste the text below in your GitHub issue.\n")
    print("\n".join([f"- {prop}: {val}" for prop, val in info.items()]) + "\n")
    return info
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_safetensors.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import operator
3
+ from collections import defaultdict
4
+ from dataclasses import dataclass, field
5
+ from typing import Dict, List, Literal, Optional, Tuple
6
+
7
+
8
# Readability aliases for the maps below: a filename key vs a tensor-name key.
FILENAME_T = str
TENSOR_NAME_T = str
# Data types supported by the safetensors format.
DTYPE_T = Literal["F64", "F32", "F16", "BF16", "I64", "I32", "I16", "I8", "U8", "BOOL"]
11
+
12
+
13
@dataclass
class TensorInfo:
    """Information about a tensor.

    For more details regarding the safetensors format, check out https://huggingface.co/docs/safetensors/index#format.

    Attributes:
        dtype (`str`):
            The data type of the tensor ("F64", "F32", "F16", "BF16", "I64", "I32", "I16", "I8", "U8", "BOOL").
        shape (`List[int]`):
            The shape of the tensor.
        data_offsets (`Tuple[int, int]`):
            The offsets of the data in the file as a tuple `[BEGIN, END]`.
        parameter_count (`int`):
            The number of parameters in the tensor.
    """

    dtype: DTYPE_T
    shape: List[int]
    data_offsets: Tuple[int, int]
    parameter_count: int = field(init=False)

    def __post_init__(self) -> None:
        # Product of all dimensions (https://stackoverflow.com/a/13840436).
        try:
            count = functools.reduce(operator.mul, self.shape)
        except TypeError:
            # `reduce` without an initializer fails on an empty shape, i.e. a scalar.
            count = 1
        self.parameter_count = count
41
+
42
+
43
@dataclass
class SafetensorsFileMetadata:
    """Metadata for a Safetensors file hosted on the Hub.

    This class is returned by [`parse_safetensors_file_metadata`].

    For more details regarding the safetensors format, check out https://huggingface.co/docs/safetensors/index#format.

    Attributes:
        metadata (`Dict`):
            The metadata contained in the file.
        tensors (`Dict[str, TensorInfo]`):
            A map of all tensors. Keys are tensor names and values are information about the corresponding tensor, as a
            [`TensorInfo`] object.
        parameter_count (`Dict[str, int]`):
            A map of the number of parameters per data type. Keys are data types and values are the number of parameters
            of that data type.
    """

    metadata: Dict[str, str]
    tensors: Dict[TENSOR_NAME_T, TensorInfo]
    parameter_count: Dict[DTYPE_T, int] = field(init=False)

    def __post_init__(self) -> None:
        # Aggregate per-dtype parameter counts over all tensors of the file.
        counts: Dict[DTYPE_T, int] = {}
        for tensor_info in self.tensors.values():
            counts[tensor_info.dtype] = counts.get(tensor_info.dtype, 0) + tensor_info.parameter_count
        self.parameter_count = counts
71
+
72
+
73
@dataclass
class SafetensorsRepoMetadata:
    """Metadata for a Safetensors repo.

    A repo is considered to be a Safetensors repo if it contains either a 'model.safetensors' weight file (non-shared
    model) or a 'model.safetensors.index.json' index file (sharded model) at its root.

    This class is returned by [`get_safetensors_metadata`].

    For more details regarding the safetensors format, check out https://huggingface.co/docs/safetensors/index#format.

    Attributes:
        metadata (`Dict`, *optional*):
            The metadata contained in the 'model.safetensors.index.json' file, if it exists. Only populated for sharded
            models.
        sharded (`bool`):
            Whether the repo contains a sharded model or not.
        weight_map (`Dict[str, str]`):
            A map of all weights. Keys are tensor names and values are filenames of the files containing the tensors.
        files_metadata (`Dict[str, SafetensorsFileMetadata]`):
            A map of all files metadata. Keys are filenames and values are the metadata of the corresponding file, as
            a [`SafetensorsFileMetadata`] object.
        parameter_count (`Dict[str, int]`):
            A map of the number of parameters per data type. Keys are data types and values are the number of parameters
            of that data type.
    """

    metadata: Optional[Dict]
    sharded: bool
    weight_map: Dict[TENSOR_NAME_T, FILENAME_T]  # tensor name -> filename
    files_metadata: Dict[FILENAME_T, SafetensorsFileMetadata]  # filename -> metadata
    parameter_count: Dict[DTYPE_T, int] = field(init=False)

    def __post_init__(self) -> None:
        # Sum the per-file dtype counts into a repo-wide total.
        totals: Dict[DTYPE_T, int] = defaultdict(int)
        for single_file_metadata in self.files_metadata.values():
            for dtype, count in single_file_metadata.parameter_count.items():
                totals[dtype] += count
        self.parameter_count = dict(totals)
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/_typing.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022-present, the HuggingFace Inc. team.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Handle typing imports based on system compatibility."""
16
+
17
+ from typing import Any, Callable, Literal, TypeVar
18
+
19
+
20
# HTTP verbs accepted by the request helpers.
HTTP_METHOD_T = Literal["GET", "OPTIONS", "HEAD", "POST", "PUT", "PATCH", "DELETE"]

# type hint meaning "function signature not changed by decorator"
CallableT = TypeVar("CallableT", bound=Callable)

# Scalar types that JSON can represent directly; used by `is_jsonable` below.
_JSON_SERIALIZABLE_TYPES = (int, float, str, bool, type(None))
26
+
27
+
28
def is_jsonable(obj: Any) -> bool:
    """Check whether `obj` is (recursively) JSON serializable.

    This is a weak, type-based check: no actual serialization is attempted, so
    it is correct for common cases but not exhaustive.

    An object is considered json serializable if:
    - it is an int, float, str, bool or None
    - it is a list or tuple whose items are all json serializable
    - it is a dict whose keys are all strings and whose values are all json serializable
    - it exposes a `__json__` attribute (custom serialization hook)
    """
    try:
        if isinstance(obj, (int, float, str, bool, type(None))):
            return True
        if isinstance(obj, (list, tuple)):
            return all(is_jsonable(element) for element in obj)
        if isinstance(obj, dict):
            return all(isinstance(name, str) and is_jsonable(entry) for name, entry in obj.items())
        # Objects may opt in to JSON serialization via a `__json__` hook.
        return hasattr(obj, "__json__")
    except RecursionError:
        # Self-referencing structure: cannot be serialized.
        return False
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/logging.py ADDED
@@ -0,0 +1,182 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2020 Optuna, Hugging Face
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """Logging utilities."""
16
+
17
+ import logging
18
+ import os
19
+ from logging import (
20
+ CRITICAL, # NOQA
21
+ DEBUG, # NOQA
22
+ ERROR, # NOQA
23
+ FATAL, # NOQA
24
+ INFO, # NOQA
25
+ NOTSET, # NOQA
26
+ WARN, # NOQA
27
+ WARNING, # NOQA
28
+ )
29
+ from typing import Optional
30
+
31
+
32
# Accepted values for the `HF_HUB_VERBOSITY` environment variable, mapped to
# their stdlib logging levels (see `_get_default_logging_level`).
log_levels = {
    "debug": logging.DEBUG,
    "info": logging.INFO,
    "warning": logging.WARNING,
    "error": logging.ERROR,
    "critical": logging.CRITICAL,
}

# Level used when `HF_HUB_VERBOSITY` is unset or invalid.
_default_log_level = logging.WARNING
41
+
42
+
43
def _get_library_name() -> str:
    # Top-level package name, i.e. "huggingface_hub" when installed normally.
    return __name__.split(".")[0]


def _get_library_root_logger() -> logging.Logger:
    # Root logger shared by every module of the library.
    return logging.getLogger(_get_library_name())


def _get_default_logging_level():
    """
    If `HF_HUB_VERBOSITY` env var is set to one of the valid choices return that as the new default level. If it is not
    - fall back to `_default_log_level`
    """
    env_level_str = os.getenv("HF_HUB_VERBOSITY", None)
    if env_level_str:
        if env_level_str in log_levels:
            return log_levels[env_level_str]
        else:
            # Invalid value: warn on the global root logger (library logger may
            # not be configured yet) and keep the default.
            logging.getLogger().warning(
                f"Unknown option HF_HUB_VERBOSITY={env_level_str}, has to be one of: { ', '.join(log_levels.keys()) }"
            )
    return _default_log_level


def _configure_library_root_logger() -> None:
    # Attach a stderr handler and apply the env-configured default level.
    # Called once at the bottom of this module.
    library_root_logger = _get_library_root_logger()
    library_root_logger.addHandler(logging.StreamHandler())
    library_root_logger.setLevel(_get_default_logging_level())


def _reset_library_root_logger() -> None:
    # Drop the explicit level so the logger inherits from its parent again.
    library_root_logger = _get_library_root_logger()
    library_root_logger.setLevel(logging.NOTSET)
76
+
77
+
78
def get_logger(name: Optional[str] = None) -> logging.Logger:
    """
    Returns a logger with the specified name. This function is not supposed
    to be directly accessed by library users.

    Args:
        name (`str`, *optional*):
            The name of the logger to get, usually the filename

    Example:

    ```python
    >>> from huggingface_hub import get_logger

    >>> logger = get_logger(__file__)
    >>> logger.set_verbosity_info()
    ```
    """
    # Without an explicit name, return the library root logger.
    if name is None:
        name = _get_library_name()
    return logging.getLogger(name)
101
+
102
+
103
def get_verbosity() -> int:
    """Return the current level for the HuggingFace Hub's root logger.

    Returns:
        Logging level, e.g., `huggingface_hub.logging.DEBUG` and
        `huggingface_hub.logging.INFO`.

    <Tip>

    HuggingFace Hub has following logging levels:

    - `huggingface_hub.logging.CRITICAL`, `huggingface_hub.logging.FATAL`
    - `huggingface_hub.logging.ERROR`
    - `huggingface_hub.logging.WARNING`, `huggingface_hub.logging.WARN`
    - `huggingface_hub.logging.INFO`
    - `huggingface_hub.logging.DEBUG`

    </Tip>
    """
    # Effective level: inherited from ancestors when not set explicitly.
    return _get_library_root_logger().getEffectiveLevel()


def set_verbosity(verbosity: int) -> None:
    """
    Sets the level for the HuggingFace Hub's root logger.

    Args:
        verbosity (`int`):
            Logging level, e.g., `huggingface_hub.logging.DEBUG` and
            `huggingface_hub.logging.INFO`.
    """
    _get_library_root_logger().setLevel(verbosity)
135
+
136
+
137
# Convenience shortcuts around `set_verbosity` for the common levels.

def set_verbosity_info():
    """
    Sets the verbosity to `logging.INFO`.
    """
    return set_verbosity(INFO)


def set_verbosity_warning():
    """
    Sets the verbosity to `logging.WARNING`.
    """
    return set_verbosity(WARNING)


def set_verbosity_debug():
    """
    Sets the verbosity to `logging.DEBUG`.
    """
    return set_verbosity(DEBUG)


def set_verbosity_error():
    """
    Sets the verbosity to `logging.ERROR`.
    """
    return set_verbosity(ERROR)
163
+
164
+
165
def disable_propagation() -> None:
    """
    Disable propagation of the library log outputs. Note that log propagation is
    disabled by default.
    """
    _get_library_root_logger().propagate = False


def enable_propagation() -> None:
    """
    Enable propagation of the library log outputs. Please disable the
    HuggingFace Hub's default handler to prevent double logging if the root
    logger has been configured.
    """
    _get_library_root_logger().propagate = True


# Configure the library logger (handler + default level) once at import time.
_configure_library_root_logger()
llmeval-env/lib/python3.10/site-packages/huggingface_hub/utils/sha.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Utilities to efficiently compute the SHA 256 hash of a bunch of bytes."""
2
+
3
+ from typing import BinaryIO, Optional
4
+
5
+ from .insecure_hashlib import sha256
6
+
7
+
8
def sha_fileobj(fileobj: BinaryIO, chunk_size: Optional[int] = None) -> bytes:
    """
    Computes the sha256 hash of the given file object, by chunks of size `chunk_size`.

    Args:
        fileobj (file-like object):
            The File object to compute sha256 for, typically obtained with `open(path, "rb")`
        chunk_size (`int`, *optional*):
            The number of bytes to read from `fileobj` at once, defaults to 1MB.

    Returns:
        `bytes`: `fileobj`'s sha256 hash as bytes
    """
    if chunk_size is None:
        chunk_size = 1024 * 1024  # 1 MB default

    hasher = sha256()
    # Feed the hasher until read() returns b"" (EOF); the walrus form stops
    # the loop exactly when the empty chunk arrives.
    while chunk := fileobj.read(chunk_size):
        hasher.update(chunk)
    return hasher.digest()
llmeval-env/lib/python3.10/site-packages/nltk/lm/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (7.71 kB). View file
 
llmeval-env/lib/python3.10/site-packages/nltk/lm/__pycache__/util.cpython-310.pyc ADDED
Binary file (515 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/nltk/lm/__pycache__/vocabulary.cpython-310.pyc ADDED
Binary file (7.91 kB). View file
 
llmeval-env/lib/python3.10/site-packages/scipy/__config__.py ADDED
@@ -0,0 +1,161 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is generated by SciPy's build process
2
+ # It contains system_info results at the time of building this package.
3
+ from enum import Enum
4
+
5
+ __all__ = ["show"]
6
+ _built_with_meson = True
7
+
8
+
9
class DisplayModes(Enum):
    """Valid values for the `mode` argument of `show()`."""
    stdout = "stdout"  # pretty-print the configuration to stdout
    dicts = "dicts"  # return the CONFIG dict itself
12
+
13
+
14
+ def _cleanup(d):
15
+ """
16
+ Removes empty values in a `dict` recursively
17
+ This ensures we remove values that Meson could not provide to CONFIG
18
+ """
19
+ if isinstance(d, dict):
20
+ return { k: _cleanup(v) for k, v in d.items() if v != '' and _cleanup(v) != '' }
21
+ else:
22
+ return d
23
+
24
+
25
# Build-time configuration emitted by Meson at package-build time; _cleanup
# drops any entries whose value Meson left as the empty string.
CONFIG = _cleanup(
    {
        "Compilers": {
            "c": {
                "name": "gcc",
                "linker": r"ld.bfd",
                "version": "10.2.1",
                "commands": r"cc",
                "args": r"",
                "linker args": r"",
            },
            "cython": {
                "name": r"cython",
                "linker": r"cython",
                "version": r"3.0.10",
                "commands": r"cython",
                "args": r"",
                "linker args": r"",
            },
            "c++": {
                "name": "gcc",
                "linker": r"ld.bfd",
                "version": "10.2.1",
                "commands": r"c++",
                "args": r"",
                "linker args": r"",
            },
            "fortran": {
                "name": "gcc",
                "linker": r"ld.bfd",
                "version": "10.2.1",
                "commands": r"gfortran",
                "args": r"",
                "linker args": r"",
            },
            "pythran": {
                "version": r"0.15.0",
                "include directory": r"../../tmp/pip-build-env-0blqy1or/overlay/lib/python3.10/site-packages/pythran"
            },
        },
        "Machine Information": {
            "host": {
                "cpu": r"x86_64",
                "family": r"x86_64",
                "endian": r"little",
                "system": r"linux",
            },
            "build": {
                "cpu": r"x86_64",
                "family": r"x86_64",
                "endian": r"little",
                "system": r"linux",
            },
            "cross-compiled": bool("False".lower().replace('false', '')),  # "false" -> "" -> False; anything else is truthy
        },
        "Build Dependencies": {
            "blas": {
                "name": "openblas",
                "found": bool("True".lower().replace('false', '')),  # same template trick as "cross-compiled" above
                "version": "0.3.26.dev",
                "detection method": "pkgconfig",
                "include directory": r"/usr/local/include",
                "lib directory": r"/usr/local/lib",
                "openblas configuration": r"USE_64BITINT=0 DYNAMIC_ARCH=1 DYNAMIC_OLDER= NO_CBLAS= NO_LAPACK= NO_LAPACKE= NO_AFFINITY=1 USE_OPENMP= ZEN MAX_THREADS=64",
                "pc file directory": r"/usr/local/lib/pkgconfig",
            },
            "lapack": {
                "name": "openblas",
                "found": bool("True".lower().replace('false', '')),
                "version": "0.3.26.dev",
                "detection method": "pkgconfig",
                "include directory": r"/usr/local/include",
                "lib directory": r"/usr/local/lib",
                "openblas configuration": r"USE_64BITINT=0 DYNAMIC_ARCH=1 DYNAMIC_OLDER= NO_CBLAS= NO_LAPACK= NO_LAPACKE= NO_AFFINITY=1 USE_OPENMP= ZEN MAX_THREADS=64",
                "pc file directory": r"/usr/local/lib/pkgconfig",
            },
            "pybind11": {
                "name": "pybind11",
                "version": "2.12.0",
                "detection method": "config-tool",
                "include directory": r"unknown",
            },
        },
        "Python Information": {
            "path": r"/opt/python/cp310-cp310/bin/python",
            "version": "3.10",
        },
    }
)
114
+
115
+
116
+ def _check_pyyaml():
117
+ import yaml
118
+
119
+ return yaml
120
+
121
+
122
def show(mode=DisplayModes.stdout.value):
    """
    Show libraries and system information on which SciPy was built
    and is being used

    Parameters
    ----------
    mode : {`'stdout'`, `'dicts'`}, optional.
        Indicates how to display the config information.
        `'stdout'` prints to console, `'dicts'` returns a dictionary
        of the configuration.

    Returns
    -------
    out : {`dict`, `None`}
        If mode is `'dicts'`, a dict is returned, else None

    Notes
    -----
    1. The `'stdout'` mode will give more readable
       output if ``pyyaml`` is installed

    """
    if mode == DisplayModes.dicts.value:
        return CONFIG
    if mode == DisplayModes.stdout.value:
        try:
            # PyYAML is a non-standard library: check the import and fall
            # back to JSON when it is missing.
            yaml = _check_pyyaml()
            print(yaml.dump(CONFIG))
        except ModuleNotFoundError:
            import json
            import warnings

            warnings.warn("Install `pyyaml` for better output", stacklevel=1)
            print(json.dumps(CONFIG, indent=2))
        return None
    raise AttributeError(
        f"Invalid `mode`, use one of: {', '.join([e.value for e in DisplayModes])}"
    )
llmeval-env/lib/python3.10/site-packages/scipy/__init__.py ADDED
@@ -0,0 +1,141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ SciPy: A scientific computing package for Python
3
+ ================================================
4
+
5
+ Documentation is available in the docstrings and
6
+ online at https://docs.scipy.org.
7
+
8
+ Subpackages
9
+ -----------
10
+ Using any of these subpackages requires an explicit import. For example,
11
+ ``import scipy.cluster``.
12
+
13
+ ::
14
+
15
+ cluster --- Vector Quantization / Kmeans
16
+ constants --- Physical and mathematical constants and units
17
+ datasets --- Dataset methods
18
+ fft --- Discrete Fourier transforms
19
+ fftpack --- Legacy discrete Fourier transforms
20
+ integrate --- Integration routines
21
+ interpolate --- Interpolation Tools
22
+ io --- Data input and output
23
+ linalg --- Linear algebra routines
24
+ misc --- Utilities that don't have another home.
25
+ ndimage --- N-D image package
26
+ odr --- Orthogonal Distance Regression
27
+ optimize --- Optimization Tools
28
+ signal --- Signal Processing Tools
29
+ sparse --- Sparse Matrices
30
+ spatial --- Spatial data structures and algorithms
31
+ special --- Special functions
32
+ stats --- Statistical Functions
33
+
34
+ Public API in the main SciPy namespace
35
+ --------------------------------------
36
+ ::
37
+
38
+ __version__ --- SciPy version string
39
+ LowLevelCallable --- Low-level callback function
40
+ show_config --- Show scipy build configuration
41
+ test --- Run scipy unittests
42
+
43
+ """
44
+
45
+ import importlib as _importlib
46
+
47
+ from numpy import __version__ as __numpy_version__
48
+
49
+
50
+ try:
51
+ from scipy.__config__ import show as show_config
52
+ except ImportError as e:
53
+ msg = """Error importing SciPy: you cannot import SciPy while
54
+ being in scipy source directory; please exit the SciPy source
55
+ tree first and relaunch your Python interpreter."""
56
+ raise ImportError(msg) from e
57
+
58
+
59
+ from scipy.version import version as __version__
60
+
61
+
62
+ # Allow distributors to run custom init code
63
+ from . import _distributor_init
64
+ del _distributor_init
65
+
66
+
67
from scipy._lib import _pep440
# In maintenance branch, change to np_maxversion N+3 if numpy is at N
np_minversion = '1.22.4'
np_maxversion = '2.3.0'
# Warn (do not fail) when the installed NumPy is outside the supported
# [np_minversion, np_maxversion) window for this SciPy build.
if (_pep440.parse(__numpy_version__) < _pep440.Version(np_minversion) or
        _pep440.parse(__numpy_version__) >= _pep440.Version(np_maxversion)):
    import warnings
    warnings.warn(f"A NumPy version >={np_minversion} and <{np_maxversion}"
                  f" is required for this version of SciPy (detected "
                  f"version {__numpy_version__})",
                  UserWarning, stacklevel=2)
del _pep440
79
+
80
+
81
+ # This is the first import of an extension module within SciPy. If there's
82
+ # a general issue with the install, such that extension modules are missing
83
+ # or cannot be imported, this is where we'll get a failure - so give an
84
+ # informative error message.
85
+ try:
86
+ from scipy._lib._ccallback import LowLevelCallable
87
+ except ImportError as e:
88
+ msg = "The `scipy` install you are using seems to be broken, " + \
89
+ "(extension modules cannot be imported), " + \
90
+ "please try reinstalling."
91
+ raise ImportError(msg) from e
92
+
93
+
94
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)  # `scipy.test()` — "Run scipy unittests" per the module docstring
del PytestTester  # keep the helper class itself out of the public namespace
97
+
98
+
99
# Subpackages that are imported lazily on first attribute access (see
# __getattr__ in this module).
submodules = [
    'cluster',
    'constants',
    'datasets',
    'fft',
    'fftpack',
    'integrate',
    'interpolate',
    'io',
    'linalg',
    'misc',
    'ndimage',
    'odr',
    'optimize',
    'signal',
    'sparse',
    'spatial',
    'special',
    'stats'
]

# Public API: every lazy subpackage plus the names imported/defined above.
__all__ = submodules + [
    'LowLevelCallable',
    'test',
    'show_config',
    '__version__',
]
126
+
127
+
128
def __dir__():
    # PEP 562 module __dir__: advertise only the declared public API.
    return __all__
130
+
131
+
132
def __getattr__(name):
    """PEP 562 lazy loader: import `scipy.<name>` subpackages on first access."""
    if name in submodules:
        return _importlib.import_module(f'scipy.{name}')
    try:
        return globals()[name]
    except KeyError:
        raise AttributeError(
            f"Module 'scipy' has no attribute '{name}'"
        )
llmeval-env/lib/python3.10/site-packages/scipy/_distributor_init.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ Distributor init file
2
+
3
+ Distributors: you can replace the contents of this file with your own custom
4
+ code to support particular distributions of SciPy.
5
+
6
+ For example, this is a good place to put any checks for hardware requirements
7
+ or BLAS/LAPACK library initialization.
8
+
9
+ The SciPy standard source distribution will not put code in this file beyond
10
+ the try-except import of `_distributor_init_local` (which is not part of a
11
+ standard source distribution), so you can safely replace this file with your
12
+ own version.
13
+ """
14
+
15
+ try:
16
+ from . import _distributor_init_local # noqa: F401
17
+ except ImportError:
18
+ pass
llmeval-env/lib/python3.10/site-packages/scipy/cluster/__init__.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ =========================================
3
+ Clustering package (:mod:`scipy.cluster`)
4
+ =========================================
5
+
6
+ .. currentmodule:: scipy.cluster
7
+
8
+ .. toctree::
9
+ :hidden:
10
+
11
+ cluster.vq
12
+ cluster.hierarchy
13
+
14
+ Clustering algorithms are useful in information theory, target detection,
15
+ communications, compression, and other areas. The `vq` module only
16
+ supports vector quantization and the k-means algorithms.
17
+
18
+ The `hierarchy` module provides functions for hierarchical and
19
+ agglomerative clustering. Its features include generating hierarchical
20
+ clusters from distance matrices,
21
+ calculating statistics on clusters, cutting linkages
22
+ to generate flat clusters, and visualizing clusters with dendrograms.
23
+
24
+ """
25
# Public submodules of scipy.cluster.
__all__ = ['vq', 'hierarchy']

from . import vq, hierarchy

from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)  # `scipy.cluster.test()` runs this subpackage's tests
del PytestTester  # keep the helper class itself out of the namespace
llmeval-env/lib/python3.10/site-packages/scipy/cluster/_hierarchy.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (423 kB). View file
 
llmeval-env/lib/python3.10/site-packages/scipy/cluster/_optimal_leaf_ordering.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (356 kB). View file
 
llmeval-env/lib/python3.10/site-packages/scipy/cluster/_vq.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (128 kB). View file
 
llmeval-env/lib/python3.10/site-packages/scipy/cluster/hierarchy.py ADDED
The diff for this file is too large to render. See raw diff
 
llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__init__.py ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (192 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__pycache__/test_disjoint_set.cpython-310.pyc ADDED
Binary file (6.22 kB). View file
 
llmeval-env/lib/python3.10/site-packages/scipy/cluster/tests/__pycache__/test_hierarchy.cpython-310.pyc ADDED
Binary file (41.4 kB). View file