diff --git a/.gitattributes b/.gitattributes index a3b08a51d24df209d88f27e14e30826e411219c3..012f17aecd7eaf5a552523ae2a7ce8cfa2342109 100644 --- a/.gitattributes +++ b/.gitattributes @@ -107,3 +107,4 @@ venv/lib/python3.10/site-packages/pandas/_libs/interval.cpython-310-x86_64-linux venv/lib/python3.10/site-packages/nvidia/cusolver/lib/libcusolver.so.11 filter=lfs diff=lfs merge=lfs -text venv/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text venv/lib/python3.10/site-packages/torch/lib/libtorch_cpu.so filter=lfs diff=lfs merge=lfs -text +venv/bin/python filter=lfs diff=lfs merge=lfs -text diff --git a/ckpts/universal/global_step20/zero/17.post_attention_layernorm.weight/fp32.pt b/ckpts/universal/global_step20/zero/17.post_attention_layernorm.weight/fp32.pt new file mode 100644 index 0000000000000000000000000000000000000000..7760e5211e15e3e39f9be25f7ca01f903e186e3a --- /dev/null +++ b/ckpts/universal/global_step20/zero/17.post_attention_layernorm.weight/fp32.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:829fdc40d57e711876b85faffda42464ceec27743f94e681bb196cb770b24690 +size 9293 diff --git a/ckpts/universal/global_step20/zero/25.mlp.dense_4h_to_h.weight/exp_avg_sq.pt b/ckpts/universal/global_step20/zero/25.mlp.dense_4h_to_h.weight/exp_avg_sq.pt new file mode 100644 index 0000000000000000000000000000000000000000..101ef50196f758eb9c54623d0403e928617bfe66 --- /dev/null +++ b/ckpts/universal/global_step20/zero/25.mlp.dense_4h_to_h.weight/exp_avg_sq.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd254701cf527d2732837bacc2043f57b9c913df4ad4461504560e72fbc038bc +size 33555627 diff --git a/ckpts/universal/global_step20/zero/25.mlp.dense_4h_to_h.weight/fp32.pt b/ckpts/universal/global_step20/zero/25.mlp.dense_4h_to_h.weight/fp32.pt new file mode 100644 index 0000000000000000000000000000000000000000..6678edfec541a057258ee26bdef60eb2f2dc9151 --- /dev/null +++ b/ckpts/universal/global_step20/zero/25.mlp.dense_4h_to_h.weight/fp32.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6affe892b38bce02cad515f1d1fed1f4dd548a931cb4d1d7690c4976296020b0 +size 33555533 diff --git a/ckpts/universal/global_step20/zero/9.post_attention_layernorm.weight/exp_avg.pt b/ckpts/universal/global_step20/zero/9.post_attention_layernorm.weight/exp_avg.pt new file mode 100644 index 0000000000000000000000000000000000000000..02067384d4710bed3a6168cba797bbe93e465b2e --- /dev/null +++ b/ckpts/universal/global_step20/zero/9.post_attention_layernorm.weight/exp_avg.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6fa8eb359f2a6725f93a6ac8b302cf6f8234348e9df053ef44a2683598b41da +size 9372 diff --git a/lm-evaluation-harness/tests/testdata/gpt3_test_8025023377febbd8c5f2b9f26705c394ff375d0cad7c89c10fd9b8e1eb66ff1c.pkl b/lm-evaluation-harness/tests/testdata/gpt3_test_8025023377febbd8c5f2b9f26705c394ff375d0cad7c89c10fd9b8e1eb66ff1c.pkl new file mode 100644 index 0000000000000000000000000000000000000000..2f4c6f9c7102ac9466d1da0d81307088f2fdcdb4 --- /dev/null +++ b/lm-evaluation-harness/tests/testdata/gpt3_test_8025023377febbd8c5f2b9f26705c394ff375d0cad7c89c10fd9b8e1eb66ff1c.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:590805560ee790d530c075ad76633eb2e9749440083e0bab63489ff920fdfd33 +size 70917 diff --git a/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/config.yaml 
b/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d4385c32832c4d7c9972ea1e6cdb8b92bad73459 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/config.yaml @@ -0,0 +1,86 @@ +wandb_version: 1 + +_wandb: + desc: null + value: + python_version: 3.10.12 + cli_version: 0.17.0 + framework: huggingface + huggingface_version: 4.40.2 + is_jupyter_run: false + is_kaggle_kernel: false + start_time: 1715682602 + t: + 1: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 2: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 3: + - 2 + - 23 + - 62 + 4: 3.10.12 + 5: 0.17.0 + 6: 4.40.2 + 8: + - 5 + 13: linux-x86_64 +task_configs: + desc: null + value: + indiccopa-hi: + task: indiccopa-hi + group: ai4bharat/IndicCOPA + dataset_path: ai4bharat/IndicCOPA + dataset_name: translation-hi + test_split: test + doc_to_text: 'functools.partial(, connector={''cause'': + ''कारण'', ''effect'': ''परिणाम''})' + doc_to_target: label + doc_to_choice: "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"\ + ]), convert_choice(doc[\"choice2\"])]\n" + description: '' + target_delimiter: ' ' + fewshot_delimiter: ' + + + ' + num_fewshot: 0 + metric_list: + - metric: acc + output_type: multiple_choice + repeats: 1 + should_decontaminate: false + metadata: + version: 1.0 +cli_configs: + desc: null + value: + model: hf + model_args: pretrained=/data/cronscript/ckpts//hf_ckpt//global_step100 + batch_size: auto + batch_sizes: + - 64 + device: null + use_cache: null + limit: null + bootstrap_iters: 100000 + gen_kwargs: null diff --git a/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/media/table/evaluation/eval_results_1_c78c57917f215d296d9d.table.json b/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/media/table/evaluation/eval_results_1_c78c57917f215d296d9d.table.json new file mode 100644 index 0000000000000000000000000000000000000000..fc9a92c5b80c0631baf44f28a9d55b68d269f5da --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/media/table/evaluation/eval_results_1_c78c57917f215d296d9d.table.json @@ -0,0 +1 @@ +{"columns": ["Tasks", "Version", "Filter", "num_fewshot", "Metric", "Value", "Stderr"], "data": [["indiccopa-hi", 1.0, "none", 0, "acc", "0.534521158129176", "0.0236"]]} \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/output.log b/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/output.log new file mode 100644 index 0000000000000000000000000000000000000000..4777bc20d591eaf2623ed863b4d2e0f7ba4f330e --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_103002-b0385ab4/files/output.log @@ -0,0 +1,33 @@ + +2024-05-14:10:30:02,606 INFO [__main__.py:251] Verbosity set to INFO +2024-05-14:10:30:07,108 INFO [__main__.py:335] Selected Tasks: ['indiccopa-hi'] +2024-05-14:10:30:07,110 INFO [evaluator.py:131] Setting random seed to 0 | Setting numpy seed to 1234 | Setting torch manual seed to 1234 +2024-05-14:10:30:07,111 INFO [evaluator.py:177] Initializing hf model, with arguments: {'pretrained': '/data/cronscript/ckpts//hf_ckpt//global_step100'} +/usr/local/lib/python3.10/dist-packages/habana_frameworks/torch/gpu_migration/torch/cuda/memory.py:36: UserWarning: No need to call empty_cache on HPU. It manages the memory internally in an effcient way. 
+ warnings.warn( +/usr/local/lib/python3.10/dist-packages/habana_frameworks/torch/hpu/__init__.py:158: UserWarning: torch.hpu.setDeterministic is deprecated and will be removed in next release. Please use torch.use_deterministic_algorithms instead. + warnings.warn( +You are using the default legacy behaviour of the . This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565 +2024-05-14:10:30:13,541 INFO [huggingface.py:334] Using 8 devices with data parallelism +2024-05-14:10:30:13,543 WARNING [task.py:763] [Task: indiccopa-hi] metric acc is defined, but aggregation is not. using default aggregation=mean +2024-05-14:10:30:13,543 WARNING [task.py:775] [Task: indiccopa-hi] metric acc is defined, but higher_is_better is not. using default higher_is_better=True +[2024-05-14 10:30:13,162] [INFO] [real_accelerator.py:178:get_accelerator] Setting ds_accelerator to hpu (auto detect) +/usr/local/lib/python3.10/dist-packages/datasets/load.py:1486: FutureWarning: The repository for ai4bharat/IndicCOPA contains custom code which must be executed to correctly load the dataset. You can inspect the repository content at https://hf.co/datasets/ai4bharat/IndicCOPA +You can avoid this message in future by passing the argument `trust_remote_code=True`. +Passing `trust_remote_code=True` will be mandatory to load this dataset from the next major release of `datasets`. + warnings.warn( +2024-05-14:10:30:15,028 WARNING [task.py:322] [Task: indiccopa-hi] has_training_docs and has_validation_docs are False, using test_docs as fewshot_docs but this is not recommended. +2024-05-14:10:30:15,028 WARNING [task.py:322] [Task: indiccopa-hi] has_training_docs and has_validation_docs are False, using test_docs as fewshot_docs but this is not recommended. +2024-05-14:10:30:15,047 INFO [task.py:395] Building contexts for indiccopa-hi on rank 0... +100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 57/57 [00:00<00:00, 100875.67it/s] +Passed argument batch_size = auto:1. 
Detecting largest batch size +2024-05-14:10:30:16,729 INFO [evaluator.py:379] Running loglikelihood requests +Running loglikelihood requests: 0%| | 0/114 [00:00, connector={'cause': 'कारण', 'effect': 'परिणाम'})", 'doc_to_target': 'label', 'doc_to_choice': 'def doc_to_choice(doc):\n return [convert_choice(doc["choice1"]), convert_choice(doc["choice2"])]\n', 'description': '', 'target_delimiter': ' ', 'fewshot_delimiter': '\n\n', 'num_fewshot': 0, 'metric_list': [{'metric': 'acc'}], 'output_type': 'multiple_choice', 'repeats': 1, 'should_decontaminate': False, 'metadata': {'version': 1.0}}}, 'cli_configs': {'model': 'hf', 'model_args': 'pretrained=/data/cronscript/ckpts//hf_ckpt//global_step100', 'batch_size': 'auto', 'batch_sizes': [64], 'device': None, 'use_cache': None, 'limit': None, 'bootstrap_iters': 100000, 'gen_kwargs': None}} +2024-05-14 10:30:25,199 INFO MainThread:480 [wandb_run.py:_finish():2103] finishing run smlgenai/bharatgpt/b0385ab4 +2024-05-14 10:30:25,199 INFO MainThread:480 [wandb_run.py:_atexit_cleanup():2343] got exitcode: 0 +2024-05-14 10:30:25,200 INFO MainThread:480 [wandb_run.py:_restore():2326] restore +2024-05-14 10:30:25,200 INFO MainThread:480 [wandb_run.py:_restore():2332] restore done +2024-05-14 10:30:31,304 INFO MainThread:480 [wandb_run.py:_footer_history_summary_info():3994] rendering history +2024-05-14 10:30:31,304 INFO MainThread:480 [wandb_run.py:_footer_history_summary_info():4026] rendering summary +2024-05-14 10:30:31,309 INFO MainThread:480 [wandb_run.py:_footer_sync_info():3953] logging synced files diff --git a/lm-evaluation-harness/wandb/run-20240514_114140-3myl7vfa/run-3myl7vfa.wandb b/lm-evaluation-harness/wandb/run-20240514_114140-3myl7vfa/run-3myl7vfa.wandb new file mode 100644 index 0000000000000000000000000000000000000000..a6d05c80279df51e42bbfd63aa00dcb6668ebddc Binary files /dev/null and b/lm-evaluation-harness/wandb/run-20240514_114140-3myl7vfa/run-3myl7vfa.wandb differ diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/config.yaml b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9400ed6fb4b71cbea320fefea3535c49fbdff733 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/config.yaml @@ -0,0 +1,43 @@ +wandb_version: 1 + +_wandb: + desc: null + value: + python_version: 3.10.12 + cli_version: 0.17.0 + framework: huggingface + huggingface_version: 4.40.2 + is_jupyter_run: false + is_kaggle_kernel: false + start_time: 1715704566 + t: + 1: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 2: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 3: + - 23 + 4: 3.10.12 + 5: 0.17.0 + 6: 4.40.2 + 8: + - 5 + 13: linux-x86_64 diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log new file mode 100644 index 0000000000000000000000000000000000000000..4bbfeb38e0c8a8a1a057065ab9b209839491ff75 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log @@ -0,0 +1,33 @@ + +2024-05-14:16:36:07,480 INFO [__main__.py:251] Verbosity set to INFO +2024-05-14:16:36:12,176 INFO [__main__.py:335] Selected Tasks: ['indiccopa-hi'] +2024-05-14:16:36:12,178 INFO [evaluator.py:131] Setting random seed to 0 | Setting numpy seed to 1234 | Setting torch manual seed to 1234 +2024-05-14:16:36:12,178 INFO [evaluator.py:177] Initializing hf model, 
with arguments: {'pretrained': '/data/cronscript/ckpts//hf_ckpt//global_step120'} +Traceback (most recent call last): + File "/usr/lib/python3.10/runpy.py", line 196, in _run_module_as_main + return _run_code(code, main_globals, None, + File "/usr/lib/python3.10/runpy.py", line 86, in _run_code + exec(code, run_globals) + File "/data/cronscript/lm-evaluation-harness/lm_eval/__main__.py", line 417, in <module> + cli_evaluate() + File "/data/cronscript/lm-evaluation-harness/lm_eval/__main__.py", line 341, in cli_evaluate + results = evaluator.simple_evaluate( + File "/data/cronscript/lm-evaluation-harness/lm_eval/utils.py", line 288, in _wrapper + return fn(*args, **kwargs) + File "/data/cronscript/lm-evaluation-harness/lm_eval/evaluator.py", line 180, in simple_evaluate + lm = lm_eval.api.registry.get_model(model).create_from_arg_string( + File "/data/cronscript/lm-evaluation-harness/lm_eval/api/model.py", line 134, in create_from_arg_string + return cls(**args, **args2) + File "/data/cronscript/lm-evaluation-harness/lm_eval/models/huggingface.py", line 190, in __init__ + self._get_config( + File "/data/cronscript/lm-evaluation-harness/lm_eval/models/huggingface.py", line 471, in _get_config + self._config = transformers.AutoConfig.from_pretrained( + File "/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py", line 928, in from_pretrained + config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) + File "/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py", line 631, in get_config_dict + config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) + File "/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py", line 686, in _get_config_dict + resolved_config_file = cached_file( + File "/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py", line 369, in cached_file + raise EnvironmentError( +OSError: /data/cronscript/ckpts//hf_ckpt//global_step120 does not appear to have a file named config.json. Checkout 'https://huggingface.co//data/cronscript/ckpts//hf_ckpt//global_step120/tree/main' for available files.
\ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/requirements.txt b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..d63edd781bd5bdbb7f67523ac1ba9f0f1ed392dc --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/requirements.txt @@ -0,0 +1,163 @@ +DataProperty==1.0.1 +GitPython==3.1.43 +Jinja2==3.1.3 +Markdown==3.6 +MarkupSafe==2.1.5 +Pillow-SIMD==7.0.0.post3 +PyYAML==6.0 +Werkzeug==3.0.2 +absl-py==2.1.0 +accelerate==0.30.1 +aiohttp==3.9.4 +aiosignal==1.3.1 +antlr4-python3-runtime==4.9.3 +anyio==4.3.0 +async-timeout==4.0.3 +attrs==23.2.0 +av==9.2.0 +cachetools==5.3.3 +certifi==2024.2.2 +cffi==1.15.1 +cfgv==3.4.0 +chardet==5.2.0 +charset-normalizer==3.3.2 +click==8.1.7 +cmake==3.29.2 +colorama==0.4.6 +datasets==2.19.1 +deepspeed==0.12.4+hpu.synapse.v1.15.1 +dill==0.3.8 +distlib==0.3.8 +distro==1.9.0 +docker-pycreds==0.4.0 +einops==0.8.0 +evaluate==0.4.2 +exceptiongroup==1.2.0 +expecttest==0.2.1 +filelock==3.13.4 +frozenlist==1.4.1 +fsspec==2024.3.1 +gitdb==4.0.11 +google-auth-oauthlib==0.4.6 +google-auth==2.29.0 +grpcio==1.62.1 +h11==0.14.0 +habana-media-loader==1.15.1.15 +habana-pyhlml==1.15.1.15 +habana-torch-dataloader==1.15.1.15 +habana-torch-plugin==1.15.1.15 +habana_gpu_migration==1.15.1.15 +habana_quantization_toolkit==1.15.1.15 +hjson==3.1.0 +httpcore==1.0.5 +httpx==0.27.0 +huggingface-hub==0.23.0 +identify==2.5.35 +idna==3.7 +importlib_resources==6.4.0 +iniconfig==2.0.0 +joblib==1.4.2 +jsonlines==4.0.0 +lightning-habana==1.4.0 +lightning-utilities==0.11.2 +lightning==2.2.0.post0 +lm_eval==0.3.0 +lm_eval==0.4.2 +lm_eval==0.4.2 +lm_eval==0.4.2 +mbstrdecoder==1.1.3 +more-itertools==10.2.0 +mpi4py==3.1.4 +mpmath==1.3.0 +multidict==6.0.5 +multiprocess==0.70.16 +networkx==3.3 +ninja==1.11.1.1 +nltk==3.8.1 +nodeenv==1.8.0 +numexpr==2.10.0 +numpy==1.23.5 +oauthlib==3.2.2 +omegaconf==2.3.0 +openai==1.29.0 +packaging==24.0 +pandas==2.0.1 +pathspec==0.12.1 +pathvalidate==3.2.0 +peft==0.10.0 +perfetto==0.7.0 +pip==22.0.2 +pip==23.3.1 +platformdirs==4.2.0 +pluggy==1.4.0 +portalocker==2.8.2 +pre-commit==3.3.3 +protobuf==3.20.3 +psutil==5.9.8 +py-cpuinfo==9.0.0 +pyarrow-hotfix==0.6 +pyarrow==16.0.0 +pyasn1==0.6.0 +pyasn1_modules==0.4.0 +pybind11==2.10.4 +pycountry==23.12.11 +pycparser==2.22 +pydantic==1.10.13 +pynvml==8.0.4 +pytablewriter==1.2.0 +pytest==8.1.1 +python-dateutil==2.9.0.post0 +pytorch-lightning==2.2.2 +pytz==2024.1 +regex==2023.5.5 +requests-oauthlib==2.0.0 +requests==2.31.0 +rouge_score==0.1.2 +rsa==4.9 +sacrebleu==1.5.0 +safetensors==0.4.3 +scikit-learn==1.4.2 +scipy==1.13.0 +sentencepiece==0.2.0 +sentry-sdk==2.1.1 +setproctitle==1.3.3 +setuptools==59.6.0 +setuptools==69.5.1 +six==1.16.0 +smmap==5.0.1 +sniffio==1.3.1 +sqlitedict==2.1.0 +symengine==0.11.0 +sympy==1.12 +tabledata==1.3.3 +tcolorpy==0.1.6 +tdqm==0.0.1 +tensorboard-data-server==0.6.1 +tensorboard-plugin-wit==1.8.1 +tensorboard==2.11.2 +threadpoolctl==3.5.0 +tokenizers==0.19.1 +tomli==2.0.1 +torch==2.2.0a0+git8964477 +torch_tb_profiler==0.4.0 +torchaudio==2.2.0+08901ad +torchdata==0.7.1+5e6f7b7 +torchmetrics==1.3.2 +torchtext==0.17.0+400da5c +torchvision==0.17.0+b2383d4 +tqdm-multiprocess==0.0.11 +tqdm==4.66.2 +transformers==4.40.2 +typepy==1.3.2 +typing_extensions==4.11.0 +tzdata==2024.1 +urllib3==1.26.18 +virtualenv==20.25.1 +wandb==0.17.0 +wheel==0.37.1 +wheel==0.43.0 +word2number==1.1 +xxhash==3.4.1 +yamllint==1.35.1 +yarl==1.9.4 
+zstandard==0.22.0 \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-metadata.json b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..38f457761de7e3a2c2801b8599c90f1e43d59c45 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-metadata.json @@ -0,0 +1,810 @@ +{ + "os": "Linux-5.15.0-92-generic-x86_64-with-glibc2.35", + "python": "3.10.12", + "heartbeatAt": "2024-05-14T16:36:07.334023", + "startedAt": "2024-05-14T16:36:06.905531", + "docker": null, + "cuda": null, + "args": [ + "--model", + "hf", + "--model_args", + "pretrained=/data/cronscript/ckpts//hf_ckpt//global_step120", + "--tasks", + "indiccopa-hi", + "--batch_size", + "auto", + "--wandb_args", + "project=bharatgpt,group=trial_expt" + ], + "state": "running", + "program": "-m lm_eval.__main__", + "codePathLocal": null, + "git": { + "remote": "https://github.com/EleutherAI/lm-evaluation-harness", + "commit": null + }, + "email": null, + "root": "/data/cronscript/lm-evaluation-harness", + "host": "vizzhy-150-3", + "username": "root", + "executable": "/usr/bin/python3", + "cpu_count": 76, + "cpu_count_logical": 152, + "cpu_freq": { + "current": 3389.3346710526316, + "min": 800.0, + "max": 3400.0 + }, + "cpu_freq_per_core": [ + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3299.935, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3299.996, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + 
"max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3299.996, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3365.29, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + 
}, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 
3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + } + ], + "disk": { + "/": { + "total": 866.4415092468262, + "used": 863.4254722595215 + } + }, + "memory": { + "total": 1007.5000267028809 + } +} diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-summary.json b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-summary.json new file mode 100644 index 0000000000000000000000000000000000000000..e682bae6b5eaeba8295fd0fffdc51474a259249e --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-summary.json @@ -0,0 +1 @@ +{"_wandb": {"runtime": 5}} \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug-internal.log b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug-internal.log new file mode 100644 index 0000000000000000000000000000000000000000..26982ab237697ff3df2a1ffac770eb378f108f72 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug-internal.log @@ -0,0 +1,182 @@ +2024-05-14 16:36:06,918 INFO StreamThr :125134 [internal.py:wandb_internal():85] W&B internal server running at pid: 125134, started at: 2024-05-14 16:36:06.917912 +2024-05-14 16:36:06,920 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: status +2024-05-14 16:36:06,921 INFO WriterThread:125134 [datastore.py:open_for_write():87] open: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/run-6ikrp0kk.wandb +2024-05-14 16:36:06,922 DEBUG SenderThread:125134 [sender.py:send():378] send: header +2024-05-14 16:36:06,933 DEBUG SenderThread:125134 [sender.py:send():378] send: run +2024-05-14 16:36:07,194 INFO SenderThread:125134 [dir_watcher.py:__init__():211] watching files in: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files +2024-05-14 16:36:07,194 INFO SenderThread:125134 [sender.py:_start_run_threads():1123] run started: 6ikrp0kk with start time 1715704566.917641 +2024-05-14 16:36:07,202 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: check_version +2024-05-14 16:36:07,202 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: check_version +2024-05-14 16:36:07,286 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: run_start +2024-05-14 16:36:07,287 DEBUG HandlerThread:125134 [system_info.py:__init__():26] System info init +2024-05-14 16:36:07,288 DEBUG HandlerThread:125134 [system_info.py:__init__():41] System info init done +2024-05-14 16:36:07,288 INFO HandlerThread:125134 [system_monitor.py:start():194] Starting system monitor +2024-05-14 16:36:07,288 INFO SystemMonitor:125134 [system_monitor.py:_start():158] Starting system asset monitoring threads +2024-05-14 16:36:07,288 INFO HandlerThread:125134 [system_monitor.py:probe():214] Collecting system info +2024-05-14 16:36:07,288 INFO SystemMonitor:125134 [interfaces.py:start():188] Started cpu monitoring +2024-05-14 16:36:07,288 INFO SystemMonitor:125134 [interfaces.py:start():188] Started disk monitoring +2024-05-14 16:36:07,289 INFO SystemMonitor:125134 [interfaces.py:start():188] Started memory monitoring +2024-05-14 16:36:07,289 INFO SystemMonitor:125134 [interfaces.py:start():188] Started network monitoring +2024-05-14 16:36:07,333 DEBUG HandlerThread:125134 [system_info.py:probe():150] Probing system +2024-05-14 16:36:07,354 
DEBUG HandlerThread:125134 [system_info.py:_probe_git():135] Probing git +2024-05-14 16:36:07,374 ERROR HandlerThread:125134 [gitlib.py:root():92] git root error: Cmd('git') failed due to: exit code(128) + cmdline: git rev-parse --show-toplevel + stderr: 'fatal: detected dubious ownership in repository at '/data/cronscript/lm-evaluation-harness' +To add an exception for this directory, call: + + git config --global --add safe.directory /data/cronscript/lm-evaluation-harness' +2024-05-14 16:36:07,374 DEBUG HandlerThread:125134 [system_info.py:_probe_git():143] Probing git done +2024-05-14 16:36:07,374 DEBUG HandlerThread:125134 [system_info.py:probe():198] Probing system done +2024-05-14 16:36:07,374 DEBUG HandlerThread:125134 [system_monitor.py:probe():223] {'os': 'Linux-5.15.0-92-generic-x86_64-with-glibc2.35', 'python': '3.10.12', 'heartbeatAt': '2024-05-14T16:36:07.334023', 'startedAt': '2024-05-14T16:36:06.905531', 'docker': None, 'cuda': None, 'args': ('--model', 'hf', '--model_args', 'pretrained=/data/cronscript/ckpts//hf_ckpt//global_step120', '--tasks', 'indiccopa-hi', '--batch_size', 'auto', '--wandb_args', 'project=bharatgpt,group=trial_expt'), 'state': 'running', 'program': '-m lm_eval.__main__', 'codePathLocal': None, 'git': {'remote': 'https://github.com/EleutherAI/lm-evaluation-harness', 'commit': None}, 'email': None, 'root': '/data/cronscript/lm-evaluation-harness', 'host': 'vizzhy-150-3', 'username': 'root', 'executable': '/usr/bin/python3', 'cpu_count': 76, 'cpu_count_logical': 152, 'cpu_freq': {'current': 3389.3346710526316, 'min': 800.0, 'max': 3400.0}, 'cpu_freq_per_core': [{'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3299.935, 'min': 800.0, 'max': 3400.0}, {'current': 3299.996, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3299.996, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 
800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3365.29, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 
800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}], 'disk': {'/': {'total': 866.4415092468262, 'used': 863.4254722595215}}, 'memory': {'total': 1007.5000267028809}} +2024-05-14 16:36:07,375 INFO HandlerThread:125134 [system_monitor.py:probe():224] Finished collecting system info +2024-05-14 16:36:07,375 INFO HandlerThread:125134 [system_monitor.py:probe():227] Publishing system info +2024-05-14 16:36:07,376 INFO HandlerThread:125134 [system_monitor.py:probe():229] Finished publishing system info +2024-05-14 16:36:07,380 DEBUG SenderThread:125134 [sender.py:send():378] send: files +2024-05-14 16:36:07,380 INFO SenderThread:125134 [sender.py:_save_file():1389] saving file wandb-metadata.json with policy now +2024-05-14 16:36:07,476 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: python_packages +2024-05-14 16:36:07,477 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: python_packages +2024-05-14 16:36:07,477 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: stop_status +2024-05-14 16:36:07,478 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: stop_status +2024-05-14 16:36:07,683 DEBUG SenderThread:125134 [sender.py:send():378] send: telemetry +2024-05-14 16:36:07,926 INFO wandb-upload_0:125134 [upload_job.py:push():130] Uploaded file /tmp/tmpkiqc7vm7wandb/pbabnvzr-wandb-metadata.json +2024-05-14 16:36:08,196 INFO Thread-12 :125134 
[dir_watcher.py:_on_file_created():271] file/dir created: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-metadata.json +2024-05-14 16:36:08,196 INFO Thread-12 :125134 [dir_watcher.py:_on_file_created():271] file/dir created: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log +2024-05-14 16:36:08,196 INFO Thread-12 :125134 [dir_watcher.py:_on_file_created():271] file/dir created: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/requirements.txt +2024-05-14 16:36:10,196 INFO Thread-12 :125134 [dir_watcher.py:_on_file_modified():288] file/dir modified: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log +2024-05-14 16:36:12,178 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: status_report +2024-05-14 16:36:13,264 DEBUG SenderThread:125134 [sender.py:send():378] send: exit +2024-05-14 16:36:13,264 INFO SenderThread:125134 [sender.py:send_exit():585] handling exit code: 1 +2024-05-14 16:36:13,264 INFO SenderThread:125134 [sender.py:send_exit():587] handling runtime: 5 +2024-05-14 16:36:13,265 INFO SenderThread:125134 [sender.py:_save_file():1389] saving file wandb-summary.json with policy end +2024-05-14 16:36:13,265 INFO SenderThread:125134 [sender.py:send_exit():593] send defer +2024-05-14 16:36:13,265 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,265 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 0 +2024-05-14 16:36:13,266 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,266 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 0 +2024-05-14 16:36:13,266 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 1 +2024-05-14 16:36:13,266 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,266 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 1 +2024-05-14 16:36:13,266 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,266 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 1 +2024-05-14 16:36:13,266 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 2 +2024-05-14 16:36:13,266 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,266 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 2 +2024-05-14 16:36:13,266 INFO HandlerThread:125134 [system_monitor.py:finish():203] Stopping system monitor +2024-05-14 16:36:13,266 DEBUG SystemMonitor:125134 [system_monitor.py:_start():172] Starting system metrics aggregation loop +2024-05-14 16:36:13,266 DEBUG SystemMonitor:125134 [system_monitor.py:_start():179] Finished system metrics aggregation loop +2024-05-14 16:36:13,266 DEBUG SystemMonitor:125134 [system_monitor.py:_start():183] Publishing last batch of metrics +2024-05-14 16:36:13,267 INFO HandlerThread:125134 [interfaces.py:finish():200] Joined cpu monitor +2024-05-14 16:36:13,267 INFO HandlerThread:125134 [interfaces.py:finish():200] Joined disk monitor +2024-05-14 16:36:13,267 INFO HandlerThread:125134 [interfaces.py:finish():200] Joined memory monitor +2024-05-14 16:36:13,268 INFO HandlerThread:125134 [interfaces.py:finish():200] Joined network monitor +2024-05-14 16:36:13,268 DEBUG 
SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,268 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 2 +2024-05-14 16:36:13,268 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 3 +2024-05-14 16:36:13,268 DEBUG SenderThread:125134 [sender.py:send():378] send: stats +2024-05-14 16:36:13,268 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,268 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 3 +2024-05-14 16:36:13,268 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,268 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 3 +2024-05-14 16:36:13,268 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 4 +2024-05-14 16:36:13,268 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,268 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 4 +2024-05-14 16:36:13,268 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,268 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 4 +2024-05-14 16:36:13,268 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 5 +2024-05-14 16:36:13,268 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,268 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 5 +2024-05-14 16:36:13,269 DEBUG SenderThread:125134 [sender.py:send():378] send: summary +2024-05-14 16:36:13,269 INFO SenderThread:125134 [sender.py:_save_file():1389] saving file wandb-summary.json with policy end +2024-05-14 16:36:13,269 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,269 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 5 +2024-05-14 16:36:13,269 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 6 +2024-05-14 16:36:13,269 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,269 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 6 +2024-05-14 16:36:13,270 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,270 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 6 +2024-05-14 16:36:13,272 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: status_report +2024-05-14 16:36:13,345 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 7 +2024-05-14 16:36:13,345 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:13,345 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 7 +2024-05-14 16:36:13,346 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:13,346 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 7 +2024-05-14 16:36:14,199 INFO Thread-12 :125134 [dir_watcher.py:_on_file_modified():288] file/dir modified: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/config.yaml +2024-05-14 16:36:14,199 INFO Thread-12 :125134 [dir_watcher.py:_on_file_modified():288] file/dir modified: 
/data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log +2024-05-14 16:36:14,199 INFO Thread-12 :125134 [dir_watcher.py:_on_file_created():271] file/dir created: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-summary.json +2024-05-14 16:36:14,264 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-14 16:36:15,698 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 8 +2024-05-14 16:36:15,698 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: poll_exit +2024-05-14 16:36:15,698 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:15,698 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 8 +2024-05-14 16:36:15,698 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:15,698 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 8 +2024-05-14 16:36:15,698 INFO SenderThread:125134 [job_builder.py:build():432] Attempting to build job artifact +2024-05-14 16:36:15,699 INFO SenderThread:125134 [job_builder.py:_get_source_type():576] no source found +2024-05-14 16:36:15,699 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 9 +2024-05-14 16:36:15,699 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:15,699 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 9 +2024-05-14 16:36:15,699 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:15,699 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 9 +2024-05-14 16:36:15,699 INFO SenderThread:125134 [dir_watcher.py:finish():358] shutting down directory watcher +2024-05-14 16:36:16,200 INFO SenderThread:125134 [dir_watcher.py:_on_file_modified():288] file/dir modified: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log +2024-05-14 16:36:16,201 INFO SenderThread:125134 [dir_watcher.py:finish():388] scan: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files +2024-05-14 16:36:16,201 INFO SenderThread:125134 [dir_watcher.py:finish():402] scan save: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/requirements.txt requirements.txt +2024-05-14 16:36:16,201 INFO SenderThread:125134 [dir_watcher.py:finish():402] scan save: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-metadata.json wandb-metadata.json +2024-05-14 16:36:16,201 INFO SenderThread:125134 [dir_watcher.py:finish():402] scan save: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/config.yaml config.yaml +2024-05-14 16:36:16,201 INFO SenderThread:125134 [dir_watcher.py:finish():402] scan save: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log output.log +2024-05-14 16:36:16,201 INFO SenderThread:125134 [dir_watcher.py:finish():402] scan save: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-summary.json wandb-summary.json +2024-05-14 16:36:16,202 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 10 +2024-05-14 16:36:16,202 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:16,204 INFO HandlerThread:125134 
[handler.py:handle_request_defer():184] handle defer: 10 +2024-05-14 16:36:16,207 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:16,208 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 10 +2024-05-14 16:36:16,208 INFO SenderThread:125134 [file_pusher.py:finish():169] shutting down file pusher +2024-05-14 16:36:16,264 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-14 16:36:16,265 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: poll_exit +2024-05-14 16:36:16,452 INFO wandb-upload_1:125134 [upload_job.py:push():130] Uploaded file /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/requirements.txt +2024-05-14 16:36:16,600 INFO wandb-upload_0:125134 [upload_job.py:push():130] Uploaded file /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/config.yaml +2024-05-14 16:36:16,698 INFO wandb-upload_3:125134 [upload_job.py:push():130] Uploaded file /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/wandb-summary.json +2024-05-14 16:36:16,704 INFO wandb-upload_2:125134 [upload_job.py:push():130] Uploaded file /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/files/output.log +2024-05-14 16:36:16,905 INFO Thread-11 (_thread_body):125134 [sender.py:transition_state():613] send defer: 11 +2024-05-14 16:36:16,905 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:16,905 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 11 +2024-05-14 16:36:16,906 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:16,906 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 11 +2024-05-14 16:36:16,906 INFO SenderThread:125134 [file_pusher.py:join():175] waiting for file pusher +2024-05-14 16:36:16,906 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 12 +2024-05-14 16:36:16,906 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:16,906 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 12 +2024-05-14 16:36:16,906 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:16,906 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 12 +2024-05-14 16:36:16,906 INFO SenderThread:125134 [file_stream.py:finish():601] file stream finish called +2024-05-14 16:36:17,113 INFO SenderThread:125134 [file_stream.py:finish():605] file stream finish is done +2024-05-14 16:36:17,113 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 13 +2024-05-14 16:36:17,113 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:17,113 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 13 +2024-05-14 16:36:17,113 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:17,113 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 13 +2024-05-14 16:36:17,113 INFO SenderThread:125134 [sender.py:transition_state():613] send defer: 14 +2024-05-14 16:36:17,114 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: defer +2024-05-14 16:36:17,114 DEBUG SenderThread:125134 [sender.py:send():378] send: final 
+2024-05-14 16:36:17,114 INFO HandlerThread:125134 [handler.py:handle_request_defer():184] handle defer: 14 +2024-05-14 16:36:17,114 DEBUG SenderThread:125134 [sender.py:send():378] send: footer +2024-05-14 16:36:17,114 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: defer +2024-05-14 16:36:17,114 INFO SenderThread:125134 [sender.py:send_request_defer():609] handle sender defer: 14 +2024-05-14 16:36:17,114 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-14 16:36:17,115 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: poll_exit +2024-05-14 16:36:17,115 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-14 16:36:17,115 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: poll_exit +2024-05-14 16:36:17,115 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: server_info +2024-05-14 16:36:17,115 DEBUG SenderThread:125134 [sender.py:send_request():405] send_request: server_info +2024-05-14 16:36:17,116 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: get_summary +2024-05-14 16:36:17,116 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: sampled_history +2024-05-14 16:36:17,117 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: internal_messages +2024-05-14 16:36:17,169 INFO MainThread:125134 [wandb_run.py:_footer_history_summary_info():3994] rendering history +2024-05-14 16:36:17,169 INFO MainThread:125134 [wandb_run.py:_footer_history_summary_info():4026] rendering summary +2024-05-14 16:36:17,169 INFO MainThread:125134 [wandb_run.py:_footer_sync_info():3953] logging synced files +2024-05-14 16:36:17,170 DEBUG HandlerThread:125134 [handler.py:handle_request():158] handle_request: shutdown +2024-05-14 16:36:17,170 INFO HandlerThread:125134 [handler.py:finish():882] shutting down handler +2024-05-14 16:36:18,115 INFO WriterThread:125134 [datastore.py:close():296] close: /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/run-6ikrp0kk.wandb +2024-05-14 16:36:18,169 INFO SenderThread:125134 [sender.py:finish():1545] shutting down sender +2024-05-14 16:36:18,169 INFO SenderThread:125134 [file_pusher.py:finish():169] shutting down file pusher +2024-05-14 16:36:18,169 INFO SenderThread:125134 [file_pusher.py:join():175] waiting for file pusher diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug.log b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug.log new file mode 100644 index 0000000000000000000000000000000000000000..4d1f6e8e4399683f651b5fb02fbf562346e76290 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug.log @@ -0,0 +1,29 @@ +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Current SDK version is 0.17.0 +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Configure stats pid to 123910 +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Loading settings from /data/cronscript/lm-evaluation-harness/wandb/settings +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Loading settings from environment variables: {} +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Applying setup settings: 
{'_disable_service': False} +2024-05-14 16:36:06,914 WARNING MainThread:123910 [wandb_setup.py:_flush():76] Could not find program at -m lm_eval.__main__ +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': None, 'program': '-m lm_eval.__main__'} +2024-05-14 16:36:06,914 INFO MainThread:123910 [wandb_setup.py:_flush():76] Applying login settings: {} +2024-05-14 16:36:06,915 INFO MainThread:123910 [wandb_init.py:_log_setup():520] Logging user logs to /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug.log +2024-05-14 16:36:06,915 INFO MainThread:123910 [wandb_init.py:_log_setup():521] Logging internal logs to /data/cronscript/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/logs/debug-internal.log +2024-05-14 16:36:06,915 INFO MainThread:123910 [wandb_init.py:init():560] calling init triggers +2024-05-14 16:36:06,915 INFO MainThread:123910 [wandb_init.py:init():567] wandb.init called with sweep_config: {} +config: {} +2024-05-14 16:36:06,915 INFO MainThread:123910 [wandb_init.py:init():610] starting backend +2024-05-14 16:36:06,915 INFO MainThread:123910 [wandb_init.py:init():614] setting up manager +2024-05-14 16:36:06,916 INFO MainThread:123910 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn +2024-05-14 16:36:06,917 INFO MainThread:123910 [wandb_init.py:init():622] backend started and connected +2024-05-14 16:36:06,921 INFO MainThread:123910 [wandb_init.py:init():711] updated telemetry +2024-05-14 16:36:06,932 INFO MainThread:123910 [wandb_init.py:init():744] communicating run to backend with 90.0 second timeout +2024-05-14 16:36:07,201 INFO MainThread:123910 [wandb_run.py:_on_init():2396] communicating current version +2024-05-14 16:36:07,279 INFO MainThread:123910 [wandb_run.py:_on_init():2405] got version response +2024-05-14 16:36:07,279 INFO MainThread:123910 [wandb_init.py:init():795] starting run threads in backend +2024-05-14 16:36:07,477 INFO MainThread:123910 [wandb_run.py:_console_start():2374] atexit reg +2024-05-14 16:36:07,477 INFO MainThread:123910 [wandb_run.py:_redirect():2229] redirect: wrap_raw +2024-05-14 16:36:07,477 INFO MainThread:123910 [wandb_run.py:_redirect():2294] Wrapping output streams. +2024-05-14 16:36:07,477 INFO MainThread:123910 [wandb_run.py:_redirect():2319] Redirects installed. 
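The debug.log lines above record the client side of wandb.init: settings are loaded and flushed, a backend service process is spawned, the run is negotiated with the backend under a 90.0-second timeout, and stdout/stderr are wrapped so console output is captured into files/output.log. A hedged sketch of the kind of call that produces this sequence; the project and group names are assumptions taken from the --wandb_args logged for the later runs in this dump:

import wandb

# Mirrors the startup sequence in debug.log: init spawns the backend,
# communicates the run (90 s timeout), and installs console redirects
# before returning control to the user process.
run = wandb.init(
    project="bharatgpt",      # assumed from --wandb_args in the later runs
    group="trial_expt_2",
    settings=wandb.Settings(init_timeout=90),
)
print("captured into files/output.log")  # redirects are installed by now
run.finish()  # triggers the staged defer shutdown traced above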
+2024-05-14 16:36:07,478 INFO MainThread:123910 [wandb_init.py:init():838] run started, returning control to user process +2024-05-14 16:36:18,170 WARNING MsgRouterThr:123910 [router.py:message_loop():77] message_loop has been closed diff --git a/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/run-6ikrp0kk.wandb b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/run-6ikrp0kk.wandb new file mode 100644 index 0000000000000000000000000000000000000000..3705cdef9e771b8301b20405d870bd8b23e245e9 Binary files /dev/null and b/lm-evaluation-harness/wandb/run-20240514_163606-6ikrp0kk/run-6ikrp0kk.wandb differ diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/config.yaml b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bb3ee579ae653f0899dd0655f3b015f0a344ba1e --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/config.yaml @@ -0,0 +1,43 @@ +wandb_version: 1 + +_wandb: + desc: null + value: + python_version: 3.10.12 + cli_version: 0.17.0 + framework: huggingface + huggingface_version: 4.41.1 + is_jupyter_run: false + is_kaggle_kernel: false + start_time: 1716451216 + t: + 1: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 2: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 3: + - 23 + 4: 3.10.12 + 5: 0.17.0 + 6: 4.41.1 + 8: + - 5 + 13: linux-x86_64 diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log new file mode 100644 index 0000000000000000000000000000000000000000..c335c2bc3264a4725c0748828a850a24e9476fae --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log @@ -0,0 +1,34 @@ + +2024-05-23:08:00:17,000 INFO [__main__.py:251] Verbosity set to INFO +2024-05-23:08:00:25,444 INFO [__main__.py:335] Selected Tasks: ['arc_easy', 'hellaswag', 'mrpc', 'openbookqa', 'sst2', 'winogrande'] +2024-05-23:08:00:25,444 INFO [evaluator.py:131] Setting random seed to 0 | Setting numpy seed to 1234 | Setting torch manual seed to 1234 +2024-05-23:08:00:25,445 INFO [evaluator.py:177] Initializing hf model, with arguments: {'pretrained': '/mnt/weka/peacock/experiments/llama/checkpoint/llamav2-3b//hf_ckpt//global_step20000'} +2024-05-23:08:00:27,741 INFO [huggingface.py:164] Using device 'cuda' +Traceback (most recent call last): + File "/usr/lib/python3.10/runpy.py", line 196, in _run_module_as_main + return _run_code(code, main_globals, None, + File "/usr/lib/python3.10/runpy.py", line 86, in _run_code + exec(code, run_globals) + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/__main__.py", line 417, in <module> + cli_evaluate() + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/__main__.py", line 341, in cli_evaluate + results = evaluator.simple_evaluate( + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/utils.py", line 288, in _wrapper + return fn(*args, **kwargs) + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/evaluator.py", line 180, in simple_evaluate + lm = lm_eval.api.registry.get_model(model).create_from_arg_string( + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/api/model.py", line 134, in create_from_arg_string + return cls(**args, **args2) + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/models/huggingface.py", line 190, in __init__ +
self._get_config( + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/models/huggingface.py", line 471, in _get_config + self._config = transformers.AutoConfig.from_pretrained( + File "/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py", line 934, in from_pretrained + config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) + File "/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py", line 632, in get_config_dict + config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) + File "/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py", line 689, in _get_config_dict + resolved_config_file = cached_file( + File "/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py", line 370, in cached_file + raise EnvironmentError( +OSError: /mnt/weka/peacock/experiments/llama/checkpoint/llamav2-3b//hf_ckpt//global_step20000 does not appear to have a file named config.json. Checkout 'https://huggingface.co//mnt/weka/peacock/experiments/llama/checkpoint/llamav2-3b//hf_ckpt//global_step20000/tree/main' for available files. \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/requirements.txt b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..8150356038c46ec25f623f6e945d6dcb66a2e717 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/requirements.txt @@ -0,0 +1,155 @@ +DataProperty==1.0.1 +GitPython==3.1.43 +Jinja2==3.1.4 +Markdown==3.6 +MarkupSafe==2.1.5 +Pillow-SIMD==7.0.0.post3 +PyYAML==6.0 +Werkzeug==3.0.3 +absl-py==2.1.0 +accelerate==0.30.1 +aiohttp==3.9.5 +aiosignal==1.3.1 +async-timeout==4.0.3 +attrs==23.2.0 +av==9.2.0 +cachetools==5.3.3 +certifi==2024.2.2 +cffi==1.15.1 +cfgv==3.4.0 +chardet==5.2.0 +charset-normalizer==3.3.2 +click==8.1.7 +cmake==3.29.2 +colorama==0.4.6 +datasets==2.19.1 +deepspeed==0.12.4+hpu.synapse.v1.15.1 +dill==0.3.8 +distlib==0.3.8 +docker-pycreds==0.4.0 +einops==0.8.0 +evaluate==0.4.2 +exceptiongroup==1.2.1 +expecttest==0.2.1 +filelock==3.14.0 +frozenlist==1.4.1 +fsspec==2024.3.1 +gitdb==4.0.11 +google-auth-oauthlib==0.4.6 +google-auth==2.29.0 +grpcio==1.63.0 +habana-media-loader==1.15.1.15 +habana-pyhlml==1.15.1.15 +habana-torch-dataloader==1.15.1.15 +habana-torch-plugin==1.15.1.15 +habana_gpu_migration==1.15.1.15 +habana_quantization_toolkit==1.15.1.15 +hjson==3.1.0 +huggingface-hub==0.23.1 +identify==2.5.36 +idna==3.7 +iniconfig==2.0.0 +joblib==1.4.2 +jsonlines==4.0.0 +lightning-habana==1.4.0 +lightning-utilities==0.11.2 +lightning==2.2.0.post0 +lm_eval==0.4.2 +lm_eval==0.4.2 +lm_eval==0.4.2 +lxml==5.2.2 +mbstrdecoder==1.1.3 +more-itertools==10.2.0 +mpi4py==3.1.4 +mpmath==1.3.0 +multidict==6.0.5 +multiprocess==0.70.16 +networkx==3.3 +ninja==1.11.1.1 +nltk==3.8.1 +nodeenv==1.8.0 +numexpr==2.10.0 +numpy==1.23.5 +oauthlib==3.2.2 +packaging==24.0 +pandas==2.0.1 +pathspec==0.12.1 +pathvalidate==3.2.0 +peft==0.11.1 +perfetto==0.7.0 +pillow==10.3.0 +pip==22.0.2 +pip==23.3.1 +platformdirs==4.2.1 +pluggy==1.5.0 +portalocker==2.8.2 +pre-commit==3.3.3 +pretty-errors==1.2.25 +protobuf==3.20.3 +psutil==5.9.8 +py-cpuinfo==9.0.0 +pyarrow-hotfix==0.6 +pyarrow==16.1.0 +pyasn1==0.6.0 +pyasn1_modules==0.4.0 +pybind11==2.10.4 +pycparser==2.22 +pydantic==1.10.13 +pynvml==8.0.4 +pytablewriter==1.2.0 +pytest==8.2.0 
+python-dateutil==2.9.0.post0 +pytorch-lightning==2.2.4 +pytz==2024.1 +regex==2023.5.5 +requests-oauthlib==2.0.0 +requests==2.31.0 +rouge_score==0.1.2 +rsa==4.9 +sacrebleu==2.4.2 +safetensors==0.4.3 +scikit-learn==1.5.0 +scipy==1.13.1 +sentencepiece==0.2.0 +sentry-sdk==2.2.1 +setproctitle==1.3.3 +setuptools==59.6.0 +setuptools==69.5.1 +six==1.16.0 +smmap==5.0.1 +sqlitedict==2.1.0 +symengine==0.11.0 +sympy==1.12 +tabledata==1.3.3 +tabulate==0.9.0 +tcolorpy==0.1.6 +tdqm==0.0.1 +tensorboard-data-server==0.6.1 +tensorboard-plugin-wit==1.8.1 +tensorboard==2.11.2 +threadpoolctl==3.5.0 +tokenizers==0.19.1 +tomli==2.0.1 +torch==2.2.0a0+git8964477 +torch_tb_profiler==0.4.0 +torchaudio==2.2.0+08901ad +torchdata==0.7.1+5e6f7b7 +torchmetrics==1.4.0 +torchtext==0.17.0+400da5c +torchvision==0.17.0+b2383d4 +tqdm-multiprocess==0.0.11 +tqdm==4.66.4 +transformers==4.41.1 +typepy==1.3.2 +typing_extensions==4.11.0 +tzdata==2024.1 +urllib3==1.26.18 +virtualenv==20.26.1 +wandb==0.17.0 +wheel==0.37.1 +wheel==0.43.0 +word2number==1.1 +xxhash==3.4.1 +yamllint==1.35.1 +yarl==1.9.4 +zstandard==0.22.0 \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-metadata.json b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..f9237f925de52768cb10979b5036af9547d07e54 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-metadata.json @@ -0,0 +1,850 @@ +{ + "os": "Linux-5.15.0-92-generic-x86_64-with-glibc2.35", + "python": "3.10.12", + "heartbeatAt": "2024-05-23T08:00:16.800877", + "startedAt": "2024-05-23T08:00:16.091228", + "docker": null, + "cuda": null, + "args": [ + "--model", + "hf", + "--model_args", + "pretrained=/mnt/weka/peacock/experiments/llama/checkpoint/llamav2-3b//hf_ckpt//global_step20000", + "--tasks", + "hellaswag,arc_easy,openbookqa,winogrande,sst2,mrpc", + "--batch_size", + "auto", + "--wandb_args", + "project=bharatgpt,group=trial_expt_2" + ], + "state": "running", + "program": "-m lm_eval.__main__", + "codePathLocal": null, + "git": { + "remote": "https://github.com/EleutherAI/lm-evaluation-harness", + "commit": null + }, + "email": null, + "root": "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness", + "host": "peacock-evaluation-worker-0", + "username": "root", + "executable": "/usr/bin/python3", + "cpu_count": 80, + "cpu_count_logical": 160, + "cpu_freq": { + "current": 2327.4999875000003, + "min": 800.0, + "max": 3400.0 + }, + "cpu_freq_per_core": [ + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3391.994, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + 
"max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + 
{ + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 
2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + } + ], + "disk": { + "/": { + "total": 877.6341285705566, + "used": 211.6374168395996 + } + }, + "memory": { + "total": 1007.4379806518555 + } +} diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-summary.json b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-summary.json new file mode 100644 index 0000000000000000000000000000000000000000..8bf99d152ad35c3699ec8600ecb8b169d4e35875 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-summary.json @@ -0,0 +1 @@ +{"_wandb": {"runtime": 11}} \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug-internal.log b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug-internal.log new file mode 100644 index 0000000000000000000000000000000000000000..f87b3f2f83201ecb7c0025f0746cc13184207748 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug-internal.log @@ -0,0 +1,182 @@ +2024-05-23 08:00:16,111 INFO StreamThr :2566 [internal.py:wandb_internal():85] W&B internal server running at pid: 2566, started at: 2024-05-23 08:00:16.109850 +2024-05-23 08:00:16,116 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: status +2024-05-23 08:00:16,116 INFO WriterThread:2566 [datastore.py:open_for_write():87] open: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/run-i5wqccww.wandb +2024-05-23 08:00:16,119 DEBUG SenderThread:2566 [sender.py:send():378] send: header +2024-05-23 08:00:16,122 DEBUG SenderThread:2566 [sender.py:send():378] send: run +2024-05-23 08:00:16,603 INFO SenderThread:2566 
[dir_watcher.py:__init__():211] watching files in: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files +2024-05-23 08:00:16,603 INFO SenderThread:2566 [sender.py:_start_run_threads():1123] run started: i5wqccww with start time 1716451216.109939 +2024-05-23 08:00:16,607 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: check_version +2024-05-23 08:00:16,607 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: check_version +2024-05-23 08:00:16,725 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: run_start +2024-05-23 08:00:16,727 DEBUG HandlerThread:2566 [system_info.py:__init__():26] System info init +2024-05-23 08:00:16,727 DEBUG HandlerThread:2566 [system_info.py:__init__():41] System info init done +2024-05-23 08:00:16,727 INFO HandlerThread:2566 [system_monitor.py:start():194] Starting system monitor +2024-05-23 08:00:16,727 INFO SystemMonitor:2566 [system_monitor.py:_start():158] Starting system asset monitoring threads +2024-05-23 08:00:16,728 INFO HandlerThread:2566 [system_monitor.py:probe():214] Collecting system info +2024-05-23 08:00:16,734 INFO SystemMonitor:2566 [interfaces.py:start():188] Started cpu monitoring +2024-05-23 08:00:16,735 INFO SystemMonitor:2566 [interfaces.py:start():188] Started disk monitoring +2024-05-23 08:00:16,735 INFO SystemMonitor:2566 [interfaces.py:start():188] Started memory monitoring +2024-05-23 08:00:16,735 INFO SystemMonitor:2566 [interfaces.py:start():188] Started network monitoring +2024-05-23 08:00:16,800 DEBUG HandlerThread:2566 [system_info.py:probe():150] Probing system +2024-05-23 08:00:16,804 DEBUG HandlerThread:2566 [system_info.py:_probe_git():135] Probing git +2024-05-23 08:00:16,813 ERROR HandlerThread:2566 [gitlib.py:root():92] git root error: Cmd('git') failed due to: exit code(128) + cmdline: git rev-parse --show-toplevel + stderr: 'fatal: detected dubious ownership in repository at '/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness' +To add an exception for this directory, call: + + git config --global --add safe.directory /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness' +2024-05-23 08:00:16,813 DEBUG HandlerThread:2566 [system_info.py:_probe_git():143] Probing git done +2024-05-23 08:00:16,813 DEBUG HandlerThread:2566 [system_info.py:probe():198] Probing system done +2024-05-23 08:00:16,813 DEBUG HandlerThread:2566 [system_monitor.py:probe():223] {'os': 'Linux-5.15.0-92-generic-x86_64-with-glibc2.35', 'python': '3.10.12', 'heartbeatAt': '2024-05-23T08:00:16.800877', 'startedAt': '2024-05-23T08:00:16.091228', 'docker': None, 'cuda': None, 'args': ('--model', 'hf', '--model_args', 'pretrained=/mnt/weka/peacock/experiments/llama/checkpoint/llamav2-3b//hf_ckpt//global_step20000', '--tasks', 'hellaswag,arc_easy,openbookqa,winogrande,sst2,mrpc', '--batch_size', 'auto', '--wandb_args', 'project=bharatgpt,group=trial_expt_2'), 'state': 'running', 'program': '-m lm_eval.__main__', 'codePathLocal': None, 'git': {'remote': 'https://github.com/EleutherAI/lm-evaluation-harness', 'commit': None}, 'email': None, 'root': '/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness', 'host': 'peacock-evaluation-worker-0', 'username': 'root', 'executable': '/usr/bin/python3', 'cpu_count': 80, 'cpu_count_logical': 160, 'cpu_freq': {'current': 2327.4999875000003, 'min': 800.0, 'max': 3400.0}, 'cpu_freq_per_core': [{'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 3391.994, 
'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 
'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 
'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}], 'disk': {'/': {'total': 877.6341285705566, 'used': 211.6374168395996}}, 'memory': {'total': 1007.4379806518555}} +2024-05-23 08:00:16,813 INFO HandlerThread:2566 [system_monitor.py:probe():224] Finished collecting system info +2024-05-23 08:00:16,813 INFO HandlerThread:2566 [system_monitor.py:probe():227] Publishing system info +2024-05-23 08:00:16,816 INFO HandlerThread:2566 [system_monitor.py:probe():229] Finished publishing system info +2024-05-23 08:00:16,821 DEBUG SenderThread:2566 [sender.py:send():378] send: files +2024-05-23 08:00:16,821 INFO SenderThread:2566 [sender.py:_save_file():1389] saving file wandb-metadata.json with policy now +2024-05-23 08:00:16,993 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: python_packages +2024-05-23 08:00:16,993 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: python_packages +2024-05-23 08:00:16,994 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: stop_status +2024-05-23 08:00:16,995 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: stop_status +2024-05-23 08:00:17,114 DEBUG SenderThread:2566 [sender.py:send():378] send: telemetry +2024-05-23 08:00:17,451 INFO wandb-upload_0:2566 [upload_job.py:push():130] Uploaded file /tmp/tmpod_tnz34wandb/66is5q3l-wandb-metadata.json +2024-05-23 08:00:17,606 INFO Thread-12 :2566 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/requirements.txt +2024-05-23 08:00:17,606 INFO Thread-12 :2566 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log +2024-05-23 08:00:17,606 INFO Thread-12 :2566 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-metadata.json +2024-05-23 08:00:19,606 INFO Thread-12 :2566 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log +2024-05-23 08:00:22,116 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: status_report +2024-05-23 08:00:27,445 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: status_report +2024-05-23 08:00:27,611 INFO Thread-12 :2566 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log +2024-05-23 08:00:27,749 DEBUG SenderThread:2566 [sender.py:send():378] send: exit +2024-05-23 08:00:27,749 INFO SenderThread:2566 
[sender.py:send_exit():585] handling exit code: 1 +2024-05-23 08:00:27,749 INFO SenderThread:2566 [sender.py:send_exit():587] handling runtime: 11 +2024-05-23 08:00:27,751 INFO SenderThread:2566 [sender.py:_save_file():1389] saving file wandb-summary.json with policy end +2024-05-23 08:00:27,751 INFO SenderThread:2566 [sender.py:send_exit():593] send defer +2024-05-23 08:00:27,751 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,751 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 0 +2024-05-23 08:00:27,751 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,751 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 0 +2024-05-23 08:00:27,751 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 1 +2024-05-23 08:00:27,752 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,752 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 1 +2024-05-23 08:00:27,752 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,752 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 1 +2024-05-23 08:00:27,752 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 2 +2024-05-23 08:00:27,752 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,752 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 2 +2024-05-23 08:00:27,752 INFO HandlerThread:2566 [system_monitor.py:finish():203] Stopping system monitor +2024-05-23 08:00:27,752 DEBUG SystemMonitor:2566 [system_monitor.py:_start():172] Starting system metrics aggregation loop +2024-05-23 08:00:27,752 DEBUG SystemMonitor:2566 [system_monitor.py:_start():179] Finished system metrics aggregation loop +2024-05-23 08:00:27,752 DEBUG SystemMonitor:2566 [system_monitor.py:_start():183] Publishing last batch of metrics +2024-05-23 08:00:27,753 INFO HandlerThread:2566 [interfaces.py:finish():200] Joined cpu monitor +2024-05-23 08:00:27,753 INFO HandlerThread:2566 [interfaces.py:finish():200] Joined disk monitor +2024-05-23 08:00:27,753 INFO HandlerThread:2566 [interfaces.py:finish():200] Joined memory monitor +2024-05-23 08:00:27,753 INFO HandlerThread:2566 [interfaces.py:finish():200] Joined network monitor +2024-05-23 08:00:27,753 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,753 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 2 +2024-05-23 08:00:27,753 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 3 +2024-05-23 08:00:27,753 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,754 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 3 +2024-05-23 08:00:27,754 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,754 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 3 +2024-05-23 08:00:27,754 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 4 +2024-05-23 08:00:27,754 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,754 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 4 +2024-05-23 08:00:27,754 DEBUG SenderThread:2566 
[sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,754 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 4 +2024-05-23 08:00:27,754 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 5 +2024-05-23 08:00:27,754 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,754 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 5 +2024-05-23 08:00:27,754 DEBUG SenderThread:2566 [sender.py:send():378] send: summary +2024-05-23 08:00:27,755 INFO SenderThread:2566 [sender.py:_save_file():1389] saving file wandb-summary.json with policy end +2024-05-23 08:00:27,755 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,755 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 5 +2024-05-23 08:00:27,755 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 6 +2024-05-23 08:00:27,755 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,756 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 6 +2024-05-23 08:00:27,756 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,756 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 6 +2024-05-23 08:00:27,760 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: status_report +2024-05-23 08:00:27,834 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 7 +2024-05-23 08:00:27,834 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:27,834 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 7 +2024-05-23 08:00:27,835 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:27,835 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 7 +2024-05-23 08:00:28,613 INFO Thread-12 :2566 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/config.yaml +2024-05-23 08:00:28,613 INFO Thread-12 :2566 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-summary.json +2024-05-23 08:00:28,749 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-23 08:00:29,137 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 8 +2024-05-23 08:00:29,137 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: poll_exit +2024-05-23 08:00:29,137 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:29,138 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 8 +2024-05-23 08:00:29,138 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:29,138 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 8 +2024-05-23 08:00:29,138 INFO SenderThread:2566 [job_builder.py:build():432] Attempting to build job artifact +2024-05-23 08:00:29,138 INFO SenderThread:2566 [job_builder.py:_get_source_type():576] no source found +2024-05-23 08:00:29,138 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 9 +2024-05-23 08:00:29,138 DEBUG 
HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:29,139 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 9 +2024-05-23 08:00:29,139 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:29,139 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 9 +2024-05-23 08:00:29,139 INFO SenderThread:2566 [dir_watcher.py:finish():358] shutting down directory watcher +2024-05-23 08:00:29,614 INFO SenderThread:2566 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log +2024-05-23 08:00:29,614 INFO SenderThread:2566 [dir_watcher.py:finish():388] scan: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files +2024-05-23 08:00:29,614 INFO SenderThread:2566 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/config.yaml config.yaml +2024-05-23 08:00:29,615 INFO SenderThread:2566 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log output.log +2024-05-23 08:00:29,617 INFO SenderThread:2566 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/requirements.txt requirements.txt +2024-05-23 08:00:29,619 INFO SenderThread:2566 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-summary.json wandb-summary.json +2024-05-23 08:00:29,619 INFO SenderThread:2566 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-metadata.json wandb-metadata.json +2024-05-23 08:00:29,619 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 10 +2024-05-23 08:00:29,619 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:29,619 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 10 +2024-05-23 08:00:29,620 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:29,620 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 10 +2024-05-23 08:00:29,620 INFO SenderThread:2566 [file_pusher.py:finish():169] shutting down file pusher +2024-05-23 08:00:29,749 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-23 08:00:29,750 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: poll_exit +2024-05-23 08:00:29,954 INFO wandb-upload_0:2566 [upload_job.py:push():130] Uploaded file /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/config.yaml +2024-05-23 08:00:30,216 INFO wandb-upload_1:2566 [upload_job.py:push():130] Uploaded file /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/output.log +2024-05-23 08:00:30,220 INFO wandb-upload_2:2566 [upload_job.py:push():130] Uploaded file /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/requirements.txt +2024-05-23 08:00:30,231 INFO wandb-upload_3:2566 [upload_job.py:push():130] Uploaded file 
/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/files/wandb-summary.json +2024-05-23 08:00:30,432 INFO Thread-11 (_thread_body):2566 [sender.py:transition_state():613] send defer: 11 +2024-05-23 08:00:30,432 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:30,432 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 11 +2024-05-23 08:00:30,432 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:30,432 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 11 +2024-05-23 08:00:30,432 INFO SenderThread:2566 [file_pusher.py:join():175] waiting for file pusher +2024-05-23 08:00:30,432 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 12 +2024-05-23 08:00:30,432 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:30,432 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 12 +2024-05-23 08:00:30,433 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:30,433 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 12 +2024-05-23 08:00:30,433 INFO SenderThread:2566 [file_stream.py:finish():601] file stream finish called +2024-05-23 08:00:30,506 INFO SenderThread:2566 [file_stream.py:finish():605] file stream finish is done +2024-05-23 08:00:30,506 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 13 +2024-05-23 08:00:30,507 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:30,507 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 13 +2024-05-23 08:00:30,507 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:30,507 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 13 +2024-05-23 08:00:30,507 INFO SenderThread:2566 [sender.py:transition_state():613] send defer: 14 +2024-05-23 08:00:30,507 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: defer +2024-05-23 08:00:30,507 INFO HandlerThread:2566 [handler.py:handle_request_defer():184] handle defer: 14 +2024-05-23 08:00:30,507 DEBUG SenderThread:2566 [sender.py:send():378] send: final +2024-05-23 08:00:30,507 DEBUG SenderThread:2566 [sender.py:send():378] send: footer +2024-05-23 08:00:30,507 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: defer +2024-05-23 08:00:30,507 INFO SenderThread:2566 [sender.py:send_request_defer():609] handle sender defer: 14 +2024-05-23 08:00:30,508 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-23 08:00:30,508 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-23 08:00:30,508 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: server_info +2024-05-23 08:00:30,508 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: get_summary +2024-05-23 08:00:30,508 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: sampled_history +2024-05-23 08:00:30,508 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: internal_messages +2024-05-23 08:00:30,509 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: poll_exit +2024-05-23 08:00:30,509 DEBUG SenderThread:2566 [sender.py:send_request():405] 
send_request: poll_exit +2024-05-23 08:00:30,509 DEBUG SenderThread:2566 [sender.py:send_request():405] send_request: server_info +2024-05-23 08:00:30,563 INFO MainThread:2566 [wandb_run.py:_footer_history_summary_info():3994] rendering history +2024-05-23 08:00:30,563 INFO MainThread:2566 [wandb_run.py:_footer_history_summary_info():4026] rendering summary +2024-05-23 08:00:30,563 INFO MainThread:2566 [wandb_run.py:_footer_sync_info():3953] logging synced files +2024-05-23 08:00:30,563 DEBUG HandlerThread:2566 [handler.py:handle_request():158] handle_request: shutdown +2024-05-23 08:00:30,563 INFO HandlerThread:2566 [handler.py:finish():882] shutting down handler +2024-05-23 08:00:31,509 INFO WriterThread:2566 [datastore.py:close():296] close: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/run-i5wqccww.wandb +2024-05-23 08:00:31,563 INFO SenderThread:2566 [sender.py:finish():1545] shutting down sender +2024-05-23 08:00:31,563 INFO SenderThread:2566 [file_pusher.py:finish():169] shutting down file pusher +2024-05-23 08:00:31,563 INFO SenderThread:2566 [file_pusher.py:join():175] waiting for file pusher diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug.log b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug.log new file mode 100644 index 0000000000000000000000000000000000000000..d0e65271bb578f7b359578af2056853eb4f02669 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug.log @@ -0,0 +1,29 @@ +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Current SDK version is 0.17.0 +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Configure stats pid to 2411 +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Loading settings from /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/settings +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Loading settings from environment variables: {} +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False} +2024-05-23 08:00:16,104 WARNING MainThread:2411 [wandb_setup.py:_flush():76] Could not find program at -m lm_eval.__main__ +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': None, 'program': '-m lm_eval.__main__'} +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_setup.py:_flush():76] Applying login settings: {} +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_init.py:_log_setup():520] Logging user logs to /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug.log +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_init.py:_log_setup():521] Logging internal logs to /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/logs/debug-internal.log +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_init.py:init():560] calling init triggers +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_init.py:init():567] wandb.init called with sweep_config: {} +config: {} +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_init.py:init():610] starting backend +2024-05-23 08:00:16,104 INFO MainThread:2411 [wandb_init.py:init():614] setting up manager +2024-05-23 08:00:16,108 
INFO MainThread:2411 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn +2024-05-23 08:00:16,109 INFO MainThread:2411 [wandb_init.py:init():622] backend started and connected +2024-05-23 08:00:16,113 INFO MainThread:2411 [wandb_init.py:init():711] updated telemetry +2024-05-23 08:00:16,122 INFO MainThread:2411 [wandb_init.py:init():744] communicating run to backend with 90.0 second timeout +2024-05-23 08:00:16,607 INFO MainThread:2411 [wandb_run.py:_on_init():2396] communicating current version +2024-05-23 08:00:16,719 INFO MainThread:2411 [wandb_run.py:_on_init():2405] got version response +2024-05-23 08:00:16,719 INFO MainThread:2411 [wandb_init.py:init():795] starting run threads in backend +2024-05-23 08:00:16,994 INFO MainThread:2411 [wandb_run.py:_console_start():2374] atexit reg +2024-05-23 08:00:16,994 INFO MainThread:2411 [wandb_run.py:_redirect():2229] redirect: wrap_raw +2024-05-23 08:00:16,994 INFO MainThread:2411 [wandb_run.py:_redirect():2294] Wrapping output streams. +2024-05-23 08:00:16,994 INFO MainThread:2411 [wandb_run.py:_redirect():2319] Redirects installed. +2024-05-23 08:00:16,997 INFO MainThread:2411 [wandb_init.py:init():838] run started, returning control to user process +2024-05-23 08:00:31,564 WARNING MsgRouterThr:2411 [router.py:message_loop():77] message_loop has been closed diff --git a/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/run-i5wqccww.wandb b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/run-i5wqccww.wandb new file mode 100644 index 0000000000000000000000000000000000000000..7af3b5a437da86e37bab6065ca5f2a886f8073fe Binary files /dev/null and b/lm-evaluation-harness/wandb/run-20240523_080016-i5wqccww/run-i5wqccww.wandb differ diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/config.yaml b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..297fed682df39824390772ba7e6b31bf64a8d76a --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/config.yaml @@ -0,0 +1,44 @@ +wandb_version: 1 + +_wandb: + desc: null + value: + python_version: 3.10.12 + cli_version: 0.17.0 + framework: huggingface + huggingface_version: 4.36.2 + is_jupyter_run: false + is_kaggle_kernel: false + start_time: 1717052684 + t: + 1: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 2: + - 1 + - 5 + - 11 + - 49 + - 51 + - 53 + - 55 + - 71 + - 98 + - 100 + 3: + - 13 + - 23 + 4: 3.10.12 + 5: 0.17.0 + 6: 4.36.2 + 8: + - 5 + 13: linux-x86_64 diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log new file mode 100644 index 0000000000000000000000000000000000000000..11f7a7a27ef74f1ee7815fcc881e5188a1f5e930 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log @@ -0,0 +1,38 @@ + +2024-05-30:07:04:45,449 INFO [__main__.py:251] Verbosity set to INFO +2024-05-30:07:04:54,634 INFO [__main__.py:335] Selected Tasks: ['arc_easy', 'boolq', 'copa', 'mrpc', 'piqa', 'sst2', 'winogrande'] +2024-05-30:07:04:54,635 INFO [evaluator.py:131] Setting random seed to 0 | Setting numpy seed to 1234 | Setting torch manual seed to 1234 +2024-05-30:07:04:54,636 INFO [evaluator.py:177] Initializing hf model, with arguments: {'pretrained': '/mnt/weka/peacock/experiments/llama/eval/checkpoint-english/llamav2-3b/hf/global_step10000', 
'tokenizer': '/mnt/weka/peacock/tokenization/trained-tokenizer/enhiben_50k/'} +2024-05-30:07:04:56,924 INFO [huggingface.py:164] Using device 'cuda' +/usr/local/lib/python3.10/dist-packages/habana_frameworks/torch/gpu_migration/torch/cuda/memory.py:36: UserWarning: No need to call empty_cache on HPU. It manages the memory internally in an effcient way. + warnings.warn( +Traceback (most recent call last): + File "/usr/lib/python3.10/runpy.py", line 196, in _run_module_as_main + return _run_code(code, main_globals, None, + File "/usr/lib/python3.10/runpy.py", line 86, in _run_code + exec(code, run_globals) + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/__main__.py", line 417, in <module> + cli_evaluate() + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/__main__.py", line 341, in cli_evaluate + results = evaluator.simple_evaluate( + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/utils.py", line 288, in _wrapper + return fn(*args, **kwargs) + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/evaluator.py", line 180, in simple_evaluate + lm = lm_eval.api.registry.get_model(model).create_from_arg_string( + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/api/model.py", line 134, in create_from_arg_string + return cls(**args, **args2) + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/models/huggingface.py", line 237, in __init__ + self._create_tokenizer( + File "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/lm_eval/models/huggingface.py", line 630, in _create_tokenizer + self.tokenizer = transformers.AutoTokenizer.from_pretrained( + File "/usr/local/lib/python3.10/dist-packages/transformers/models/auto/tokenization_auto.py", line 752, in from_pretrained + config = AutoConfig.from_pretrained( + File "/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py", line 1082, in from_pretrained + config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) + File "/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py", line 644, in get_config_dict + config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) + File "/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py", line 699, in _get_config_dict + resolved_config_file = cached_file( + File "/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py", line 360, in cached_file + raise EnvironmentError( +OSError: /mnt/weka/peacock/tokenization/trained-tokenizer/enhiben_50k/ does not appear to have a file named config.json. Checkout 'https://huggingface.co//mnt/weka/peacock/tokenization/trained-tokenizer/enhiben_50k//main' for available files.
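The traceback above isolates the failure: the harness forwards the tokenizer= path to transformers.AutoTokenizer.from_pretrained, which finds no usable tokenizer metadata in that directory, falls back to AutoConfig.from_pretrained, and that fallback requires the config.json the directory lacks. A minimal preflight sketch along these lines (a hypothetical helper, not part of lm-eval; it assumes only the path from the failing --model_args) would catch this before a full launch:

import os
from transformers import AutoTokenizer

# Path taken from the failing run's --model_args tokenizer=...
TOKENIZER_DIR = "/mnt/weka/peacock/tokenization/trained-tokenizer/enhiben_50k/"

# AutoTokenizer resolves the tokenizer class from tokenizer_config.json or,
# failing that, from config.json via AutoConfig (the step that raised the
# OSError above), so at least one of the two must exist in the directory.
meta = [f for f in ("tokenizer_config.json", "config.json")
        if os.path.isfile(os.path.join(TOKENIZER_DIR, f))]
if not meta:
    raise SystemExit(f"{TOKENIZER_DIR} has no tokenizer_config.json/config.json; "
                     "re-export the tokenizer with save_pretrained() first")

tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR)
print(f"tokenizer loaded, vocab size {tokenizer.vocab_size}")

Re-exporting the trained tokenizer with tokenizer.save_pretrained(TOKENIZER_DIR) writes the missing metadata files and is likely the smallest fix; pointing tokenizer= at a directory that already contains them would work equally well.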
\ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/requirements.txt b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..7391d0fb5302364497bd6017486c36f0caae0613 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/requirements.txt @@ -0,0 +1,154 @@ +DataProperty==1.0.1 +GitPython==3.1.43 +Jinja2==3.1.4 +Markdown==3.6 +MarkupSafe==2.1.5 +Pillow-SIMD==7.0.0.post3 +PyYAML==6.0 +Werkzeug==3.0.3 +absl-py==2.1.0 +accelerate==0.30.1 +aiohttp==3.9.5 +aiosignal==1.3.1 +async-timeout==4.0.3 +attrs==23.2.0 +av==9.2.0 +cachetools==5.3.3 +certifi==2024.2.2 +cffi==1.15.1 +cfgv==3.4.0 +chardet==5.2.0 +charset-normalizer==3.3.2 +click==8.1.7 +cmake==3.29.2 +colorama==0.4.6 +datasets==2.19.1 +deepspeed==0.12.4+hpu.synapse.v1.15.1 +dill==0.3.8 +distlib==0.3.8 +docker-pycreds==0.4.0 +einops==0.8.0 +evaluate==0.4.2 +exceptiongroup==1.2.1 +expecttest==0.2.1 +filelock==3.14.0 +frozenlist==1.4.1 +fsspec==2024.3.1 +gitdb==4.0.11 +google-auth-oauthlib==0.4.6 +google-auth==2.29.0 +grpcio==1.63.0 +habana-media-loader==1.15.1.15 +habana-pyhlml==1.15.1.15 +habana-torch-dataloader==1.15.1.15 +habana-torch-plugin==1.15.1.15 +habana_gpu_migration==1.15.1.15 +habana_quantization_toolkit==1.15.1.15 +hjson==3.1.0 +huggingface-hub==0.23.2 +identify==2.5.36 +idna==3.7 +iniconfig==2.0.0 +joblib==1.4.2 +jsonlines==4.0.0 +lightning-habana==1.4.0 +lightning-utilities==0.11.2 +lightning==2.2.0.post0 +lm_eval==0.4.2 +lm_eval==0.4.2 +lm_eval==0.4.2 +lxml==5.2.2 +mbstrdecoder==1.1.3 +more-itertools==10.2.0 +mpi4py==3.1.4 +mpmath==1.3.0 +multidict==6.0.5 +multiprocess==0.70.16 +networkx==3.3 +ninja==1.11.1.1 +nltk==3.8.1 +nodeenv==1.8.0 +numexpr==2.10.0 +numpy==1.23.5 +oauthlib==3.2.2 +packaging==24.0 +pandas==2.0.1 +pathspec==0.12.1 +pathvalidate==3.2.0 +peft==0.11.1 +perfetto==0.7.0 +pip==22.0.2 +pip==23.3.1 +platformdirs==4.2.1 +pluggy==1.5.0 +portalocker==2.8.2 +pre-commit==3.3.3 +pretty-errors==1.2.25 +protobuf==3.20.3 +psutil==5.9.8 +py-cpuinfo==9.0.0 +pyarrow-hotfix==0.6 +pyarrow==16.1.0 +pyasn1==0.6.0 +pyasn1_modules==0.4.0 +pybind11==2.10.4 +pycparser==2.22 +pydantic==1.10.13 +pynvml==8.0.4 +pytablewriter==1.2.0 +pytest==8.2.0 +python-dateutil==2.9.0.post0 +pytorch-lightning==2.2.4 +pytz==2024.1 +regex==2023.5.5 +requests-oauthlib==2.0.0 +requests==2.31.0 +rouge_score==0.1.2 +rsa==4.9 +sacrebleu==2.4.2 +safetensors==0.4.3 +scikit-learn==1.5.0 +scipy==1.13.1 +sentencepiece==0.2.0 +sentry-sdk==2.3.1 +setproctitle==1.3.3 +setuptools==59.6.0 +setuptools==69.5.1 +six==1.16.0 +smmap==5.0.1 +sqlitedict==2.1.0 +symengine==0.11.0 +sympy==1.12 +tabledata==1.3.3 +tabulate==0.9.0 +tcolorpy==0.1.6 +tdqm==0.0.1 +tensorboard-data-server==0.6.1 +tensorboard-plugin-wit==1.8.1 +tensorboard==2.11.2 +threadpoolctl==3.5.0 +tokenizers==0.15.2 +tomli==2.0.1 +torch==2.2.0a0+git8964477 +torch_tb_profiler==0.4.0 +torchaudio==2.2.0+08901ad +torchdata==0.7.1+5e6f7b7 +torchmetrics==1.4.0 +torchtext==0.17.0+400da5c +torchvision==0.17.0+b2383d4 +tqdm-multiprocess==0.0.11 +tqdm==4.66.4 +transformers==4.36.2 +typepy==1.3.2 +typing_extensions==4.11.0 +tzdata==2024.1 +urllib3==1.26.18 +virtualenv==20.26.1 +wandb==0.17.0 +wheel==0.37.1 +wheel==0.43.0 +word2number==1.1 +xxhash==3.4.1 +yamllint==1.35.1 +yarl==1.9.4 +zstandard==0.22.0 \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-metadata.json 
b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-metadata.json new file mode 100644 index 0000000000000000000000000000000000000000..21be30045fa6aca09414739b6ece42e3777554d1 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-metadata.json @@ -0,0 +1,850 @@ +{ + "os": "Linux-5.15.0-92-generic-x86_64-with-glibc2.35", + "python": "3.10.12", + "heartbeatAt": "2024-05-30T07:04:45.242301", + "startedAt": "2024-05-30T07:04:44.753569", + "docker": null, + "cuda": null, + "args": [ + "--model", + "hf", + "--model_args", + "pretrained=/mnt/weka/peacock/experiments/llama/eval/checkpoint-english/llamav2-3b/hf/global_step10000,tokenizer=/mnt/weka/peacock/tokenization/trained-tokenizer/enhiben_50k/", + "--tasks", + "winogrande,sst2,mrpc,arc_easy,copa,piqa,boolq", + "--batch_size", + "auto", + "--wandb_args", + "project=english-eval,group=exp2,name=global_step10000" + ], + "state": "running", + "program": "-m lm_eval.__main__", + "codePathLocal": null, + "git": { + "remote": "https://github.com/EleutherAI/lm-evaluation-harness", + "commit": null + }, + "email": null, + "root": "/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness", + "host": "peacock-evaluation-debug-5-worker-0", + "username": "root", + "executable": "/usr/bin/python3", + "cpu_count": 80, + "cpu_count_logical": 160, + "cpu_freq": { + "current": 2334.3043875, + "min": 800.0, + "max": 3400.0 + }, + "cpu_freq_per_core": [ + { + "current": 3399.997, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 3400.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 
3400.0 + }, + [... approximately 150 further cpu_freq_per_core entries elided: near-verbatim duplicates, all {"current": 2300.0, "min": 800.0, "max": 3400.0} except a few cores reading "current": 3400.0 ...] + { + "current": 2300.0, +
"min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + }, + { + "current": 2300.0, + "min": 800.0, + "max": 3400.0 + } + ], + "disk": { + "/": { + "total": 877.6341285705566, + "used": 212.06907272338867 + } + }, + "memory": { + "total": 1007.4379348754883 + } +} diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-summary.json b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-summary.json new file mode 100644 index 0000000000000000000000000000000000000000..0396467f7569a8166ce6a4890676d52689b450a7 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-summary.json @@ -0,0 +1 @@ +{"_wandb": {"runtime": 38}} \ No newline at end of file diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug-internal.log b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug-internal.log new file mode 100644 index 0000000000000000000000000000000000000000..a13965a720e4d55ce0fe5ed1cc41f8aa5a8b3037 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug-internal.log @@ -0,0 +1,194 @@ +2024-05-30 07:04:44,779 INFO StreamThr :900 [internal.py:wandb_internal():85] W&B internal server running at pid: 900, started at: 2024-05-30 07:04:44.774031 +2024-05-30 07:04:44,780 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status +2024-05-30 07:04:44,781 INFO WriterThread:900 [datastore.py:open_for_write():87] open: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/run-z0193g8n.wandb +2024-05-30 07:04:44,783 DEBUG SenderThread:900 [sender.py:send():378] send: header +2024-05-30 07:04:44,787 DEBUG SenderThread:900 [sender.py:send():378] send: run +2024-05-30 07:04:45,050 INFO SenderThread:900 [dir_watcher.py:__init__():211] watching files in: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files +2024-05-30 07:04:45,050 INFO SenderThread:900 [sender.py:_start_run_threads():1123] run started: z0193g8n with start time 1717052684.7741 +2024-05-30 07:04:45,050 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: check_version +2024-05-30 07:04:45,051 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: check_version +2024-05-30 07:04:45,166 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: run_start +2024-05-30 07:04:45,169 DEBUG HandlerThread:900 [system_info.py:__init__():26] System info init +2024-05-30 07:04:45,169 DEBUG HandlerThread:900 [system_info.py:__init__():41] System info init done +2024-05-30 07:04:45,169 INFO HandlerThread:900 [system_monitor.py:start():194] Starting system monitor +2024-05-30 07:04:45,169 INFO SystemMonitor:900 [system_monitor.py:_start():158] Starting system asset monitoring threads +2024-05-30 07:04:45,169 INFO HandlerThread:900 [system_monitor.py:probe():214] Collecting system info 
+2024-05-30 07:04:45,176 INFO SystemMonitor:900 [interfaces.py:start():188] Started cpu monitoring +2024-05-30 07:04:45,176 INFO SystemMonitor:900 [interfaces.py:start():188] Started disk monitoring +2024-05-30 07:04:45,177 INFO SystemMonitor:900 [interfaces.py:start():188] Started memory monitoring +2024-05-30 07:04:45,179 INFO SystemMonitor:900 [interfaces.py:start():188] Started network monitoring +2024-05-30 07:04:45,242 DEBUG HandlerThread:900 [system_info.py:probe():150] Probing system +2024-05-30 07:04:45,245 DEBUG HandlerThread:900 [system_info.py:_probe_git():135] Probing git +2024-05-30 07:04:45,256 ERROR HandlerThread:900 [gitlib.py:root():92] git root error: Cmd('git') failed due to: exit code(128) + cmdline: git rev-parse --show-toplevel + stderr: 'fatal: detected dubious ownership in repository at '/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness' +To add an exception for this directory, call: + + git config --global --add safe.directory /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness' +2024-05-30 07:04:45,256 DEBUG HandlerThread:900 [system_info.py:_probe_git():143] Probing git done +2024-05-30 07:04:45,256 DEBUG HandlerThread:900 [system_info.py:probe():198] Probing system done +2024-05-30 07:04:45,256 DEBUG HandlerThread:900 [system_monitor.py:probe():223] {'os': 'Linux-5.15.0-92-generic-x86_64-with-glibc2.35', 'python': '3.10.12', 'heartbeatAt': '2024-05-30T07:04:45.242301', 'startedAt': '2024-05-30T07:04:44.753569', 'docker': None, 'cuda': None, 'args': ('--model', 'hf', '--model_args', 'pretrained=/mnt/weka/peacock/experiments/llama/eval/checkpoint-english/llamav2-3b/hf/global_step10000,tokenizer=/mnt/weka/peacock/tokenization/trained-tokenizer/enhiben_50k/', '--tasks', 'winogrande,sst2,mrpc,arc_easy,copa,piqa,boolq', '--batch_size', 'auto', '--wandb_args', 'project=english-eval,group=exp2,name=global_step10000'), 'state': 'running', 'program': '-m lm_eval.__main__', 'codePathLocal': None, 'git': {'remote': 'https://github.com/EleutherAI/lm-evaluation-harness', 'commit': None}, 'email': None, 'root': '/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness', 'host': 'peacock-evaluation-debug-5-worker-0', 'username': 'root', 'executable': '/usr/bin/python3', 'cpu_count': 80, 'cpu_count_logical': 160, 'cpu_freq': {'current': 2334.3043875, 'min': 800.0, 'max': 3400.0}, 'cpu_freq_per_core': [{'current': 3399.997, 'min': 800.0, 'max': 3400.0}, {'current': 3400.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, {'current': 2300.0, 'min': 800.0, 'max': 
3400.0}, [... approximately 130 further per-core entries elided: near-verbatim duplicates of {'current': 2300.0, 'min': 800.0, 'max': 3400.0}, repeating the wandb-metadata.json dump above ...] {'current': 2300.0, 'min': 800.0, 'max': 3400.0}], 'disk': {'/': {'total': 877.6341285705566, 'used': 212.06907272338867}}, 'memory': {'total': 1007.4379348754883}} +2024-05-30 07:04:45,256 INFO HandlerThread:900 [system_monitor.py:probe():224] Finished collecting system info +2024-05-30 07:04:45,256 INFO HandlerThread:900
[system_monitor.py:probe():227] Publishing system info +2024-05-30 07:04:45,260 INFO HandlerThread:900 [system_monitor.py:probe():229] Finished publishing system info +2024-05-30 07:04:45,267 DEBUG SenderThread:900 [sender.py:send():378] send: files +2024-05-30 07:04:45,267 INFO SenderThread:900 [sender.py:_save_file():1389] saving file wandb-metadata.json with policy now +2024-05-30 07:04:45,440 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: python_packages +2024-05-30 07:04:45,441 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: python_packages +2024-05-30 07:04:45,441 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: stop_status +2024-05-30 07:04:45,444 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: stop_status +2024-05-30 07:04:45,586 DEBUG SenderThread:900 [sender.py:send():378] send: telemetry +2024-05-30 07:04:45,875 INFO wandb-upload_0:900 [upload_job.py:push():130] Uploaded file /tmp/tmp82kuj3okwandb/jhd1abkj-wandb-metadata.json +2024-05-30 07:04:46,051 INFO Thread-12 :900 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log +2024-05-30 07:04:46,051 INFO Thread-12 :900 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-metadata.json +2024-05-30 07:04:46,051 INFO Thread-12 :900 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/requirements.txt +2024-05-30 07:04:48,050 INFO Thread-12 :900 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log +2024-05-30 07:04:50,590 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:04:55,636 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:04:56,056 INFO Thread-12 :900 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log +2024-05-30 07:04:58,061 INFO Thread-12 :900 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log +2024-05-30 07:05:00,442 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: stop_status +2024-05-30 07:05:00,442 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: stop_status +2024-05-30 07:05:01,600 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:05:06,601 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:05:11,601 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:05:15,442 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: stop_status +2024-05-30 07:05:15,443 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: stop_status +2024-05-30 07:05:17,606 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:05:18,128 INFO Thread-12 :900 [dir_watcher.py:_on_file_modified():288] file/dir modified: 
/mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/config.yaml +2024-05-30 07:05:22,337 INFO Thread-12 :900 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log +2024-05-30 07:05:23,254 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:05:23,264 DEBUG SenderThread:900 [sender.py:send():378] send: exit +2024-05-30 07:05:23,264 INFO SenderThread:900 [sender.py:send_exit():585] handling exit code: 1 +2024-05-30 07:05:23,264 INFO SenderThread:900 [sender.py:send_exit():587] handling runtime: 38 +2024-05-30 07:05:23,266 INFO SenderThread:900 [sender.py:_save_file():1389] saving file wandb-summary.json with policy end +2024-05-30 07:05:23,266 INFO SenderThread:900 [sender.py:send_exit():593] send defer +2024-05-30 07:05:23,266 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,266 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 0 +2024-05-30 07:05:23,267 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,267 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 0 +2024-05-30 07:05:23,267 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 1 +2024-05-30 07:05:23,267 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,267 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 1 +2024-05-30 07:05:23,267 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,267 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 1 +2024-05-30 07:05:23,267 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 2 +2024-05-30 07:05:23,267 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,267 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 2 +2024-05-30 07:05:23,267 INFO HandlerThread:900 [system_monitor.py:finish():203] Stopping system monitor +2024-05-30 07:05:23,268 DEBUG SystemMonitor:900 [system_monitor.py:_start():172] Starting system metrics aggregation loop +2024-05-30 07:05:23,268 DEBUG SystemMonitor:900 [system_monitor.py:_start():179] Finished system metrics aggregation loop +2024-05-30 07:05:23,268 DEBUG SystemMonitor:900 [system_monitor.py:_start():183] Publishing last batch of metrics +2024-05-30 07:05:23,270 INFO HandlerThread:900 [interfaces.py:finish():200] Joined cpu monitor +2024-05-30 07:05:23,270 INFO HandlerThread:900 [interfaces.py:finish():200] Joined disk monitor +2024-05-30 07:05:23,270 INFO HandlerThread:900 [interfaces.py:finish():200] Joined memory monitor +2024-05-30 07:05:23,270 INFO HandlerThread:900 [interfaces.py:finish():200] Joined network monitor +2024-05-30 07:05:23,271 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,271 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 2 +2024-05-30 07:05:23,271 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 3 +2024-05-30 07:05:23,271 DEBUG SenderThread:900 [sender.py:send():378] send: stats +2024-05-30 07:05:23,272 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,272 INFO HandlerThread:900 
[handler.py:handle_request_defer():184] handle defer: 3 +2024-05-30 07:05:23,272 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,272 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 3 +2024-05-30 07:05:23,272 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 4 +2024-05-30 07:05:23,272 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,272 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 4 +2024-05-30 07:05:23,272 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,273 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 4 +2024-05-30 07:05:23,273 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 5 +2024-05-30 07:05:23,273 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,273 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 5 +2024-05-30 07:05:23,273 DEBUG SenderThread:900 [sender.py:send():378] send: summary +2024-05-30 07:05:23,273 INFO SenderThread:900 [sender.py:_save_file():1389] saving file wandb-summary.json with policy end +2024-05-30 07:05:23,274 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,274 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 5 +2024-05-30 07:05:23,274 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 6 +2024-05-30 07:05:23,274 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,274 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 6 +2024-05-30 07:05:23,274 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,274 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 6 +2024-05-30 07:05:23,274 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 7 +2024-05-30 07:05:23,274 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: status_report +2024-05-30 07:05:23,274 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,274 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 7 +2024-05-30 07:05:23,274 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,274 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 7 +2024-05-30 07:05:23,338 INFO Thread-12 :900 [dir_watcher.py:_on_file_created():271] file/dir created: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-summary.json +2024-05-30 07:05:23,964 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 8 +2024-05-30 07:05:23,964 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,965 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 8 +2024-05-30 07:05:23,965 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,965 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 8 +2024-05-30 07:05:23,965 INFO SenderThread:900 [job_builder.py:build():432] Attempting to build job artifact +2024-05-30 07:05:23,966 INFO SenderThread:900 [job_builder.py:_get_source_type():576] 
no source found +2024-05-30 07:05:23,966 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 9 +2024-05-30 07:05:23,966 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:23,966 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 9 +2024-05-30 07:05:23,966 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:23,966 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 9 +2024-05-30 07:05:23,966 INFO SenderThread:900 [dir_watcher.py:finish():358] shutting down directory watcher +2024-05-30 07:05:24,264 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-30 07:05:24,339 INFO SenderThread:900 [dir_watcher.py:_on_file_modified():288] file/dir modified: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log +2024-05-30 07:05:24,340 INFO SenderThread:900 [dir_watcher.py:finish():388] scan: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files +2024-05-30 07:05:24,340 INFO SenderThread:900 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-metadata.json wandb-metadata.json +2024-05-30 07:05:24,340 INFO SenderThread:900 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/config.yaml config.yaml +2024-05-30 07:05:24,340 INFO SenderThread:900 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log output.log +2024-05-30 07:05:24,342 INFO SenderThread:900 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-summary.json wandb-summary.json +2024-05-30 07:05:24,343 INFO SenderThread:900 [dir_watcher.py:finish():402] scan save: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/requirements.txt requirements.txt +2024-05-30 07:05:24,343 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 10 +2024-05-30 07:05:24,343 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: poll_exit +2024-05-30 07:05:24,343 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:24,343 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 10 +2024-05-30 07:05:24,343 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:24,343 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 10 +2024-05-30 07:05:24,343 INFO SenderThread:900 [file_pusher.py:finish():169] shutting down file pusher +2024-05-30 07:05:24,601 INFO wandb-upload_0:900 [upload_job.py:push():130] Uploaded file /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/config.yaml +2024-05-30 07:05:25,021 INFO wandb-upload_2:900 [upload_job.py:push():130] Uploaded file /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/wandb-summary.json +2024-05-30 07:05:25,040 INFO wandb-upload_3:900 [upload_job.py:push():130] Uploaded file /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/requirements.txt +2024-05-30 
07:05:25,078 INFO wandb-upload_1:900 [upload_job.py:push():130] Uploaded file /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/files/output.log +2024-05-30 07:05:25,264 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-30 07:05:25,264 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: poll_exit +2024-05-30 07:05:25,278 INFO Thread-11 (_thread_body):900 [sender.py:transition_state():613] send defer: 11 +2024-05-30 07:05:25,278 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:25,278 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 11 +2024-05-30 07:05:25,279 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:25,279 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 11 +2024-05-30 07:05:25,279 INFO SenderThread:900 [file_pusher.py:join():175] waiting for file pusher +2024-05-30 07:05:25,279 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 12 +2024-05-30 07:05:25,279 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:25,279 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 12 +2024-05-30 07:05:25,279 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:25,279 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 12 +2024-05-30 07:05:25,279 INFO SenderThread:900 [file_stream.py:finish():601] file stream finish called +2024-05-30 07:05:25,353 INFO SenderThread:900 [file_stream.py:finish():605] file stream finish is done +2024-05-30 07:05:25,353 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 13 +2024-05-30 07:05:25,353 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:25,353 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 13 +2024-05-30 07:05:25,353 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:25,353 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 13 +2024-05-30 07:05:25,353 INFO SenderThread:900 [sender.py:transition_state():613] send defer: 14 +2024-05-30 07:05:25,353 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: defer +2024-05-30 07:05:25,353 INFO HandlerThread:900 [handler.py:handle_request_defer():184] handle defer: 14 +2024-05-30 07:05:25,354 DEBUG SenderThread:900 [sender.py:send():378] send: final +2024-05-30 07:05:25,354 DEBUG SenderThread:900 [sender.py:send():378] send: footer +2024-05-30 07:05:25,354 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: defer +2024-05-30 07:05:25,354 INFO SenderThread:900 [sender.py:send_request_defer():609] handle sender defer: 14 +2024-05-30 07:05:25,354 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-30 07:05:25,354 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: poll_exit +2024-05-30 07:05:25,355 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: poll_exit +2024-05-30 07:05:25,355 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: server_info +2024-05-30 07:05:25,355 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: get_summary +2024-05-30 07:05:25,355 DEBUG HandlerThread:900 
[handler.py:handle_request():158] handle_request: sampled_history +2024-05-30 07:05:25,355 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: internal_messages +2024-05-30 07:05:25,355 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: poll_exit +2024-05-30 07:05:25,355 DEBUG SenderThread:900 [sender.py:send_request():405] send_request: server_info +2024-05-30 07:05:25,408 INFO MainThread:900 [wandb_run.py:_footer_history_summary_info():3994] rendering history +2024-05-30 07:05:25,408 INFO MainThread:900 [wandb_run.py:_footer_history_summary_info():4026] rendering summary +2024-05-30 07:05:25,408 INFO MainThread:900 [wandb_run.py:_footer_sync_info():3953] logging synced files +2024-05-30 07:05:25,409 DEBUG HandlerThread:900 [handler.py:handle_request():158] handle_request: shutdown +2024-05-30 07:05:25,409 INFO HandlerThread:900 [handler.py:finish():882] shutting down handler +2024-05-30 07:05:26,355 INFO WriterThread:900 [datastore.py:close():296] close: /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/run-z0193g8n.wandb +2024-05-30 07:05:26,408 INFO SenderThread:900 [sender.py:finish():1545] shutting down sender +2024-05-30 07:05:26,408 INFO SenderThread:900 [file_pusher.py:finish():169] shutting down file pusher +2024-05-30 07:05:26,408 INFO SenderThread:900 [file_pusher.py:join():175] waiting for file pusher diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug.log b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug.log new file mode 100644 index 0000000000000000000000000000000000000000..72dd1bf6d485e2d5e9e32df5883928ec3f4aeb17 --- /dev/null +++ b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug.log @@ -0,0 +1,29 @@ +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Current SDK version is 0.17.0 +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Configure stats pid to 744 +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Loading settings from /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/settings +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Loading settings from environment variables: {} +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False} +2024-05-30 07:04:44,768 WARNING MainThread:744 [wandb_setup.py:_flush():76] Could not find program at -m lm_eval.__main__ +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': None, 'program': '-m lm_eval.__main__'} +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_setup.py:_flush():76] Applying login settings: {} +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_init.py:_log_setup():520] Logging user logs to /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug.log +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_init.py:_log_setup():521] Logging internal logs to /mnt/weka/peacock/idc/cronscript/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/logs/debug-internal.log +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_init.py:init():560] calling init triggers +2024-05-30 07:04:44,768 INFO MainThread:744 [wandb_init.py:init():567] wandb.init called 
with sweep_config: {} +config: {} +2024-05-30 07:04:44,769 INFO MainThread:744 [wandb_init.py:init():610] starting backend +2024-05-30 07:04:44,769 INFO MainThread:744 [wandb_init.py:init():614] setting up manager +2024-05-30 07:04:44,773 INFO MainThread:744 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn +2024-05-30 07:04:44,773 INFO MainThread:744 [wandb_init.py:init():622] backend started and connected +2024-05-30 07:04:44,777 INFO MainThread:744 [wandb_init.py:init():711] updated telemetry +2024-05-30 07:04:44,786 INFO MainThread:744 [wandb_init.py:init():744] communicating run to backend with 90.0 second timeout +2024-05-30 07:04:45,050 INFO MainThread:744 [wandb_run.py:_on_init():2396] communicating current version +2024-05-30 07:04:45,160 INFO MainThread:744 [wandb_run.py:_on_init():2405] got version response +2024-05-30 07:04:45,161 INFO MainThread:744 [wandb_init.py:init():795] starting run threads in backend +2024-05-30 07:04:45,442 INFO MainThread:744 [wandb_run.py:_console_start():2374] atexit reg +2024-05-30 07:04:45,442 INFO MainThread:744 [wandb_run.py:_redirect():2229] redirect: wrap_raw +2024-05-30 07:04:45,442 INFO MainThread:744 [wandb_run.py:_redirect():2294] Wrapping output streams. +2024-05-30 07:04:45,442 INFO MainThread:744 [wandb_run.py:_redirect():2319] Redirects installed. +2024-05-30 07:04:45,446 INFO MainThread:744 [wandb_init.py:init():838] run started, returning control to user process +2024-05-30 07:05:26,410 WARNING MsgRouterThr:744 [router.py:message_loop():77] message_loop has been closed diff --git a/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/run-z0193g8n.wandb b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/run-z0193g8n.wandb new file mode 100644 index 0000000000000000000000000000000000000000..c9576de82afc5e32406aa445c964579351c0a6e4 Binary files /dev/null and b/lm-evaluation-harness/wandb/run-20240530_070444-z0193g8n/run-z0193g8n.wandb differ diff --git a/venv/bin/python b/venv/bin/python new file mode 100644 index 0000000000000000000000000000000000000000..64e8728adfafdb3e95b983fb0960f9a09cd3bac9 --- /dev/null +++ b/venv/bin/python @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45692c3da2492563eabf0a8f5dc18d20dc9c34ffe3a18202563e00bae684be91 +size 5904904 diff --git a/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/LICENSE b/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
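As an illustration of the appendix above: attached to a Python source file (the dominant file format in this repository), the boilerplate notice becomes a module-level comment block. The year and copyright owner below are invented placeholders standing in for the bracketed fields, not values taken from this repository:

# Copyright 2024 Example Owner
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.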
diff --git a/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/RECORD b/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..233a08ec2fd3ce50d261bb314ec96477710812e9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/RECORD @@ -0,0 +1,163 @@ +../../../bin/accelerate,sha256=LgZXmEHjJPZ2aFn20UFn--3xou6O--NhlD7cEFKIkCk,262 +../../../bin/accelerate-config,sha256=XmI-Ruh2UNKOFB-hQjTOw5V5gCVjoCBqAU-tkW7TRuk,254 +../../../bin/accelerate-estimate-memory,sha256=JiaILN1tzI_0gXCaVi97m3c0XkPhumhQMQph_yHGIEo,256 +../../../bin/accelerate-launch,sha256=rCwKRBWCoNP0usF_G3iKt3JqN5dO0-ZOH9p07eb2A_k,254 +accelerate-0.29.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +accelerate-0.29.3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 +accelerate-0.29.3.dist-info/METADATA,sha256=DNiQffLlP8RQMZZvHtnR0loDVK60yC3FhB8UQKkthgo,18942 +accelerate-0.29.3.dist-info/RECORD,, +accelerate-0.29.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +accelerate-0.29.3.dist-info/entry_points.txt,sha256=Z_KV59tIt4oZtUDEQ0w8JThJ6_1dd8vR8heH24DeAXI,238 +accelerate-0.29.3.dist-info/top_level.txt,sha256=esVfdxTidsjQ90zsN_rPpjLFJ4ijRlx4mnLrG09hlt4,11 +accelerate/__init__.py,sha256=UUqSsQQDFMm6aAZGCgNyrbTFPtwkguZA2KnoPb0XbWo,1456 +accelerate/__pycache__/__init__.cpython-310.pyc,, +accelerate/__pycache__/accelerator.cpython-310.pyc,, +accelerate/__pycache__/big_modeling.cpython-310.pyc,, +accelerate/__pycache__/checkpointing.cpython-310.pyc,, +accelerate/__pycache__/data_loader.cpython-310.pyc,, +accelerate/__pycache__/hooks.cpython-310.pyc,, +accelerate/__pycache__/inference.cpython-310.pyc,, +accelerate/__pycache__/launchers.cpython-310.pyc,, +accelerate/__pycache__/local_sgd.cpython-310.pyc,, +accelerate/__pycache__/logging.cpython-310.pyc,, +accelerate/__pycache__/memory_utils.cpython-310.pyc,, +accelerate/__pycache__/optimizer.cpython-310.pyc,, +accelerate/__pycache__/scheduler.cpython-310.pyc,, +accelerate/__pycache__/state.cpython-310.pyc,, +accelerate/__pycache__/tracking.cpython-310.pyc,, +accelerate/accelerator.py,sha256=rh4-KBMCkCGLldjKo1CRtBIbsXG76fJqYWdgOugaw7w,143024 +accelerate/big_modeling.py,sha256=pmtLTKTf8mJK1E2o51E3H5TBAuw_zLX_7pWtogtbP1w,29278 +accelerate/checkpointing.py,sha256=vFyLNg9-8qsPBYhAkcm-WwKEeK5Lrq9qLrQWNGFKoPk,11378 +accelerate/commands/__init__.py,sha256=m1PPTDT4ziIAvM0-FDSgIMIZ69Konn126s6LwuzH6v8,606 +accelerate/commands/__pycache__/__init__.cpython-310.pyc,, +accelerate/commands/__pycache__/accelerate_cli.cpython-310.pyc,, +accelerate/commands/__pycache__/env.cpython-310.pyc,, +accelerate/commands/__pycache__/estimate.cpython-310.pyc,, +accelerate/commands/__pycache__/launch.cpython-310.pyc,, +accelerate/commands/__pycache__/test.cpython-310.pyc,, +accelerate/commands/__pycache__/tpu.cpython-310.pyc,, +accelerate/commands/__pycache__/utils.cpython-310.pyc,, +accelerate/commands/accelerate_cli.py,sha256=i3nge5Wj8i4zkV0CVIk9P8veleRZbTZY0AU4fJOrKF8,1749 +accelerate/commands/config/__init__.py,sha256=iJK8dgj3pc5Vdr1E7UuGoFu-BlybyXLxYDoTg9gXngE,1645 +accelerate/commands/config/__pycache__/__init__.cpython-310.pyc,, +accelerate/commands/config/__pycache__/cluster.cpython-310.pyc,, +accelerate/commands/config/__pycache__/config.cpython-310.pyc,, +accelerate/commands/config/__pycache__/config_args.cpython-310.pyc,, +accelerate/commands/config/__pycache__/config_utils.cpython-310.pyc,, 
+accelerate/commands/config/__pycache__/default.cpython-310.pyc,, +accelerate/commands/config/__pycache__/sagemaker.cpython-310.pyc,, +accelerate/commands/config/__pycache__/update.cpython-310.pyc,, +accelerate/commands/config/cluster.py,sha256=lA55beGeo0fAowfffKhf8nGcy6lBjaOxTtV-Yg_Rz6s,29926 +accelerate/commands/config/config.py,sha256=FuRlQvOjgATEtyqOSsGD-KEtOCvACOHjs2C-krrtldk,3035 +accelerate/commands/config/config_args.py,sha256=hE42coVnn0UU-ysqp2ZH-jlqaXoPaHt5E_3qxT42GIM,10024 +accelerate/commands/config/config_utils.py,sha256=DcjIV1mDInFmct2_XQ-9KYAkREINs6YuHRbZe5HFjT8,2926 +accelerate/commands/config/default.py,sha256=3-SdEhl_zXM9S3f-FxkSVtiBQ5VY-QNsC4O26u60bss,5350 +accelerate/commands/config/sagemaker.py,sha256=GjHE2-h4tRr1P_PFtMF3miiAtJlzkbHbMb6kFXqn8eo,10341 +accelerate/commands/config/update.py,sha256=NXW1J7GkUHpg71QlIXsmMB_0z8S8IZo2FWax5POwrhc,2395 +accelerate/commands/env.py,sha256=HXXUozMFlxs0b-bU2a3nEcXwYz-5EBkfCvE9svqeN2U,3595 +accelerate/commands/estimate.py,sha256=shEn2nXyHmz94zpAzV2R8__lcNYW9f9djl7bOHoo04k,12398 +accelerate/commands/launch.py,sha256=rYmkdc0Kbcux4TOqBG_sJN-NNc4nmV90vuwHqhGNfWw,41439 +accelerate/commands/menu/__init__.py,sha256=uqSlBM0TFHBwzdv3p3SXfpAk1lZFp4h1a7mbBdscPHs,645 +accelerate/commands/menu/__pycache__/__init__.cpython-310.pyc,, +accelerate/commands/menu/__pycache__/cursor.cpython-310.pyc,, +accelerate/commands/menu/__pycache__/helpers.cpython-310.pyc,, +accelerate/commands/menu/__pycache__/input.cpython-310.pyc,, +accelerate/commands/menu/__pycache__/keymap.cpython-310.pyc,, +accelerate/commands/menu/__pycache__/selection_menu.cpython-310.pyc,, +accelerate/commands/menu/cursor.py,sha256=-lmpJVAzvNc0c3EOtSuLoKB59zqylVCbYyWLPnrOmvQ,2028 +accelerate/commands/menu/helpers.py,sha256=KrSB5fJjH4MUEUAQJ6bYaN16AYcnl9UalDrPD3DYeeg,1483 +accelerate/commands/menu/input.py,sha256=Uj9eDp8-Mb0Fe49nuogqo9W_RCfYd6udfjiPKx7Wjmg,2537 +accelerate/commands/menu/keymap.py,sha256=eXj-suyYs1m5dEHoUKN4mKAMLc8DWHnwhP6G6JSU0jQ,4086 +accelerate/commands/menu/selection_menu.py,sha256=bxy-DHaKKC6SCToOlMBv5_z0MdUzylEg6Sio9OuV3GM,4921 +accelerate/commands/test.py,sha256=YrPYEaAACOGZ6btn2MV6NbMSEdBUcMWADLbQWaZSHtk,2149 +accelerate/commands/tpu.py,sha256=KyxDP7IuveidZrbW4rx2s8Ku3o_ptI6tzwr_R7ck0os,5548 +accelerate/commands/utils.py,sha256=ilcfE32oHh28EToM00nc_SR6upfZiuxUI0AjjZu8KYY,3995 +accelerate/data_loader.py,sha256=qQojnHAW0cjTL7jLQN_g-oHlRZBkKzti3ifk84Izuw4,48307 +accelerate/hooks.py,sha256=x0FBwwoy6PKSwulavYTpc4gERIoB7RHGPF0Qe6qjXNA,31244 +accelerate/inference.py,sha256=Ci7kkw2cocNpuvmbo1ytW2QgcI_HKWoXkIdonFOr0tg,7977 +accelerate/launchers.py,sha256=iFDZ7seDdRwHAHy1BbVPmPccAONiPdV2aBOHNuT2ZD8,11375 +accelerate/local_sgd.py,sha256=v0-AxldUSCYCI-rqjLiEHsVtSqyEIWTC5ppn7CW7qfY,4002 +accelerate/logging.py,sha256=kvUvk33r_7T2BNzIwqRZBOhuC-50Ju4rm4HbsM6h2G8,4897 +accelerate/memory_utils.py,sha256=3R5LoeHl6GgTZ-IMPrDZMdaEehWarGdPqODushb-6pg,862 +accelerate/optimizer.py,sha256=H7e1XwEysZ_GFR8V_3bHjFAY7zzrzO8samCyW_r7dZo,7453 +accelerate/scheduler.py,sha256=des_4M_Tt1W8gCYZZbLla0GHBEgJY3Wx2EGBQPTzeiY,4238 +accelerate/state.py,sha256=yOpKq0xf-yY7qPeQMKWqG05PiU_uUsIkyGqyAlOIJNQ,50409 +accelerate/test_utils/__init__.py,sha256=amEDYw-ztgIvHkYT3mv3ixk1QJirUnf6jfPJzqUUYkQ,1459 +accelerate/test_utils/__pycache__/__init__.cpython-310.pyc,, +accelerate/test_utils/__pycache__/examples.cpython-310.pyc,, +accelerate/test_utils/__pycache__/testing.cpython-310.pyc,, +accelerate/test_utils/__pycache__/training.cpython-310.pyc,, 
+accelerate/test_utils/examples.py,sha256=jRm1S9TkmeoLaqprBvtVFN4LesiaDZtKMNIoLNY2euw,7281 +accelerate/test_utils/scripts/__init__.py,sha256=m1PPTDT4ziIAvM0-FDSgIMIZ69Konn126s6LwuzH6v8,606 +accelerate/test_utils/scripts/__pycache__/__init__.cpython-310.pyc,, +accelerate/test_utils/scripts/__pycache__/test_cli.cpython-310.pyc,, +accelerate/test_utils/scripts/__pycache__/test_distributed_data_loop.cpython-310.pyc,, +accelerate/test_utils/scripts/__pycache__/test_notebook.cpython-310.pyc,, +accelerate/test_utils/scripts/__pycache__/test_ops.cpython-310.pyc,, +accelerate/test_utils/scripts/__pycache__/test_script.cpython-310.pyc,, +accelerate/test_utils/scripts/__pycache__/test_sync.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/__init__.py,sha256=m1PPTDT4ziIAvM0-FDSgIMIZ69Konn126s6LwuzH6v8,606 +accelerate/test_utils/scripts/external_deps/__pycache__/__init__.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/__pycache__/test_checkpointing.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/__pycache__/test_metrics.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/__pycache__/test_peak_memory_usage.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/__pycache__/test_performance.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/__pycache__/test_pippy.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/__pycache__/test_zero3_integration.cpython-310.pyc,, +accelerate/test_utils/scripts/external_deps/test_checkpointing.py,sha256=zILzHevzqxB1NPPDrJ1furaitI8MTvhBeG9QzzL0bmE,10668 +accelerate/test_utils/scripts/external_deps/test_metrics.py,sha256=67-S1qeCpCL9ceaH22RsIsBJscMS7VQWaO4Krcszzbw,12133 +accelerate/test_utils/scripts/external_deps/test_peak_memory_usage.py,sha256=D0YnKCxkI4ZwDOmZ5Ev6hL9jPyP7SU4WffpVFiK14bs,11072 +accelerate/test_utils/scripts/external_deps/test_performance.py,sha256=8fV3wCM1H9HVRRyC5C4EGWt-9aHILX_y3-E7LfSiv7M,9803 +accelerate/test_utils/scripts/external_deps/test_pippy.py,sha256=RdMoD1rlLKMyjyl0soSqR3iDbGidS6-z5GHo3bJUOw8,4647 +accelerate/test_utils/scripts/external_deps/test_zero3_integration.py,sha256=bJ0Jio-6OCyS2FIgFmZi3duqG1gbkOoTEcHsrORYIL4,1503 +accelerate/test_utils/scripts/test_cli.py,sha256=qfk1aYFtdvYFCYPkl05602SNGvk08QTv0xZVVcFVtzM,833 +accelerate/test_utils/scripts/test_distributed_data_loop.py,sha256=VqFPKNRu8yx2MoZ4nHy5wRocEthSymcIA2mg1knqDq8,8315 +accelerate/test_utils/scripts/test_notebook.py,sha256=Q4OOWHa_GMmzwfiq71BTpKYmhCHLC02J42OO94ut9xk,1629 +accelerate/test_utils/scripts/test_ops.py,sha256=BcGn3xJT2wUJ0Yk_6VLNkneSv9z24JeAoQjsgdIIRr4,6170 +accelerate/test_utils/scripts/test_script.py,sha256=QyHRWvHQm1XWkAH7YilQ0gZe3zwvEkyqD6JXmneWqak,32059 +accelerate/test_utils/scripts/test_sync.py,sha256=3kltq-GuUjOVuo6_FOuWiPyc5f3pGiqiwEAbex5x_-o,18263 +accelerate/test_utils/testing.py,sha256=HIp7n6qPMh8KPbwEzNWu5mzfxnQRcU15EQ1AQKehpo0,20571 +accelerate/test_utils/training.py,sha256=8k_YAQ21MzUdb2aFWq1t2fihW1b-iBGh1OJSL3whY68,4019 +accelerate/tracking.py,sha256=WLY-H1DTsxrz4BVzle7QZMp0Irg84yFMbA1e6JaY3pM,39789 +accelerate/utils/__init__.py,sha256=SEP34Od2TbTZt7AbhPJoWWDxFoNMeNEyAuVfaPgVu7k,6065 +accelerate/utils/__pycache__/__init__.cpython-310.pyc,, +accelerate/utils/__pycache__/bnb.cpython-310.pyc,, +accelerate/utils/__pycache__/constants.cpython-310.pyc,, +accelerate/utils/__pycache__/dataclasses.cpython-310.pyc,, +accelerate/utils/__pycache__/deepspeed.cpython-310.pyc,, +accelerate/utils/__pycache__/environment.cpython-310.pyc,, 
+accelerate/utils/__pycache__/fsdp_utils.cpython-310.pyc,, +accelerate/utils/__pycache__/imports.cpython-310.pyc,, +accelerate/utils/__pycache__/launch.cpython-310.pyc,, +accelerate/utils/__pycache__/megatron_lm.cpython-310.pyc,, +accelerate/utils/__pycache__/memory.cpython-310.pyc,, +accelerate/utils/__pycache__/modeling.cpython-310.pyc,, +accelerate/utils/__pycache__/offload.cpython-310.pyc,, +accelerate/utils/__pycache__/operations.cpython-310.pyc,, +accelerate/utils/__pycache__/other.cpython-310.pyc,, +accelerate/utils/__pycache__/random.cpython-310.pyc,, +accelerate/utils/__pycache__/rich.cpython-310.pyc,, +accelerate/utils/__pycache__/torch_xla.cpython-310.pyc,, +accelerate/utils/__pycache__/tqdm.cpython-310.pyc,, +accelerate/utils/__pycache__/transformer_engine.cpython-310.pyc,, +accelerate/utils/__pycache__/versions.cpython-310.pyc,, +accelerate/utils/bnb.py,sha256=3i59dy8EcBYJEnT2alJ5_M-zeIpFsrceQ4bImiJJKOk,20570 +accelerate/utils/constants.py,sha256=e6Bpf7gSZLFkvfr-1B1841b6lVoKJ5uyyf5kefe0aT4,2566 +accelerate/utils/dataclasses.py,sha256=QSP-gYjXz68s0PAseKwLHRBQUnzcBQwPk80otV4X20k,74253 +accelerate/utils/deepspeed.py,sha256=1JFnz-dY6xP9yHywnX8bzZNq-d-8Cpg5CvVNLZ74b_0,10276 +accelerate/utils/environment.py,sha256=8eVGMCu7xT1y0Hxochnxz_RghDePtWo2TghDlOm5Gf0,10409 +accelerate/utils/fsdp_utils.py,sha256=QURWBtK8D00zppqJko0yeznEovXvnkRLI0NpPPkog1Q,10667 +accelerate/utils/imports.py,sha256=gYj_W3E5V83dYlSqqYE89OAK6JonzwhlcEjsJcOpB3E,12232 +accelerate/utils/launch.py,sha256=hHpcnR0NrSmqaT7AIaeIeXOAJVIhWnWdq3kA1XSnOYs,27459 +accelerate/utils/megatron_lm.py,sha256=IfHrtMiPSwuzh5ri96rTTIcEluuMNuIj3O8Y4jW6Fzk,57124 +accelerate/utils/memory.py,sha256=VxJCU-tMX8uE34GbJnxtDXYPHh4D9p2Y-d6rkGxqSa0,5200 +accelerate/utils/modeling.py,sha256=OfTHPg7oM9-jzYotLZjJKj6TrhCTFV3qOtQAOhKXmzQ,80246 +accelerate/utils/offload.py,sha256=qjaVai81wbkA0YH2WkmOXvZT0BRphygfRV_4Ua4j4U4,7837 +accelerate/utils/operations.py,sha256=zsmRx8mP2eoImPc42pOmBIqaHX7RDugw8AZ_HF3onpg,30610 +accelerate/utils/other.py,sha256=kgON65EhzQN3oQZqzgAOmmNC2vsQkeO77qEuzN7Zv7c,12283 +accelerate/utils/random.py,sha256=t-HsLQRm8etSiLSyONCU9wNhj-0VjDUyDme9p6RxDNU,4881 +accelerate/utils/rich.py,sha256=8JZX_uGMQX-BufdXxJpdne7BWd1KyLHSgbiGxrDMYr8,847 +accelerate/utils/torch_xla.py,sha256=Pq1tuqN0X_pWDVza6YgjfO45uoJdoRVRForLeLQzFus,1908 +accelerate/utils/tqdm.py,sha256=9Ovx4GL8AvjSaBd_OysoUGPW9ZJ3ZBOde6776HMEMOA,1344 +accelerate/utils/transformer_engine.py,sha256=gNPkOv_D1SDLm6nVZtxWIjyA6snxWtAQeBWUZLIErJE,3582 +accelerate/utils/versions.py,sha256=UgmcbjBm--6CIx1ZamSAMjAK_B_2l48LbeaNygqej8M,2149 diff --git a/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/WHEEL b/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a --- /dev/null +++ b/venv/lib/python3.10/site-packages/accelerate-0.29.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/pathvalidate/__init__.py b/venv/lib/python3.10/site-packages/pathvalidate/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f1b3d6243d800c28469efad4bb452e40c76e6703 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/__init__.py @@ -0,0 +1,81 @@ +""" +.. 
codeauthor:: Tsuyoshi Hombashi +""" + +from .__version__ import __author__, __copyright__, __email__, __license__, __version__ +from ._base import AbstractSanitizer, AbstractValidator +from ._common import ( + ascii_symbols, + normalize_platform, + replace_ansi_escape, + replace_unprintable_char, + unprintable_ascii_chars, + validate_pathtype, + validate_unprintable_char, +) +from ._const import Platform +from ._filename import ( + FileNameSanitizer, + FileNameValidator, + is_valid_filename, + sanitize_filename, + validate_filename, +) +from ._filepath import ( + FilePathSanitizer, + FilePathValidator, + is_valid_filepath, + sanitize_filepath, + validate_filepath, +) +from ._ltsv import sanitize_ltsv_label, validate_ltsv_label +from ._symbol import replace_symbol, validate_symbol +from .error import ( + ErrorReason, + InvalidCharError, + InvalidReservedNameError, + NullNameError, + ReservedNameError, + ValidationError, + ValidReservedNameError, +) + + +__all__ = ( + "__author__", + "__copyright__", + "__email__", + "__license__", + "__version__", + "AbstractSanitizer", + "AbstractValidator", + "Platform", + "ascii_symbols", + "normalize_platform", + "replace_ansi_escape", + "replace_unprintable_char", + "unprintable_ascii_chars", + "validate_pathtype", + "validate_unprintable_char", + "FileNameSanitizer", + "FileNameValidator", + "is_valid_filename", + "sanitize_filename", + "validate_filename", + "FilePathSanitizer", + "FilePathValidator", + "is_valid_filepath", + "sanitize_filepath", + "validate_filepath", + "sanitize_ltsv_label", + "validate_ltsv_label", + "replace_symbol", + "validate_symbol", + "ErrorReason", + "InvalidCharError", + "InvalidReservedNameError", + "NullNameError", + "ReservedNameError", + "ValidationError", + "ValidReservedNameError", +) diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_base.py b/venv/lib/python3.10/site-packages/pathvalidate/_base.py new file mode 100644 index 0000000000000000000000000000000000000000..b829410c962bae5c5ecc061e2bc4661cd0d85b6e --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_base.py @@ -0,0 +1,237 @@ +""" +.. 
codeauthor:: Tsuyoshi Hombashi +""" + +import abc +import os +import sys +from typing import ClassVar, Optional, Sequence, Tuple + +from ._common import normalize_platform, unprintable_ascii_chars +from ._const import DEFAULT_MIN_LEN, Platform +from ._types import PathType, PlatformType +from .error import ReservedNameError, ValidationError +from .handler import NullValueHandler, ReservedNameHandler, ValidationErrorHandler + + +class BaseFile: + _INVALID_PATH_CHARS: ClassVar[str] = "".join(unprintable_ascii_chars) + _INVALID_FILENAME_CHARS: ClassVar[str] = _INVALID_PATH_CHARS + "/" + _INVALID_WIN_PATH_CHARS: ClassVar[str] = _INVALID_PATH_CHARS + ':*?"<>|\t\n\r\x0b\x0c' + _INVALID_WIN_FILENAME_CHARS: ClassVar[str] = ( + _INVALID_FILENAME_CHARS + _INVALID_WIN_PATH_CHARS + "\\" + ) + + @property + def platform(self) -> Platform: + return self.__platform + + @property + def reserved_keywords(self) -> Tuple[str, ...]: + return self._additional_reserved_names + + @property + def max_len(self) -> int: + return self._max_len + + def __init__( + self, + max_len: int, + fs_encoding: Optional[str], + additional_reserved_names: Optional[Sequence[str]] = None, + platform_max_len: Optional[int] = None, + platform: Optional[PlatformType] = None, + ) -> None: + if additional_reserved_names is None: + additional_reserved_names = tuple() + self._additional_reserved_names = tuple(n.upper() for n in additional_reserved_names) + + self.__platform = normalize_platform(platform) + + if platform_max_len is None: + platform_max_len = self._get_default_max_path_len() + + if max_len <= 0: + self._max_len = platform_max_len + else: + self._max_len = max_len + + self._max_len = min(self._max_len, platform_max_len) + + if fs_encoding: + self._fs_encoding = fs_encoding + else: + self._fs_encoding = sys.getfilesystemencoding() + + def _is_posix(self) -> bool: + return self.platform == Platform.POSIX + + def _is_universal(self) -> bool: + return self.platform == Platform.UNIVERSAL + + def _is_linux(self, include_universal: bool = False) -> bool: + if include_universal: + return self.platform in (Platform.UNIVERSAL, Platform.LINUX) + + return self.platform == Platform.LINUX + + def _is_windows(self, include_universal: bool = False) -> bool: + if include_universal: + return self.platform in (Platform.UNIVERSAL, Platform.WINDOWS) + + return self.platform == Platform.WINDOWS + + def _is_macos(self, include_universal: bool = False) -> bool: + if include_universal: + return self.platform in (Platform.UNIVERSAL, Platform.MACOS) + + return self.platform == Platform.MACOS + + def _get_default_max_path_len(self) -> int: + if self._is_linux(): + return 4096 + + if self._is_windows(): + return 260 + + if self._is_posix() or self._is_macos(): + return 1024 + + return 260 # universal + + +class AbstractValidator(BaseFile, metaclass=abc.ABCMeta): + def __init__( + self, + max_len: int, + fs_encoding: Optional[str], + check_reserved: bool, + additional_reserved_names: Optional[Sequence[str]] = None, + platform_max_len: Optional[int] = None, + platform: Optional[PlatformType] = None, + ) -> None: + self._check_reserved = check_reserved + + super().__init__( + max_len, + fs_encoding, + additional_reserved_names=additional_reserved_names, + platform_max_len=platform_max_len, + platform=platform, + ) + + @abc.abstractproperty + def min_len(self) -> int: # pragma: no cover + pass + + @abc.abstractmethod + def validate(self, value: PathType) -> None: # pragma: no cover + pass + + def is_valid(self, value: PathType) -> bool: + try: + 
self.validate(value) + except (TypeError, ValidationError): + return False + + return True + + def _is_reserved_keyword(self, value: str) -> bool: + return value in self.reserved_keywords + + +class AbstractSanitizer(BaseFile, metaclass=abc.ABCMeta): + def __init__( + self, + validator: AbstractValidator, + max_len: int, + fs_encoding: Optional[str], + validate_after_sanitize: bool, + null_value_handler: Optional[ValidationErrorHandler] = None, + reserved_name_handler: Optional[ValidationErrorHandler] = None, + additional_reserved_names: Optional[Sequence[str]] = None, + platform_max_len: Optional[int] = None, + platform: Optional[PlatformType] = None, + ) -> None: + super().__init__( + max_len=max_len, + fs_encoding=fs_encoding, + additional_reserved_names=additional_reserved_names, + platform_max_len=platform_max_len, + platform=platform, + ) + + if null_value_handler is None: + null_value_handler = NullValueHandler.return_null_string + self._null_value_handler = null_value_handler + + if reserved_name_handler is None: + reserved_name_handler = ReservedNameHandler.add_trailing_underscore + self._reserved_name_handler = reserved_name_handler + + self._validate_after_sanitize = validate_after_sanitize + + self._validator = validator + + @abc.abstractmethod + def sanitize(self, value: PathType, replacement_text: str = "") -> PathType: # pragma: no cover + pass + + +class BaseValidator(AbstractValidator): + @property + def min_len(self) -> int: + return self._min_len + + def __init__( + self, + min_len: int, + max_len: int, + fs_encoding: Optional[str], + check_reserved: bool, + additional_reserved_names: Optional[Sequence[str]] = None, + platform_max_len: Optional[int] = None, + platform: Optional[PlatformType] = None, + ) -> None: + if min_len <= 0: + min_len = DEFAULT_MIN_LEN + self._min_len = max(min_len, 1) + + super().__init__( + max_len=max_len, + fs_encoding=fs_encoding, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + platform_max_len=platform_max_len, + platform=platform, + ) + + self._validate_max_len() + + def _validate_reserved_keywords(self, name: str) -> None: + if not self._check_reserved: + return + + root_name = self.__extract_root_name(name) + base_name = os.path.basename(name).upper() + + if self._is_reserved_keyword(root_name.upper()) or self._is_reserved_keyword( + base_name.upper() + ): + raise ReservedNameError( + f"'{root_name}' is a reserved name", + reusable_name=False, + reserved_name=root_name, + platform=self.platform, + ) + + def _validate_max_len(self) -> None: + if self.max_len < 1: + raise ValueError("max_len must be greater or equal to one") + + if self.min_len > self.max_len: + raise ValueError("min_len must be lower than max_len") + + @staticmethod + def __extract_root_name(path: str) -> str: + return os.path.splitext(os.path.basename(path))[0] diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_common.py b/venv/lib/python3.10/site-packages/pathvalidate/_common.py new file mode 100644 index 0000000000000000000000000000000000000000..5df96322d063eca62831e4d2e79fa2079029c9b6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_common.py @@ -0,0 +1,140 @@ +""" +.. 
codeauthor:: Tsuyoshi Hombashi +""" + +import platform +import re +import string +from pathlib import PurePath +from typing import Any, List, Optional + +from ._const import Platform +from ._types import PathType, PlatformType + + +_re_whitespaces = re.compile(r"^[\s]+$") + + +def validate_pathtype( + text: PathType, allow_whitespaces: bool = False, error_msg: Optional[str] = None +) -> None: + from .error import ErrorReason, ValidationError + + if _is_not_null_string(text) or isinstance(text, PurePath): + return + + if allow_whitespaces and _re_whitespaces.search(str(text)): + return + + if is_null_string(text): + raise ValidationError(reason=ErrorReason.NULL_NAME) + + raise TypeError(f"text must be a string: actual={type(text)}") + + +def to_str(name: PathType) -> str: + if isinstance(name, PurePath): + return str(name) + + return name + + +def is_null_string(value: Any) -> bool: + if value is None: + return True + + try: + return len(value.strip()) == 0 + except AttributeError: + return False + + +def _is_not_null_string(value: Any) -> bool: + try: + return len(value.strip()) > 0 + except AttributeError: + return False + + +def _get_unprintable_ascii_chars() -> List[str]: + return [chr(c) for c in range(128) if chr(c) not in string.printable] + + +unprintable_ascii_chars = tuple(_get_unprintable_ascii_chars()) + + +def _get_ascii_symbols() -> List[str]: + symbol_list: List[str] = [] + + for i in range(128): + c = chr(i) + + if c in unprintable_ascii_chars or c in string.digits + string.ascii_letters: + continue + + symbol_list.append(c) + + return symbol_list + + +ascii_symbols = tuple(_get_ascii_symbols()) + +__RE_UNPRINTABLE_CHARS = re.compile( + "[{}]".format(re.escape("".join(unprintable_ascii_chars))), re.UNICODE +) +__RE_ANSI_ESCAPE = re.compile( + r"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])" +) + + +def validate_unprintable_char(text: str) -> None: + from .error import InvalidCharError + + match_list = __RE_UNPRINTABLE_CHARS.findall(to_str(text)) + if match_list: + raise InvalidCharError(f"unprintable character found: {match_list}") + + +def replace_unprintable_char(text: str, replacement_text: str = "") -> str: + try: + return __RE_UNPRINTABLE_CHARS.sub(replacement_text, text) + except (TypeError, AttributeError): + raise TypeError("text must be a string") + + +def replace_ansi_escape(text: str, replacement_text: str = "") -> str: + try: + return __RE_ANSI_ESCAPE.sub(replacement_text, text) + except (TypeError, AttributeError): + raise TypeError("text must be a string") + + +def normalize_platform(name: Optional[PlatformType]) -> Platform: + if isinstance(name, Platform): + return name + + if not name: + return Platform.UNIVERSAL + + name = name.strip().casefold() + + if name == "posix": + return Platform.POSIX + + if name == "auto": + name = platform.system().casefold() + + if name in ["linux"]: + return Platform.LINUX + + if name and name.startswith("win"): + return Platform.WINDOWS + + if name in ["mac", "macos", "darwin"]: + return Platform.MACOS + + return Platform.UNIVERSAL + + +def findall_to_str(match: List[Any]) -> str: + return ", ".join([repr(text) for text in match]) diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_const.py b/venv/lib/python3.10/site-packages/pathvalidate/_const.py new file mode 100644 index 0000000000000000000000000000000000000000..6a53dfdcd45425fb9db7e68bbbd15847a5c0b161 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_const.py @@ -0,0 +1,40 @@ +import enum + + +DEFAULT_MIN_LEN = 1 
+INVALID_CHAR_ERR_MSG_TMPL = "invalids=({invalid}), value={value}" + + +_NTFS_RESERVED_FILE_NAMES = ( + "$Mft", + "$MftMirr", + "$LogFile", + "$Volume", + "$AttrDef", + "$Bitmap", + "$Boot", + "$BadClus", + "$Secure", + "$Upcase", + "$Extend", + "$Quota", + "$ObjId", + "$Reparse", +) # Only in root directory + + +@enum.unique +class Platform(enum.Enum): + """ + Platform specifier enumeration. + """ + + #: POSIX compatible platform. + POSIX = "POSIX" + + #: platform independent. note that absolute paths cannot specify this. + UNIVERSAL = "universal" + + LINUX = "Linux" + WINDOWS = "Windows" + MACOS = "macOS" diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_filename.py b/venv/lib/python3.10/site-packages/pathvalidate/_filename.py new file mode 100644 index 0000000000000000000000000000000000000000..707c55fe0013d86373af03b948e597d5fe672601 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_filename.py @@ -0,0 +1,466 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +import itertools +import ntpath +import posixpath +import re +import warnings +from pathlib import Path, PurePath +from typing import Optional, Pattern, Sequence, Tuple + +from ._base import AbstractSanitizer, AbstractValidator, BaseFile, BaseValidator +from ._common import findall_to_str, to_str, validate_pathtype +from ._const import DEFAULT_MIN_LEN, INVALID_CHAR_ERR_MSG_TMPL, Platform +from ._types import PathType, PlatformType +from .error import ErrorAttrKey, ErrorReason, InvalidCharError, ValidationError +from .handler import ReservedNameHandler, ValidationErrorHandler + + +_DEFAULT_MAX_FILENAME_LEN = 255 +_RE_INVALID_FILENAME = re.compile(f"[{re.escape(BaseFile._INVALID_FILENAME_CHARS):s}]", re.UNICODE) +_RE_INVALID_WIN_FILENAME = re.compile( + f"[{re.escape(BaseFile._INVALID_WIN_FILENAME_CHARS):s}]", re.UNICODE +) + + +class FileNameSanitizer(AbstractSanitizer): + def __init__( + self, + max_len: int = _DEFAULT_MAX_FILENAME_LEN, + fs_encoding: Optional[str] = None, + platform: Optional[PlatformType] = None, + null_value_handler: Optional[ValidationErrorHandler] = None, + reserved_name_handler: Optional[ValidationErrorHandler] = None, + additional_reserved_names: Optional[Sequence[str]] = None, + validate_after_sanitize: bool = False, + validator: Optional[AbstractValidator] = None, + ) -> None: + if validator: + fname_validator = validator + else: + fname_validator = FileNameValidator( + min_len=DEFAULT_MIN_LEN, + max_len=max_len, + fs_encoding=fs_encoding, + check_reserved=True, + additional_reserved_names=additional_reserved_names, + platform=platform, + ) + + super().__init__( + max_len=max_len, + fs_encoding=fs_encoding, + null_value_handler=null_value_handler, + reserved_name_handler=reserved_name_handler, + additional_reserved_names=additional_reserved_names, + platform_max_len=_DEFAULT_MAX_FILENAME_LEN, + platform=platform, + validate_after_sanitize=validate_after_sanitize, + validator=fname_validator, + ) + + self._sanitize_regexp = self._get_sanitize_regexp() + + def sanitize(self, value: PathType, replacement_text: str = "") -> PathType: + try: + validate_pathtype(value, allow_whitespaces=not self._is_windows(include_universal=True)) + except ValidationError as e: + if e.reason == ErrorReason.NULL_NAME: + if isinstance(value, PurePath): + raise + + return self._null_value_handler(e) + raise + + sanitized_filename = self._sanitize_regexp.sub(replacement_text, str(value)) + sanitized_filename = sanitized_filename[: self.max_len] + + try: + self._validator.validate(sanitized_filename) + 
except ValidationError as e: + if e.reason == ErrorReason.RESERVED_NAME: + replacement_word = self._reserved_name_handler(e) + if e.reserved_name != replacement_word: + sanitized_filename = re.sub( + re.escape(e.reserved_name), replacement_word, sanitized_filename + ) + elif e.reason == ErrorReason.INVALID_CHARACTER and self._is_windows( + include_universal=True + ): + # Do not start a file or directory name with a space + sanitized_filename = sanitized_filename.lstrip(" ") + + # Do not end a file or directory name with a space or a period + sanitized_filename = sanitized_filename.rstrip(" ") + if sanitized_filename not in (".", ".."): + sanitized_filename = sanitized_filename.rstrip(" .") + elif e.reason == ErrorReason.NULL_NAME: + sanitized_filename = self._null_value_handler(e) + + if self._validate_after_sanitize: + try: + self._validator.validate(sanitized_filename) + except ValidationError as e: + raise ValidationError( + description=str(e), + reason=ErrorReason.INVALID_AFTER_SANITIZE, + platform=self.platform, + ) + + if isinstance(value, PurePath): + return Path(sanitized_filename) + + return sanitized_filename + + def _get_sanitize_regexp(self) -> Pattern[str]: + if self._is_windows(include_universal=True): + return _RE_INVALID_WIN_FILENAME + + return _RE_INVALID_FILENAME + + +class FileNameValidator(BaseValidator): + _WINDOWS_RESERVED_FILE_NAMES = ("CON", "PRN", "AUX", "CLOCK$", "NUL") + tuple( + f"{name:s}{num:d}" for name, num in itertools.product(("COM", "LPT"), range(1, 10)) + ) + _MACOS_RESERVED_FILE_NAMES = (":",) + + @property + def reserved_keywords(self) -> Tuple[str, ...]: + common_keywords = super().reserved_keywords + + if self._is_universal(): + word_set = set( + common_keywords + + self._WINDOWS_RESERVED_FILE_NAMES + + self._MACOS_RESERVED_FILE_NAMES + ) + elif self._is_windows(): + word_set = set(common_keywords + self._WINDOWS_RESERVED_FILE_NAMES) + elif self._is_posix() or self._is_macos(): + word_set = set(common_keywords + self._MACOS_RESERVED_FILE_NAMES) + else: + word_set = set(common_keywords) + + return tuple(sorted(word_set)) + + def __init__( + self, + min_len: int = DEFAULT_MIN_LEN, + max_len: int = _DEFAULT_MAX_FILENAME_LEN, + fs_encoding: Optional[str] = None, + platform: Optional[PlatformType] = None, + check_reserved: bool = True, + additional_reserved_names: Optional[Sequence[str]] = None, + ) -> None: + super().__init__( + min_len=min_len, + max_len=max_len, + fs_encoding=fs_encoding, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + platform_max_len=_DEFAULT_MAX_FILENAME_LEN, + platform=platform, + ) + + def validate(self, value: PathType) -> None: + validate_pathtype(value, allow_whitespaces=not self._is_windows(include_universal=True)) + + unicode_filename = to_str(value) + byte_ct = len(unicode_filename.encode(self._fs_encoding)) + + self.validate_abspath(unicode_filename) + + err_kwargs = { + ErrorAttrKey.REASON: ErrorReason.INVALID_LENGTH, + ErrorAttrKey.PLATFORM: self.platform, + ErrorAttrKey.FS_ENCODING: self._fs_encoding, + ErrorAttrKey.BYTE_COUNT: byte_ct, + } + if byte_ct > self.max_len: + raise ValidationError( + [ + f"filename is too long: expected<={self.max_len:d} bytes, actual={byte_ct:d} bytes" + ], + **err_kwargs, + ) + if byte_ct < self.min_len: + raise ValidationError( + [ + f"filename is too short: expected>={self.min_len:d} bytes, actual={byte_ct:d} bytes" + ], + **err_kwargs, + ) + + self._validate_reserved_keywords(unicode_filename) + 
self.__validate_universal_filename(unicode_filename) + + if self._is_windows(include_universal=True): + self.__validate_win_filename(unicode_filename) + + def validate_abspath(self, value: str) -> None: + err = ValidationError( + description=f"found an absolute path ({value}), expected a filename", + platform=self.platform, + reason=ErrorReason.FOUND_ABS_PATH, + ) + + if self._is_windows(include_universal=True): + if ntpath.isabs(value): + raise err + + if posixpath.isabs(value): + raise err + + def __validate_universal_filename(self, unicode_filename: str) -> None: + match = _RE_INVALID_FILENAME.findall(unicode_filename) + if match: + raise InvalidCharError( + INVALID_CHAR_ERR_MSG_TMPL.format( + invalid=findall_to_str(match), value=repr(unicode_filename) + ), + platform=Platform.UNIVERSAL, + ) + + def __validate_win_filename(self, unicode_filename: str) -> None: + match = _RE_INVALID_WIN_FILENAME.findall(unicode_filename) + if match: + raise InvalidCharError( + INVALID_CHAR_ERR_MSG_TMPL.format( + invalid=findall_to_str(match), value=repr(unicode_filename) + ), + platform=Platform.WINDOWS, + ) + + if unicode_filename in (".", ".."): + return + + KB2829981_err_tmpl = "{}. Refer: https://learn.microsoft.com/en-us/troubleshoot/windows-client/shell-experience/file-folder-name-whitespace-characters" # noqa: E501 + + if unicode_filename[-1] in (" ", "."): + raise InvalidCharError( + INVALID_CHAR_ERR_MSG_TMPL.format( + invalid=re.escape(unicode_filename[-1]), value=repr(unicode_filename) + ), + platform=Platform.WINDOWS, + description=KB2829981_err_tmpl.format( + "Do not end a file or directory name with a space or a period" + ), + ) + + if unicode_filename[0] in (" "): + raise InvalidCharError( + INVALID_CHAR_ERR_MSG_TMPL.format( + invalid=re.escape(unicode_filename[0]), value=repr(unicode_filename) + ), + platform=Platform.WINDOWS, + description=KB2829981_err_tmpl.format( + "Do not start a file or directory name with a space" + ), + ) + + +def validate_filename( + filename: PathType, + platform: Optional[PlatformType] = None, + min_len: int = DEFAULT_MIN_LEN, + max_len: int = _DEFAULT_MAX_FILENAME_LEN, + fs_encoding: Optional[str] = None, + check_reserved: bool = True, + additional_reserved_names: Optional[Sequence[str]] = None, +) -> None: + """Verify whether the ``filename`` is a valid file name or not. + + Args: + filename: + Filename to validate. + platform: + Target platform name of the filename. + + .. include:: platform.txt + min_len: + Minimum byte length of the ``filename``. The value must be greater than or equal to one. + Defaults to ``1``. + max_len: + Maximum byte length of the ``filename``. The value must be lower than: + + - ``Linux``: 4096 + - ``macOS``: 1024 + - ``Windows``: 260 + - ``universal``: 260 + + Defaults to ``255``. + fs_encoding: + Filesystem encoding that is used to calculate the byte length of the filename. + If |None|, get the value from the execution environment. + check_reserved: + If |True|, check reserved names of the ``platform``. + additional_reserved_names: + Additional reserved names to check. + Case insensitive. + + Raises: + ValidationError (ErrorReason.INVALID_LENGTH): + If the ``filename`` is longer than ``max_len`` characters. + ValidationError (ErrorReason.INVALID_CHARACTER): + If the ``filename`` includes invalid character(s) for a filename: + |invalid_filename_chars|. + The following characters are also invalid for Windows platforms: + |invalid_win_filename_chars|.
+ ValidationError (ErrorReason.RESERVED_NAME): + If the ``filename`` equals a name reserved by the OS. + Windows reserved names are as follows: + ``"CON"``, ``"PRN"``, ``"AUX"``, ``"NUL"``, ``"COM[1-9]"``, ``"LPT[1-9]"``. + + Example: + :ref:`example-validate-filename` + + See Also: + `Naming Files, Paths, and Namespaces - Win32 apps | Microsoft Docs + `__ + """ + + FileNameValidator( + platform=platform, + min_len=min_len, + max_len=max_len, + fs_encoding=fs_encoding, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + ).validate(filename) + + +def is_valid_filename( + filename: PathType, + platform: Optional[PlatformType] = None, + min_len: int = DEFAULT_MIN_LEN, + max_len: Optional[int] = None, + fs_encoding: Optional[str] = None, + check_reserved: bool = True, + additional_reserved_names: Optional[Sequence[str]] = None, +) -> bool: + """Check whether the ``filename`` is a valid name or not. + + Args: + filename: + A filename to be checked. + platform: + Target platform name of the filename. + + Example: + :ref:`example-is-valid-filename` + + See Also: + :py:func:`.validate_filename()` + """ + + return FileNameValidator( + platform=platform, + min_len=min_len, + max_len=-1 if max_len is None else max_len, + fs_encoding=fs_encoding, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + ).is_valid(filename) + + +def sanitize_filename( + filename: PathType, + replacement_text: str = "", + platform: Optional[PlatformType] = None, + max_len: Optional[int] = _DEFAULT_MAX_FILENAME_LEN, + fs_encoding: Optional[str] = None, + check_reserved: Optional[bool] = None, + null_value_handler: Optional[ValidationErrorHandler] = None, + reserved_name_handler: Optional[ValidationErrorHandler] = None, + additional_reserved_names: Optional[Sequence[str]] = None, + validate_after_sanitize: bool = False, +) -> PathType: + """Make a valid filename from a string. + + To make a valid filename, the function does the following: + + - Replace characters in the ``filename`` that are invalid as file names + with the ``replacement_text``. Invalid characters are: + + - unprintable characters + - |invalid_filename_chars| + - for Windows (or universal) only: |invalid_win_filename_chars| + + - Replace the value with the handler specified by ``reserved_name_handler`` + if the sanitized value is a name reserved by an operating system. + + Args: + filename: Filename to sanitize. + replacement_text: + Replacement text for invalid characters. Defaults to ``""``. + platform: + Target platform name of the filename. + + .. include:: platform.txt + max_len: + Maximum byte length of the ``filename``. + Truncate the name length if the ``filename`` length exceeds this value. + Defaults to ``255``. + fs_encoding: + Filesystem encoding that is used to calculate the byte length of the filename. + If |None|, get the value from the execution environment. + check_reserved: + [Deprecated] Use 'reserved_name_handler' instead. + null_value_handler: + Function called when a value after sanitization is an empty string. + You can specify predefined handlers: + + - :py:func:`~.handler.NullValueHandler.return_null_string` + - :py:func:`~.handler.NullValueHandler.return_timestamp` + - :py:func:`~.handler.raise_error` + + Defaults to :py:func:`.handler.NullValueHandler.return_null_string`, which just returns ``""``. + reserved_name_handler: + Function called when a value after sanitization is a reserved name.
+ You can specify predefined handlers: + + - :py:meth:`~.handler.ReservedNameHandler.add_leading_underscore` + - :py:meth:`~.handler.ReservedNameHandler.add_trailing_underscore` + - :py:meth:`~.handler.ReservedNameHandler.as_is` + - :py:func:`~.handler.raise_error` + + Defaults to :py:meth:`~.handler.ReservedNameHandler.add_trailing_underscore`. + additional_reserved_names: + Additional reserved names to sanitize. + Case insensitive. + validate_after_sanitize: + Execute validation of the file name after sanitization. + + Returns: + Same type as the ``filename`` (str or PathLike object): + Sanitized filename. + + Raises: + ValueError: + If the ``filename`` is an invalid filename. + + Example: + :ref:`example-sanitize-filename` + """ + + if check_reserved is not None: + warnings.warn( + "'check_reserved' is deprecated. Use 'reserved_name_handler' instead.", + DeprecationWarning, + ) + + if check_reserved is False: + reserved_name_handler = ReservedNameHandler.as_is + + return FileNameSanitizer( + platform=platform, + max_len=-1 if max_len is None else max_len, + fs_encoding=fs_encoding, + null_value_handler=null_value_handler, + reserved_name_handler=reserved_name_handler, + additional_reserved_names=additional_reserved_names, + validate_after_sanitize=validate_after_sanitize, + ).sanitize(filename, replacement_text) diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_filepath.py b/venv/lib/python3.10/site-packages/pathvalidate/_filepath.py new file mode 100644 index 0000000000000000000000000000000000000000..f880d34beff00d58b0c8f2da6f9930729ac0bdc3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_filepath.py @@ -0,0 +1,516 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +import ntpath +import os.path +import posixpath +import re +import warnings +from pathlib import Path, PurePath +from typing import List, Optional, Pattern, Sequence, Tuple + +from ._base import AbstractSanitizer, AbstractValidator, BaseFile, BaseValidator +from ._common import findall_to_str, to_str, validate_pathtype +from ._const import _NTFS_RESERVED_FILE_NAMES, DEFAULT_MIN_LEN, INVALID_CHAR_ERR_MSG_TMPL, Platform +from ._filename import FileNameSanitizer, FileNameValidator +from ._types import PathType, PlatformType +from .error import ErrorAttrKey, ErrorReason, InvalidCharError, ReservedNameError, ValidationError +from .handler import ReservedNameHandler, ValidationErrorHandler + + +_RE_INVALID_PATH = re.compile(f"[{re.escape(BaseFile._INVALID_PATH_CHARS):s}]", re.UNICODE) +_RE_INVALID_WIN_PATH = re.compile(f"[{re.escape(BaseFile._INVALID_WIN_PATH_CHARS):s}]", re.UNICODE) + + +class FilePathSanitizer(AbstractSanitizer): + def __init__( + self, + max_len: int = -1, + fs_encoding: Optional[str] = None, + platform: Optional[PlatformType] = None, + null_value_handler: Optional[ValidationErrorHandler] = None, + reserved_name_handler: Optional[ValidationErrorHandler] = None, + additional_reserved_names: Optional[Sequence[str]] = None, + normalize: bool = True, + validate_after_sanitize: bool = False, + validator: Optional[AbstractValidator] = None, + ) -> None: + if validator: + fpath_validator = validator + else: + fpath_validator = FilePathValidator( + min_len=DEFAULT_MIN_LEN, + max_len=max_len, + fs_encoding=fs_encoding, + check_reserved=True, + additional_reserved_names=additional_reserved_names, + platform=platform, + ) + super().__init__( + max_len=max_len, + fs_encoding=fs_encoding, + validator=fpath_validator, + null_value_handler=null_value_handler, + reserved_name_handler=reserved_name_handler,
additional_reserved_names=additional_reserved_names, + platform=platform, + validate_after_sanitize=validate_after_sanitize, + ) + + self._sanitize_regexp = self._get_sanitize_regexp() + self.__fname_sanitizer = FileNameSanitizer( + max_len=self.max_len, + fs_encoding=fs_encoding, + null_value_handler=null_value_handler, + reserved_name_handler=reserved_name_handler, + additional_reserved_names=additional_reserved_names, + platform=self.platform, + validate_after_sanitize=validate_after_sanitize, + ) + self.__normalize = normalize + + if self._is_windows(include_universal=True): + self.__split_drive = ntpath.splitdrive + else: + self.__split_drive = posixpath.splitdrive + + def sanitize(self, value: PathType, replacement_text: str = "") -> PathType: + try: + validate_pathtype(value, allow_whitespaces=not self._is_windows(include_universal=True)) + except ValidationError as e: + if e.reason == ErrorReason.NULL_NAME: + if isinstance(value, PurePath): + raise + + return self._null_value_handler(e) + raise + + unicode_filepath = to_str(value) + + drive, unicode_filepath = self.__split_drive(unicode_filepath) + unicode_filepath = self._sanitize_regexp.sub(replacement_text, unicode_filepath) + if self.__normalize and unicode_filepath: + unicode_filepath = os.path.normpath(unicode_filepath) + sanitized_path = unicode_filepath + + sanitized_entries: List[str] = [] + if drive: + sanitized_entries.append(drive) + for entry in sanitized_path.replace("\\", "/").split("/"): + if entry in _NTFS_RESERVED_FILE_NAMES: + sanitized_entries.append(f"{entry}_") + continue + + sanitized_entry = str( + self.__fname_sanitizer.sanitize(entry, replacement_text=replacement_text) + ) + if not sanitized_entry: + if not sanitized_entries: + sanitized_entries.append("") + continue + + sanitized_entries.append(sanitized_entry) + + sanitized_path = self.__get_path_separator().join(sanitized_entries) + try: + self._validator.validate(sanitized_path) + except ValidationError as e: + if e.reason == ErrorReason.NULL_NAME: + sanitized_path = self._null_value_handler(e) + + if self._validate_after_sanitize: + self._validator.validate(sanitized_path) + + if isinstance(value, PurePath): + return Path(sanitized_path) + + return sanitized_path + + def _get_sanitize_regexp(self) -> Pattern[str]: + if self._is_windows(include_universal=True): + return _RE_INVALID_WIN_PATH + + return _RE_INVALID_PATH + + def __get_path_separator(self) -> str: + if self._is_windows(): + return "\\" + + return "/" + + +class FilePathValidator(BaseValidator): + _RE_NTFS_RESERVED = re.compile( + "|".join(f"^/{re.escape(pattern)}$" for pattern in _NTFS_RESERVED_FILE_NAMES), + re.IGNORECASE, + ) + _MACOS_RESERVED_FILE_PATHS = ("/", ":") + + @property + def reserved_keywords(self) -> Tuple[str, ...]: + common_keywords = super().reserved_keywords + + if any([self._is_universal(), self._is_posix(), self._is_macos()]): + return common_keywords + self._MACOS_RESERVED_FILE_PATHS + + if self._is_linux(): + return common_keywords + ("/",) + + return common_keywords + + def __init__( + self, + min_len: int = DEFAULT_MIN_LEN, + max_len: int = -1, + fs_encoding: Optional[str] = None, + platform: Optional[PlatformType] = None, + check_reserved: bool = True, + additional_reserved_names: Optional[Sequence[str]] = None, + ) -> None: + super().__init__( + min_len=min_len, + max_len=max_len, + fs_encoding=fs_encoding, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + platform=platform, + ) + + self.__fname_validator = 
FileNameValidator( + min_len=min_len, + max_len=max_len, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + platform=platform, + ) + + if self._is_windows(include_universal=True): + self.__split_drive = ntpath.splitdrive + else: + self.__split_drive = posixpath.splitdrive + + def validate(self, value: PathType) -> None: + validate_pathtype(value, allow_whitespaces=not self._is_windows(include_universal=True)) + self.validate_abspath(value) + + _drive, tail = self.__split_drive(value) + if not tail: + return + + unicode_filepath = to_str(tail) + byte_ct = len(unicode_filepath.encode(self._fs_encoding)) + err_kwargs = { + ErrorAttrKey.REASON: ErrorReason.INVALID_LENGTH, + ErrorAttrKey.PLATFORM: self.platform, + ErrorAttrKey.FS_ENCODING: self._fs_encoding, + ErrorAttrKey.BYTE_COUNT: byte_ct, + } + + if byte_ct > self.max_len: + raise ValidationError( + [ + f"file path is too long: expected<={self.max_len:d} bytes, actual={byte_ct:d} bytes" + ], + **err_kwargs, + ) + if byte_ct < self.min_len: + raise ValidationError( + [ + "file path is too short: expected>={:d} bytes, actual={:d} bytes".format( + self.min_len, byte_ct + ) + ], + **err_kwargs, + ) + + self._validate_reserved_keywords(unicode_filepath) + unicode_filepath = unicode_filepath.replace("\\", "/") + for entry in unicode_filepath.split("/"): + if not entry or entry in (".", ".."): + continue + + self.__fname_validator._validate_reserved_keywords(entry) + + if self._is_windows(include_universal=True): + self.__validate_win_filepath(unicode_filepath) + else: + self.__validate_unix_filepath(unicode_filepath) + + def validate_abspath(self, value: PathType) -> None: + is_posix_abs = posixpath.isabs(value) + is_nt_abs = ntpath.isabs(value) + err_object = ValidationError( + description=( + "an invalid absolute file path ({}) for the platform ({}).".format( + value, self.platform.value + ) + + " to avoid the error, specify an appropriate platform corresponding to" + + " the path format or 'auto'." + ), + platform=self.platform, + reason=ErrorReason.MALFORMED_ABS_PATH, + ) + + if any([self._is_windows() and is_nt_abs, self._is_linux() and is_posix_abs]): + return + + if self._is_universal() and any([is_posix_abs, is_nt_abs]): + raise ValidationError( + description=( + ("POSIX style" if is_posix_abs else "NT style") + + " absolute file path found. expected a platform-independent file path." 
+ ), + platform=self.platform, + reason=ErrorReason.MALFORMED_ABS_PATH, + ) + + if self._is_windows(include_universal=True) and is_posix_abs: + raise err_object + + drive, _tail = ntpath.splitdrive(value) + if not self._is_windows() and drive and is_nt_abs: + raise err_object + + def __validate_unix_filepath(self, unicode_filepath: str) -> None: + match = _RE_INVALID_PATH.findall(unicode_filepath) + if match: + raise InvalidCharError( + INVALID_CHAR_ERR_MSG_TMPL.format( + invalid=findall_to_str(match), value=repr(unicode_filepath) + ) + ) + + def __validate_win_filepath(self, unicode_filepath: str) -> None: + match = _RE_INVALID_WIN_PATH.findall(unicode_filepath) + if match: + raise InvalidCharError( + INVALID_CHAR_ERR_MSG_TMPL.format( + invalid=findall_to_str(match), value=repr(unicode_filepath) + ), + platform=Platform.WINDOWS, + ) + + _drive, value = self.__split_drive(unicode_filepath) + if value: + match_reserved = self._RE_NTFS_RESERVED.search(value) + if match_reserved: + reserved_name = match_reserved.group() + raise ReservedNameError( + f"'{reserved_name}' is a reserved name", + reusable_name=False, + reserved_name=reserved_name, + platform=self.platform, + ) + + +def validate_filepath( + file_path: PathType, + platform: Optional[PlatformType] = None, + min_len: int = DEFAULT_MIN_LEN, + max_len: Optional[int] = None, + fs_encoding: Optional[str] = None, + check_reserved: bool = True, + additional_reserved_names: Optional[Sequence[str]] = None, +) -> None: + """Verify whether the ``file_path`` is a valid file path or not. + + Args: + file_path (PathType): + File path to be validated. + platform (Optional[PlatformType], optional): + Target platform name of the file path. + + .. include:: platform.txt + min_len (int, optional): + Minimum byte length of the ``file_path``. The value must be greater than or equal to one. + Defaults to ``1``. + max_len (Optional[int], optional): + Maximum byte length of the ``file_path``. If the value is |None| or negative, + it is automatically determined by the ``platform``: + + - ``Linux``: 4096 + - ``macOS``: 1024 + - ``Windows``: 260 + - ``universal``: 260 + fs_encoding (Optional[str], optional): + Filesystem encoding used to calculate the byte length of the file path. + If |None|, the value is taken from the execution environment. + check_reserved (bool, optional): + If |True|, check reserved names of the ``platform``. + Defaults to |True|. + additional_reserved_names (Optional[Sequence[str]], optional): + Additional reserved names to check. + + Raises: + ValidationError (ErrorReason.INVALID_CHARACTER): + If the ``file_path`` includes invalid char(s): + |invalid_file_path_chars|. + The following characters are also invalid for Windows platforms: + |invalid_win_file_path_chars| + ValidationError (ErrorReason.INVALID_LENGTH): + If the ``file_path`` is longer than ``max_len`` bytes. + ValidationError: + If the ``file_path`` includes invalid values. 
+ + Example: + :ref:`example-validate-file-path` + + See Also: + `Naming Files, Paths, and Namespaces - Win32 apps | Microsoft Docs + `__ + """ + + FilePathValidator( + platform=platform, + min_len=min_len, + max_len=-1 if max_len is None else max_len, + fs_encoding=fs_encoding, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + ).validate(file_path) + + +def is_valid_filepath( + file_path: PathType, + platform: Optional[PlatformType] = None, + min_len: int = DEFAULT_MIN_LEN, + max_len: Optional[int] = None, + fs_encoding: Optional[str] = None, + check_reserved: bool = True, + additional_reserved_names: Optional[Sequence[str]] = None, +) -> bool: + """Check whether the ``file_path`` is a valid file path or not. + + Args: + file_path: + A file path to be checked. + platform: + Target platform name of the file path. + + Example: + :ref:`example-is-valid-filepath` + + See Also: + :py:func:`.validate_filepath()` + """ + + return FilePathValidator( + platform=platform, + min_len=min_len, + max_len=-1 if max_len is None else max_len, + fs_encoding=fs_encoding, + check_reserved=check_reserved, + additional_reserved_names=additional_reserved_names, + ).is_valid(file_path) + + +def sanitize_filepath( + file_path: PathType, + replacement_text: str = "", + platform: Optional[PlatformType] = None, + max_len: Optional[int] = None, + fs_encoding: Optional[str] = None, + check_reserved: Optional[bool] = None, + null_value_handler: Optional[ValidationErrorHandler] = None, + reserved_name_handler: Optional[ValidationErrorHandler] = None, + additional_reserved_names: Optional[Sequence[str]] = None, + normalize: bool = True, + validate_after_sanitize: bool = False, +) -> PathType: + """Make a valid file path from a string. + + To make a valid file path, the function does the following: + + - Replace invalid characters for a file path within the ``file_path`` + with the ``replacement_text``. Invalid characters are as follows: + + - unprintable characters + - |invalid_file_path_chars| + - for Windows (or universal) only: |invalid_win_file_path_chars| + + - If a sanitized value is a name reserved by the operating system, + replace it using the handler specified by ``reserved_name_handler``. + + Args: + file_path: + File path to sanitize. + replacement_text: + Replacement text for invalid characters. + Defaults to ``""``. + platform: + Target platform name of the file path. + + .. include:: platform.txt + max_len: + Maximum byte length of the file path. + Truncate the path if its length exceeds ``max_len``. + If the value is |None| or negative, ``max_len`` is automatically determined by the ``platform``: + + - ``Linux``: 4096 + - ``macOS``: 1024 + - ``Windows``: 260 + - ``universal``: 260 + fs_encoding: + Filesystem encoding used to calculate the byte length of the file path. + If |None|, the value is taken from the execution environment. + check_reserved: + [Deprecated] Use 'reserved_name_handler' instead. + null_value_handler: + Function called when a value after sanitization is an empty string. + You can specify predefined handlers: + + - :py:meth:`~.handler.NullValueHandler.return_null_string` + - :py:meth:`~.handler.NullValueHandler.return_timestamp` + - :py:func:`~.handler.raise_error` + + Defaults to :py:meth:`~.handler.NullValueHandler.return_null_string`, which just returns ``""``. + reserved_name_handler: + Function called when a value after sanitization is one of the reserved names. 
+ You can specify predefined handlers: + + - :py:meth:`~.handler.ReservedNameHandler.add_leading_underscore` + - :py:meth:`~.handler.ReservedNameHandler.add_trailing_underscore` + - :py:meth:`~.handler.ReservedNameHandler.as_is` + - :py:func:`~.handler.raise_error` + + Defaults to :py:meth:`~.handler.ReservedNameHandler.add_trailing_underscore`. + additional_reserved_names: + Additional reserved names to sanitize. + Case insensitive. + normalize: + If |True|, normalize the file path. + validate_after_sanitize: + Execute validation after sanitization of the file path. + + Returns: + Same type as the argument (str or PathLike object): + Sanitized file path. + + Raises: + ValueError: + If the ``file_path`` is an invalid file path. + + Example: + :ref:`example-sanitize-file-path` + """ + + if check_reserved is not None: + warnings.warn( + "'check_reserved' is deprecated. Use 'reserved_name_handler' instead.", + DeprecationWarning, + ) + + if check_reserved is False: + reserved_name_handler = ReservedNameHandler.as_is + + return FilePathSanitizer( + platform=platform, + max_len=-1 if max_len is None else max_len, + fs_encoding=fs_encoding, + normalize=normalize, + null_value_handler=null_value_handler, + reserved_name_handler=reserved_name_handler, + additional_reserved_names=additional_reserved_names, + validate_after_sanitize=validate_after_sanitize, + ).sanitize(file_path, replacement_text) diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_ltsv.py b/venv/lib/python3.10/site-packages/pathvalidate/_ltsv.py new file mode 100644 index 0000000000000000000000000000000000000000..825558070fece3892c9f172bb12cab3629afb34a --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_ltsv.py @@ -0,0 +1,43 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +import re + +from ._common import to_str, validate_pathtype +from .error import InvalidCharError + + +__RE_INVALID_LTSV_LABEL = re.compile("[^0-9A-Za-z_.-]", re.UNICODE) + + +def validate_ltsv_label(label: str) -> None: + """ + Verify whether ``label`` is a valid + `Labeled Tab-separated Values (LTSV) `__ label or not. + + :param label: Label to validate. + :raises pathvalidate.ValidationError: + If invalid character(s) are found in the ``label`` for an LTSV format label. + """ + + validate_pathtype(label, allow_whitespaces=False) + + match_list = __RE_INVALID_LTSV_LABEL.findall(to_str(label)) + if match_list: + raise InvalidCharError(f"invalid character found for a LTSV format label: {match_list}") + + +def sanitize_ltsv_label(label: str, replacement_text: str = "") -> str: + """ + Replace all invalid characters for an LTSV label within the ``label`` with the ``replacement_text``. + + :param label: Input text. + :param replacement_text: Replacement text. + :return: A replacement string. + :rtype: str + """ + + validate_pathtype(label, allow_whitespaces=False) + + return __RE_INVALID_LTSV_LABEL.sub(replacement_text, to_str(label)) diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_symbol.py b/venv/lib/python3.10/site-packages/pathvalidate/_symbol.py new file mode 100644 index 0000000000000000000000000000000000000000..4d37cd5aa57c764f67e98e0a1c9611de982bd40a --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_symbol.py @@ -0,0 +1,92 @@ +""" +.. 
codeauthor:: Tsuyoshi Hombashi +""" + +import re +from typing import Sequence + +from ._common import ascii_symbols, to_str, unprintable_ascii_chars +from .error import InvalidCharError + + +__RE_SYMBOL = re.compile( + "[{}]".format(re.escape("".join(ascii_symbols + unprintable_ascii_chars))), re.UNICODE +) + + +def validate_symbol(text: str) -> None: + """ + Verify whether symbol(s) are included in the ``text`` or not. + + Args: + text: + Input text to validate. + + Raises: + ValidationError (ErrorReason.INVALID_CHARACTER): + If symbol(s) are included in the ``text``. + """ + + match_list = __RE_SYMBOL.findall(to_str(text)) + if match_list: + raise InvalidCharError(f"invalid symbols found: {match_list}") + + +def replace_symbol( + text: str, + replacement_text: str = "", + exclude_symbols: Sequence[str] = [], + is_replace_consecutive_chars: bool = False, + is_strip: bool = False, +) -> str: + """ + Replace all of the symbols in the ``text``. + + Args: + text: + Input text. + replacement_text: + Replacement text. + exclude_symbols: + Symbols to exclude from the replacement. + is_replace_consecutive_chars: + If |True|, replace consecutive multiple ``replacement_text`` characters + with a single character. + is_strip: + If |True|, strip ``replacement_text`` from the beginning/end of the result. + + Returns: + A replacement string. + + Example: + + :ref:`example-sanitize-symbol` + """ + + if exclude_symbols: + regexp = re.compile( + "[{}]".format( + re.escape( + "".join(set(ascii_symbols + unprintable_ascii_chars) - set(exclude_symbols)) + ) + ), + re.UNICODE, + ) + else: + regexp = __RE_SYMBOL + + try: + new_text = regexp.sub(replacement_text, to_str(text)) + except TypeError: + raise TypeError("text must be a string") + + if not replacement_text: + return new_text + + if is_replace_consecutive_chars: + new_text = re.sub(f"{re.escape(replacement_text)}+", replacement_text, new_text) + + if is_strip: + new_text = new_text.strip(replacement_text) + + return new_text diff --git a/venv/lib/python3.10/site-packages/pathvalidate/_types.py b/venv/lib/python3.10/site-packages/pathvalidate/_types.py new file mode 100644 index 0000000000000000000000000000000000000000..c946842489cb1998c4e9a2dbcb95ec9286e0a528 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/_types.py @@ -0,0 +1,8 @@ +from pathlib import Path +from typing import TypeVar + +from ._const import Platform + + +PathType = TypeVar("PathType", str, Path) +PlatformType = TypeVar("PlatformType", str, Platform) diff --git a/venv/lib/python3.10/site-packages/pathvalidate/argparse.py b/venv/lib/python3.10/site-packages/pathvalidate/argparse.py new file mode 100644 index 0000000000000000000000000000000000000000..baeafe2751f007276ed14fa273f4fe206673d744 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/argparse.py @@ -0,0 +1,47 @@ +""" +.. 
codeauthor:: Tsuyoshi Hombashi +""" + +from argparse import ArgumentTypeError + +from ._filename import sanitize_filename, validate_filename +from ._filepath import sanitize_filepath, validate_filepath +from .error import ValidationError + + +def validate_filename_arg(value: str) -> str: + if not value: + return "" + + try: + validate_filename(value) + except ValidationError as e: + raise ArgumentTypeError(e) + + return value + + +def validate_filepath_arg(value: str) -> str: + if not value: + return "" + + try: + validate_filepath(value, platform="auto") + except ValidationError as e: + raise ArgumentTypeError(e) + + return value + + +def sanitize_filename_arg(value: str) -> str: + if not value: + return "" + + return sanitize_filename(value) + + +def sanitize_filepath_arg(value: str) -> str: + if not value: + return "" + + return sanitize_filepath(value, platform="auto") diff --git a/venv/lib/python3.10/site-packages/pathvalidate/click.py b/venv/lib/python3.10/site-packages/pathvalidate/click.py new file mode 100644 index 0000000000000000000000000000000000000000..76be6ea45aa974cb98ed5990628cd0cac766c03f --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/click.py @@ -0,0 +1,48 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + +import click +from click.core import Context, Option + +from ._filename import sanitize_filename, validate_filename +from ._filepath import sanitize_filepath, validate_filepath +from .error import ValidationError + + +def validate_filename_arg(ctx: Context, param: Option, value: str) -> str: + if not value: + return "" + + try: + validate_filename(value) + except ValidationError as e: + raise click.BadParameter(str(e)) + + return value + + +def validate_filepath_arg(ctx: Context, param: Option, value: str) -> str: + if not value: + return "" + + try: + validate_filepath(value) + except ValidationError as e: + raise click.BadParameter(str(e)) + + return value + + +def sanitize_filename_arg(ctx: Context, param: Option, value: str) -> str: + if not value: + return "" + + return sanitize_filename(value) + + +def sanitize_filepath_arg(ctx: Context, param: Option, value: str) -> str: + if not value: + return "" + + return sanitize_filepath(value) diff --git a/venv/lib/python3.10/site-packages/pathvalidate/handler.py b/venv/lib/python3.10/site-packages/pathvalidate/handler.py new file mode 100644 index 0000000000000000000000000000000000000000..0671891d6f57e959ec0027d7d975707414ed5d15 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pathvalidate/handler.py @@ -0,0 +1,139 @@ +""" +.. codeauthor:: Tsuyoshi Hombashi +""" + + +import warnings +from datetime import datetime +from typing import Callable + +from .error import ValidationError + + +ValidationErrorHandler = Callable[[ValidationError], str] + + +def return_null_string(e: ValidationError) -> str: + """Null value handler that always returns an empty string. + + Args: + e (ValidationError): A validation error. + + Returns: + str: An empty string. + """ + + warnings.warn( + "'return_null_string' is deprecated. Use 'NullValueHandler.return_null_string' instead.", + DeprecationWarning, + ) + + return "" + + +def return_timestamp(e: ValidationError) -> str: + """Null value handler that returns a timestamp of when the function was called. + + Args: + e (ValidationError): A validation error. + + Returns: + str: A timestamp. + """ + + warnings.warn( + "'return_timestamp' is deprecated. 
Use 'NullValueHandler.return_timestamp' instead.", + DeprecationWarning, + ) + + return str(datetime.now().timestamp()) + + +def raise_error(e: ValidationError) -> str: + """Handler that always raises an exception. + + Args: + e (ValidationError): A validation error. + + Raises: + ValidationError: Always raised. + """ + + raise e + + +class NullValueHandler: + @classmethod + def return_null_string(cls, e: ValidationError) -> str: + """Null value handler that always returns an empty string. + + Args: + e (ValidationError): A validation error. + + Returns: + str: An empty string. + """ + + return "" + + @classmethod + def return_timestamp(cls, e: ValidationError) -> str: + """Null value handler that returns a timestamp of when the function was called. + + Args: + e (ValidationError): A validation error. + + Returns: + str: A timestamp. + """ + + return str(datetime.now().timestamp()) + + +class ReservedNameHandler: + @classmethod + def add_leading_underscore(cls, e: ValidationError) -> str: + """Reserved name handler that adds a leading underscore (``"_"``) to the name + except for ``"."`` and ``".."``. + + Args: + e (ValidationError): A reserved name error. + + Returns: + str: The converted name. + """ + + if e.reserved_name in (".", "..") or e.reusable_name: + return e.reserved_name + + return f"_{e.reserved_name}" + + @classmethod + def add_trailing_underscore(cls, e: ValidationError) -> str: + """Reserved name handler that adds a trailing underscore (``"_"``) to the name + except for ``"."`` and ``".."``. + + Args: + e (ValidationError): A reserved name error. + + Returns: + str: The converted name. + """ + + if e.reserved_name in (".", "..") or e.reusable_name: + return e.reserved_name + + return f"{e.reserved_name}_" + + @classmethod + def as_is(cls, e: ValidationError) -> str: + """Reserved name handler that returns the name as is. + + Args: + e (ValidationError): A reserved name error. + + Returns: + str: The name as is. 
+ """ + + return e.reserved_name diff --git a/venv/lib/python3.10/site-packages/pathvalidate/py.typed b/venv/lib/python3.10/site-packages/pathvalidate/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..11cb9b83385b85c6ae24a2e2949d43d0d406080c Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/__version__.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/__version__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8a5798b01b346709d744625b8769099584e4eeb7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/__version__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/_internal_utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/_internal_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ffbddb1a52f34ae93ae1b71742ddf7b2e8da29b Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/_internal_utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/adapters.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/adapters.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..828a14d0042f9bdf65f0e41917e017063cbbbf45 Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/adapters.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/api.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/api.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab82505ecd1dae8432d9731e4eecfbdcefc28854 Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/api.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/auth.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/auth.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..03cabc048d028894f19bfbc0f0581a0c0427691b Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/auth.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/certs.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/certs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c96286fd6f1e77e06424d12be212d4aceced90c Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/certs.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a25d452206af26823e92ab33aa154b8020a4c4c Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/compat.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/requests/__pycache__/cookies.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/cookies.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d4a2fcf2260680838630492e702541ac512b1389 Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/cookies.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea7b391ff5a7e7997db583f3b11215026f22d91f Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/help.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/help.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..43d97842247b52c993e6fff599fefdbd76a2005d Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/help.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/hooks.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/hooks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0388d96157c1e77c375d7091f3b46d7f6da49215 Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/hooks.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/models.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92a6fdcc1ef24306fa770773e6dd6a2126094eda Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/models.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/packages.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/packages.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..04a4d3f5e33bc33f4d2dd8e9a4cd1829ea0d7fad Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/packages.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/sessions.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/sessions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..21222e25943cc9abe1d0363eb1c39efe8b83323a Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/sessions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/status_codes.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/status_codes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3553e676d8dc6758b4c49a451d37b6561c091ef2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/status_codes.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/structures.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/structures.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..22e9bb0057ae2dd6befd921d86aa727600579729 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/requests/__pycache__/structures.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/requests/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fee0f686e05a2ade098321d7258db2fe3f2284b0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/requests/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/requests/__version__.py b/venv/lib/python3.10/site-packages/requests/__version__.py new file mode 100644 index 0000000000000000000000000000000000000000..5063c3f8ee7980493efcc30c24f7e7582714aa81 --- /dev/null +++ b/venv/lib/python3.10/site-packages/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = "requests" +__description__ = "Python HTTP for Humans." +__url__ = "https://requests.readthedocs.io" +__version__ = "2.31.0" +__build__ = 0x023100 +__author__ = "Kenneth Reitz" +__author_email__ = "me@kennethreitz.org" +__license__ = "Apache 2.0" +__copyright__ = "Copyright Kenneth Reitz" +__cake__ = "\u2728 \U0001f370 \u2728" diff --git a/venv/lib/python3.10/site-packages/requests/packages.py b/venv/lib/python3.10/site-packages/requests/packages.py new file mode 100644 index 0000000000000000000000000000000000000000..77c45c9e90cdf2bcd60eea3cac9c8cf56cca2c08 --- /dev/null +++ b/venv/lib/python3.10/site-packages/requests/packages.py @@ -0,0 +1,28 @@ +import sys + +try: + import chardet +except ImportError: + import warnings + + import charset_normalizer as chardet + + warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer") + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. :) + +for package in ("urllib3", "idna"): + locals()[package] = __import__(package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == package or mod.startswith(f"{package}."): + sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] + +target = chardet.__name__ +for mod in list(sys.modules): + if mod == target or mod.startswith(f"{target}."): + target = target.replace(target, "chardet") + sys.modules[f"requests.packages.{target}"] = sys.modules[mod] +# Kinda cool, though, right? diff --git a/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..68b7d66c97d66c58de883ed0c451af2b3183e6f3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/LICENSE @@ -0,0 +1,203 @@ +Copyright 2018- The Hugging Face team. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
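
The pathvalidate modules added above expose their functionality through top-level convenience functions. The following is a minimal usage sketch, not part of the vendored sources, assuming the package is importable as ``pathvalidate`` and behaves as its docstrings above describe::

    from pathvalidate import sanitize_filename, sanitize_filepath, validate_filepath
    from pathvalidate.error import ValidationError
    from pathvalidate.handler import ReservedNameHandler

    # "CON" is a reserved name on Windows; the default handler
    # (ReservedNameHandler.add_trailing_underscore) should rewrite it to "CON_".
    print(sanitize_filename("CON", platform="windows"))

    # ReservedNameHandler.as_is keeps the reserved name untouched, which is
    # also what the deprecated check_reserved=False path maps to.
    print(sanitize_filename("CON", platform="windows",
                            reserved_name_handler=ReservedNameHandler.as_is))

    # sanitize_filepath() drops invalid characters from each path component.
    print(sanitize_filepath('fi:l*e/p"a?th', platform="windows"))

    # validate_filepath() raises a ValidationError subclass instead of rewriting.
    try:
        validate_filepath('fi:l*e/p"a?th', platform="windows")
    except ValidationError as e:
        print(e.reason)  # expected: ErrorReason.INVALID_CHARACTER

The ``pathvalidate.argparse`` and ``pathvalidate.click`` helpers above wrap these same calls, converting a ``ValidationError`` into ``ArgumentTypeError`` or ``click.BadParameter`` for CLI use.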
diff --git a/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..eba5ca4d48278b929238257b6ad1adb625e4d017 --- /dev/null +++ b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/METADATA @@ -0,0 +1,1045 @@ +Metadata-Version: 2.1 +Name: transformers +Version: 4.40.1 +Summary: State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow +Home-page: https://github.com/huggingface/transformers +Author: The Hugging Face team (past and future) with the help of all our contributors (https://github.com/huggingface/transformers/graphs/contributors) +Author-email: transformers@huggingface.co +License: Apache 2.0 License +Keywords: NLP vision speech deep learning transformer pytorch tensorflow jax BERT GPT-2 Wav2Vec2 ViT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.8.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: filelock +Requires-Dist: huggingface-hub (<1.0,>=0.19.3) +Requires-Dist: numpy (>=1.17) +Requires-Dist: packaging (>=20.0) +Requires-Dist: pyyaml (>=5.1) +Requires-Dist: regex (!=2019.12.17) +Requires-Dist: requests +Requires-Dist: tokenizers (<0.20,>=0.19) +Requires-Dist: safetensors (>=0.4.1) +Requires-Dist: tqdm (>=4.27) +Provides-Extra: accelerate +Requires-Dist: accelerate (>=0.21.0) ; extra == 'accelerate' +Provides-Extra: agents +Requires-Dist: diffusers ; extra == 'agents' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'agents' +Requires-Dist: datasets (!=2.5.0) ; extra == 'agents' +Requires-Dist: torch ; extra == 'agents' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'agents' +Requires-Dist: opencv-python ; extra == 'agents' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'agents' +Provides-Extra: all +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'all' +Requires-Dist: onnxconverter-common ; extra == 'all' +Requires-Dist: tf2onnx ; extra == 'all' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'all' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'all' +Requires-Dist: torch ; extra == 'all' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'all' +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'all' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'all' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'all' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'all' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'all' +Requires-Dist: protobuf ; extra == 'all' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'all' +Requires-Dist: torchaudio ; extra == 'all' +Requires-Dist: librosa ; extra == 'all' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'all' +Requires-Dist: phonemizer ; extra == 'all' +Requires-Dist: kenlm ; extra == 'all' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'all' +Requires-Dist: optuna ; extra == 'all' +Requires-Dist: ray[tune] (>=2.7.0) ; extra 
== 'all' +Requires-Dist: sigopt ; extra == 'all' +Requires-Dist: timm ; extra == 'all' +Requires-Dist: torchvision ; extra == 'all' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'all' +Requires-Dist: decord (==0.6.0) ; extra == 'all' +Requires-Dist: av (==9.2.0) ; extra == 'all' +Provides-Extra: audio +Requires-Dist: librosa ; extra == 'audio' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'audio' +Requires-Dist: phonemizer ; extra == 'audio' +Requires-Dist: kenlm ; extra == 'audio' +Provides-Extra: codecarbon +Requires-Dist: codecarbon (==1.2.0) ; extra == 'codecarbon' +Provides-Extra: deepspeed +Requires-Dist: deepspeed (>=0.9.3) ; extra == 'deepspeed' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'deepspeed' +Provides-Extra: deepspeed-testing +Requires-Dist: deepspeed (>=0.9.3) ; extra == 'deepspeed-testing' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'deepspeed-testing' +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'deepspeed-testing' +Requires-Dist: pytest-xdist ; extra == 'deepspeed-testing' +Requires-Dist: timeout-decorator ; extra == 'deepspeed-testing' +Requires-Dist: parameterized ; extra == 'deepspeed-testing' +Requires-Dist: psutil ; extra == 'deepspeed-testing' +Requires-Dist: datasets (!=2.5.0) ; extra == 'deepspeed-testing' +Requires-Dist: dill (<0.3.5) ; extra == 'deepspeed-testing' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'deepspeed-testing' +Requires-Dist: pytest-timeout ; extra == 'deepspeed-testing' +Requires-Dist: ruff (==0.1.5) ; extra == 'deepspeed-testing' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'deepspeed-testing' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'deepspeed-testing' +Requires-Dist: nltk ; extra == 'deepspeed-testing' +Requires-Dist: GitPython (<3.1.19) ; extra == 'deepspeed-testing' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'deepspeed-testing' +Requires-Dist: protobuf ; extra == 'deepspeed-testing' +Requires-Dist: sacremoses ; extra == 'deepspeed-testing' +Requires-Dist: rjieba ; extra == 'deepspeed-testing' +Requires-Dist: beautifulsoup4 ; extra == 'deepspeed-testing' +Requires-Dist: tensorboard ; extra == 'deepspeed-testing' +Requires-Dist: pydantic ; extra == 'deepspeed-testing' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'deepspeed-testing' +Requires-Dist: faiss-cpu ; extra == 'deepspeed-testing' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'deepspeed-testing' +Requires-Dist: optuna ; extra == 'deepspeed-testing' +Provides-Extra: dev +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'dev' +Requires-Dist: onnxconverter-common ; extra == 'dev' +Requires-Dist: tf2onnx ; extra == 'dev' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'dev' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'dev' +Requires-Dist: torch ; extra == 'dev' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'dev' +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'dev' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'dev' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'dev' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'dev' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'dev' +Requires-Dist: protobuf ; extra == 'dev' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'dev' +Requires-Dist: torchaudio ; extra == 'dev' +Requires-Dist: librosa ; extra == 'dev' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'dev' +Requires-Dist: phonemizer ; extra == 'dev' +Requires-Dist: kenlm ; extra == 'dev' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'dev' 
+Requires-Dist: optuna ; extra == 'dev' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'dev' +Requires-Dist: sigopt ; extra == 'dev' +Requires-Dist: timm ; extra == 'dev' +Requires-Dist: torchvision ; extra == 'dev' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'dev' +Requires-Dist: decord (==0.6.0) ; extra == 'dev' +Requires-Dist: av (==9.2.0) ; extra == 'dev' +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'dev' +Requires-Dist: pytest-xdist ; extra == 'dev' +Requires-Dist: timeout-decorator ; extra == 'dev' +Requires-Dist: parameterized ; extra == 'dev' +Requires-Dist: psutil ; extra == 'dev' +Requires-Dist: datasets (!=2.5.0) ; extra == 'dev' +Requires-Dist: dill (<0.3.5) ; extra == 'dev' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'dev' +Requires-Dist: pytest-timeout ; extra == 'dev' +Requires-Dist: ruff (==0.1.5) ; extra == 'dev' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'dev' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'dev' +Requires-Dist: nltk ; extra == 'dev' +Requires-Dist: GitPython (<3.1.19) ; extra == 'dev' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev' +Requires-Dist: sacremoses ; extra == 'dev' +Requires-Dist: rjieba ; extra == 'dev' +Requires-Dist: beautifulsoup4 ; extra == 'dev' +Requires-Dist: tensorboard ; extra == 'dev' +Requires-Dist: pydantic ; extra == 'dev' +Requires-Dist: faiss-cpu ; extra == 'dev' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'dev' +Requires-Dist: isort (>=5.5.4) ; extra == 'dev' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev' +Requires-Dist: fugashi (>=1.0) ; extra == 'dev' +Requires-Dist: ipadic (<2.0,>=1.0.0) ; extra == 'dev' +Requires-Dist: unidic-lite (>=1.0.7) ; extra == 'dev' +Requires-Dist: unidic (>=1.0.2) ; extra == 'dev' +Requires-Dist: sudachipy (>=0.6.6) ; extra == 'dev' +Requires-Dist: sudachidict-core (>=20220729) ; extra == 'dev' +Requires-Dist: rhoknp (<1.3.1,>=1.1.0) ; extra == 'dev' +Requires-Dist: hf-doc-builder ; extra == 'dev' +Requires-Dist: scikit-learn ; extra == 'dev' +Provides-Extra: dev-tensorflow +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'dev-tensorflow' +Requires-Dist: pytest-xdist ; extra == 'dev-tensorflow' +Requires-Dist: timeout-decorator ; extra == 'dev-tensorflow' +Requires-Dist: parameterized ; extra == 'dev-tensorflow' +Requires-Dist: psutil ; extra == 'dev-tensorflow' +Requires-Dist: datasets (!=2.5.0) ; extra == 'dev-tensorflow' +Requires-Dist: dill (<0.3.5) ; extra == 'dev-tensorflow' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'dev-tensorflow' +Requires-Dist: pytest-timeout ; extra == 'dev-tensorflow' +Requires-Dist: ruff (==0.1.5) ; extra == 'dev-tensorflow' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'dev-tensorflow' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'dev-tensorflow' +Requires-Dist: nltk ; extra == 'dev-tensorflow' +Requires-Dist: GitPython (<3.1.19) ; extra == 'dev-tensorflow' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev-tensorflow' +Requires-Dist: protobuf ; extra == 'dev-tensorflow' +Requires-Dist: sacremoses ; extra == 'dev-tensorflow' +Requires-Dist: rjieba ; extra == 'dev-tensorflow' +Requires-Dist: beautifulsoup4 ; extra == 'dev-tensorflow' +Requires-Dist: tensorboard ; extra == 'dev-tensorflow' +Requires-Dist: pydantic ; extra == 'dev-tensorflow' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'dev-tensorflow' +Requires-Dist: faiss-cpu ; extra == 'dev-tensorflow' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'dev-tensorflow' +Requires-Dist: 
tensorflow (<2.16,>=2.6) ; extra == 'dev-tensorflow' +Requires-Dist: onnxconverter-common ; extra == 'dev-tensorflow' +Requires-Dist: tf2onnx ; extra == 'dev-tensorflow' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'dev-tensorflow' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'dev-tensorflow' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'dev-tensorflow' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'dev-tensorflow' +Requires-Dist: isort (>=5.5.4) ; extra == 'dev-tensorflow' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev-tensorflow' +Requires-Dist: hf-doc-builder ; extra == 'dev-tensorflow' +Requires-Dist: scikit-learn ; extra == 'dev-tensorflow' +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'dev-tensorflow' +Requires-Dist: onnxruntime-tools (>=1.4.2) ; extra == 'dev-tensorflow' +Requires-Dist: librosa ; extra == 'dev-tensorflow' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'dev-tensorflow' +Requires-Dist: phonemizer ; extra == 'dev-tensorflow' +Requires-Dist: kenlm ; extra == 'dev-tensorflow' +Provides-Extra: dev-torch +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'dev-torch' +Requires-Dist: pytest-xdist ; extra == 'dev-torch' +Requires-Dist: timeout-decorator ; extra == 'dev-torch' +Requires-Dist: parameterized ; extra == 'dev-torch' +Requires-Dist: psutil ; extra == 'dev-torch' +Requires-Dist: datasets (!=2.5.0) ; extra == 'dev-torch' +Requires-Dist: dill (<0.3.5) ; extra == 'dev-torch' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'dev-torch' +Requires-Dist: pytest-timeout ; extra == 'dev-torch' +Requires-Dist: ruff (==0.1.5) ; extra == 'dev-torch' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'dev-torch' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'dev-torch' +Requires-Dist: nltk ; extra == 'dev-torch' +Requires-Dist: GitPython (<3.1.19) ; extra == 'dev-torch' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev-torch' +Requires-Dist: protobuf ; extra == 'dev-torch' +Requires-Dist: sacremoses ; extra == 'dev-torch' +Requires-Dist: rjieba ; extra == 'dev-torch' +Requires-Dist: beautifulsoup4 ; extra == 'dev-torch' +Requires-Dist: tensorboard ; extra == 'dev-torch' +Requires-Dist: pydantic ; extra == 'dev-torch' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'dev-torch' +Requires-Dist: faiss-cpu ; extra == 'dev-torch' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'dev-torch' +Requires-Dist: torch ; extra == 'dev-torch' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'dev-torch' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'dev-torch' +Requires-Dist: torchaudio ; extra == 'dev-torch' +Requires-Dist: librosa ; extra == 'dev-torch' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'dev-torch' +Requires-Dist: phonemizer ; extra == 'dev-torch' +Requires-Dist: kenlm ; extra == 'dev-torch' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'dev-torch' +Requires-Dist: optuna ; extra == 'dev-torch' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'dev-torch' +Requires-Dist: sigopt ; extra == 'dev-torch' +Requires-Dist: timm ; extra == 'dev-torch' +Requires-Dist: torchvision ; extra == 'dev-torch' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'dev-torch' +Requires-Dist: isort (>=5.5.4) ; extra == 'dev-torch' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev-torch' +Requires-Dist: fugashi (>=1.0) ; extra == 'dev-torch' +Requires-Dist: ipadic (<2.0,>=1.0.0) ; extra == 'dev-torch' +Requires-Dist: unidic-lite (>=1.0.7) ; extra == 'dev-torch' +Requires-Dist: unidic (>=1.0.2) ; extra == 'dev-torch' 
+Requires-Dist: sudachipy (>=0.6.6) ; extra == 'dev-torch' +Requires-Dist: sudachidict-core (>=20220729) ; extra == 'dev-torch' +Requires-Dist: rhoknp (<1.3.1,>=1.1.0) ; extra == 'dev-torch' +Requires-Dist: hf-doc-builder ; extra == 'dev-torch' +Requires-Dist: scikit-learn ; extra == 'dev-torch' +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'dev-torch' +Requires-Dist: onnxruntime-tools (>=1.4.2) ; extra == 'dev-torch' +Provides-Extra: docs +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'docs' +Requires-Dist: onnxconverter-common ; extra == 'docs' +Requires-Dist: tf2onnx ; extra == 'docs' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'docs' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'docs' +Requires-Dist: torch ; extra == 'docs' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'docs' +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'docs' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'docs' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'docs' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'docs' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'docs' +Requires-Dist: protobuf ; extra == 'docs' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'docs' +Requires-Dist: torchaudio ; extra == 'docs' +Requires-Dist: librosa ; extra == 'docs' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'docs' +Requires-Dist: phonemizer ; extra == 'docs' +Requires-Dist: kenlm ; extra == 'docs' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'docs' +Requires-Dist: optuna ; extra == 'docs' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'docs' +Requires-Dist: sigopt ; extra == 'docs' +Requires-Dist: timm ; extra == 'docs' +Requires-Dist: torchvision ; extra == 'docs' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'docs' +Requires-Dist: decord (==0.6.0) ; extra == 'docs' +Requires-Dist: av (==9.2.0) ; extra == 'docs' +Requires-Dist: hf-doc-builder ; extra == 'docs' +Provides-Extra: docs_specific +Requires-Dist: hf-doc-builder ; extra == 'docs_specific' +Provides-Extra: flax +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'flax' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'flax' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'flax' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'flax' +Provides-Extra: flax-speech +Requires-Dist: librosa ; extra == 'flax-speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'flax-speech' +Requires-Dist: phonemizer ; extra == 'flax-speech' +Requires-Dist: kenlm ; extra == 'flax-speech' +Provides-Extra: ftfy +Requires-Dist: ftfy ; extra == 'ftfy' +Provides-Extra: integrations +Requires-Dist: optuna ; extra == 'integrations' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'integrations' +Requires-Dist: sigopt ; extra == 'integrations' +Provides-Extra: ja +Requires-Dist: fugashi (>=1.0) ; extra == 'ja' +Requires-Dist: ipadic (<2.0,>=1.0.0) ; extra == 'ja' +Requires-Dist: unidic-lite (>=1.0.7) ; extra == 'ja' +Requires-Dist: unidic (>=1.0.2) ; extra == 'ja' +Requires-Dist: sudachipy (>=0.6.6) ; extra == 'ja' +Requires-Dist: sudachidict-core (>=20220729) ; extra == 'ja' +Requires-Dist: rhoknp (<1.3.1,>=1.1.0) ; extra == 'ja' +Provides-Extra: modelcreation +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'modelcreation' +Provides-Extra: natten +Requires-Dist: natten (<0.15.0,>=0.14.6) ; extra == 'natten' +Provides-Extra: onnx +Requires-Dist: onnxconverter-common ; extra == 'onnx' +Requires-Dist: tf2onnx ; extra == 'onnx' +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'onnx' +Requires-Dist: onnxruntime-tools (>=1.4.2) 
; extra == 'onnx' +Provides-Extra: onnxruntime +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'onnxruntime' +Requires-Dist: onnxruntime-tools (>=1.4.2) ; extra == 'onnxruntime' +Provides-Extra: optuna +Requires-Dist: optuna ; extra == 'optuna' +Provides-Extra: quality +Requires-Dist: datasets (!=2.5.0) ; extra == 'quality' +Requires-Dist: isort (>=5.5.4) ; extra == 'quality' +Requires-Dist: ruff (==0.1.5) ; extra == 'quality' +Requires-Dist: GitPython (<3.1.19) ; extra == 'quality' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'quality' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'quality' +Provides-Extra: ray +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'ray' +Provides-Extra: retrieval +Requires-Dist: faiss-cpu ; extra == 'retrieval' +Requires-Dist: datasets (!=2.5.0) ; extra == 'retrieval' +Provides-Extra: sagemaker +Requires-Dist: sagemaker (>=2.31.0) ; extra == 'sagemaker' +Provides-Extra: sentencepiece +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'sentencepiece' +Requires-Dist: protobuf ; extra == 'sentencepiece' +Provides-Extra: serving +Requires-Dist: pydantic ; extra == 'serving' +Requires-Dist: uvicorn ; extra == 'serving' +Requires-Dist: fastapi ; extra == 'serving' +Requires-Dist: starlette ; extra == 'serving' +Provides-Extra: sigopt +Requires-Dist: sigopt ; extra == 'sigopt' +Provides-Extra: sklearn +Requires-Dist: scikit-learn ; extra == 'sklearn' +Provides-Extra: speech +Requires-Dist: torchaudio ; extra == 'speech' +Requires-Dist: librosa ; extra == 'speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'speech' +Requires-Dist: phonemizer ; extra == 'speech' +Requires-Dist: kenlm ; extra == 'speech' +Provides-Extra: testing +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'testing' +Requires-Dist: pytest-xdist ; extra == 'testing' +Requires-Dist: timeout-decorator ; extra == 'testing' +Requires-Dist: parameterized ; extra == 'testing' +Requires-Dist: psutil ; extra == 'testing' +Requires-Dist: datasets (!=2.5.0) ; extra == 'testing' +Requires-Dist: dill (<0.3.5) ; extra == 'testing' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'testing' +Requires-Dist: pytest-timeout ; extra == 'testing' +Requires-Dist: ruff (==0.1.5) ; extra == 'testing' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'testing' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'testing' +Requires-Dist: nltk ; extra == 'testing' +Requires-Dist: GitPython (<3.1.19) ; extra == 'testing' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'testing' +Requires-Dist: protobuf ; extra == 'testing' +Requires-Dist: sacremoses ; extra == 'testing' +Requires-Dist: rjieba ; extra == 'testing' +Requires-Dist: beautifulsoup4 ; extra == 'testing' +Requires-Dist: tensorboard ; extra == 'testing' +Requires-Dist: pydantic ; extra == 'testing' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'testing' +Requires-Dist: faiss-cpu ; extra == 'testing' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'testing' +Provides-Extra: tf +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'tf' +Requires-Dist: onnxconverter-common ; extra == 'tf' +Requires-Dist: tf2onnx ; extra == 'tf' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'tf' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'tf' +Provides-Extra: tf-cpu +Requires-Dist: tensorflow-cpu (<2.16,>=2.6) ; extra == 'tf-cpu' +Requires-Dist: onnxconverter-common ; extra == 'tf-cpu' +Requires-Dist: tf2onnx ; extra == 'tf-cpu' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'tf-cpu' +Requires-Dist: keras-nlp 
(>=0.3.1) ; extra == 'tf-cpu' +Provides-Extra: tf-speech +Requires-Dist: librosa ; extra == 'tf-speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'tf-speech' +Requires-Dist: phonemizer ; extra == 'tf-speech' +Requires-Dist: kenlm ; extra == 'tf-speech' +Provides-Extra: timm +Requires-Dist: timm ; extra == 'timm' +Provides-Extra: tokenizers +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'tokenizers' +Provides-Extra: torch +Requires-Dist: torch ; extra == 'torch' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'torch' +Provides-Extra: torch-speech +Requires-Dist: torchaudio ; extra == 'torch-speech' +Requires-Dist: librosa ; extra == 'torch-speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'torch-speech' +Requires-Dist: phonemizer ; extra == 'torch-speech' +Requires-Dist: kenlm ; extra == 'torch-speech' +Provides-Extra: torch-vision +Requires-Dist: torchvision ; extra == 'torch-vision' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'torch-vision' +Provides-Extra: torchhub +Requires-Dist: filelock ; extra == 'torchhub' +Requires-Dist: huggingface-hub (<1.0,>=0.19.3) ; extra == 'torchhub' +Requires-Dist: importlib-metadata ; extra == 'torchhub' +Requires-Dist: numpy (>=1.17) ; extra == 'torchhub' +Requires-Dist: packaging (>=20.0) ; extra == 'torchhub' +Requires-Dist: protobuf ; extra == 'torchhub' +Requires-Dist: regex (!=2019.12.17) ; extra == 'torchhub' +Requires-Dist: requests ; extra == 'torchhub' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'torchhub' +Requires-Dist: torch ; extra == 'torchhub' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'torchhub' +Requires-Dist: tqdm (>=4.27) ; extra == 'torchhub' +Provides-Extra: video +Requires-Dist: decord (==0.6.0) ; extra == 'video' +Requires-Dist: av (==9.2.0) ; extra == 'video' +Provides-Extra: vision +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'vision' + + + +

+[Banner image: Hugging Face Transformers Library]
+
+[Badges: Build | GitHub | Documentation | GitHub release | Contributor Covenant | DOI]
+
+English | 简体中文 | 繁體中文 | 한국어 | Español | 日本語 | हिन्दी | Русский | Português | తెలుగు | Français | Deutsch | Tiếng Việt
+
+State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow
+
+ +🤗 Transformers provides thousands of pretrained models to perform tasks on different modalities such as text, vision, and audio. + +These models can be applied to: + +* 📝 Text, for tasks like text classification, information extraction, question answering, summarization, translation, and text generation, in over 100 languages. +* 🖼️ Images, for tasks like image classification, object detection, and segmentation. +* 🗣️ Audio, for tasks like speech recognition and audio classification. + +Transformer models can also perform tasks on **several modalities combined**, such as table question answering, optical character recognition, information extraction from scanned documents, video classification, and visual question answering. + +🤗 Transformers provides APIs to quickly download and use those pretrained models on a given text, fine-tune them on your own datasets and then share them with the community on our [model hub](https://huggingface.co/models). At the same time, each Python module defining an architecture is fully standalone and can be modified to enable quick research experiments. + +🤗 Transformers is backed by the three most popular deep learning libraries — [Jax](https://jax.readthedocs.io/en/latest/), [PyTorch](https://pytorch.org/) and [TensorFlow](https://www.tensorflow.org/) — with seamless integration between them. It's straightforward to train your models with one before loading them for inference with the other. + +## Online demos + +You can test most of our models directly on their pages from the [model hub](https://huggingface.co/models). We also offer [private model hosting, versioning, & an inference API](https://huggingface.co/pricing) for public and private models. + +Here are a few examples: + +In Natural Language Processing: +- [Masked word completion with BERT](https://huggingface.co/google-bert/bert-base-uncased?text=Paris+is+the+%5BMASK%5D+of+France) +- [Named Entity Recognition with Electra](https://huggingface.co/dbmdz/electra-large-discriminator-finetuned-conll03-english?text=My+name+is+Sarah+and+I+live+in+London+city) +- [Text generation with Mistral](https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2) +- [Natural Language Inference with RoBERTa](https://huggingface.co/FacebookAI/roberta-large-mnli?text=The+dog+was+lost.+Nobody+lost+any+animal) +- [Summarization with BART](https://huggingface.co/facebook/bart-large-cnn?text=The+tower+is+324+metres+%281%2C063+ft%29+tall%2C+about+the+same+height+as+an+81-storey+building%2C+and+the+tallest+structure+in+Paris.+Its+base+is+square%2C+measuring+125+metres+%28410+ft%29+on+each+side.+During+its+construction%2C+the+Eiffel+Tower+surpassed+the+Washington+Monument+to+become+the+tallest+man-made+structure+in+the+world%2C+a+title+it+held+for+41+years+until+the+Chrysler+Building+in+New+York+City+was+finished+in+1930.+It+was+the+first+structure+to+reach+a+height+of+300+metres.+Due+to+the+addition+of+a+broadcasting+aerial+at+the+top+of+the+tower+in+1957%2C+it+is+now+taller+than+the+Chrysler+Building+by+5.2+metres+%2817+ft%29.+Excluding+transmitters%2C+the+Eiffel+Tower+is+the+second+tallest+free-standing+structure+in+France+after+the+Millau+Viaduct) +- [Question answering with 
DistilBERT](https://huggingface.co/distilbert/distilbert-base-uncased-distilled-squad?text=Which+name+is+also+used+to+describe+the+Amazon+rainforest+in+English%3F&context=The+Amazon+rainforest+%28Portuguese%3A+Floresta+Amaz%C3%B4nica+or+Amaz%C3%B4nia%3B+Spanish%3A+Selva+Amaz%C3%B3nica%2C+Amazon%C3%ADa+or+usually+Amazonia%3B+French%3A+For%C3%AAt+amazonienne%3B+Dutch%3A+Amazoneregenwoud%29%2C+also+known+in+English+as+Amazonia+or+the+Amazon+Jungle%2C+is+a+moist+broadleaf+forest+that+covers+most+of+the+Amazon+basin+of+South+America.+This+basin+encompasses+7%2C000%2C000+square+kilometres+%282%2C700%2C000+sq+mi%29%2C+of+which+5%2C500%2C000+square+kilometres+%282%2C100%2C000+sq+mi%29+are+covered+by+the+rainforest.+This+region+includes+territory+belonging+to+nine+nations.+The+majority+of+the+forest+is+contained+within+Brazil%2C+with+60%25+of+the+rainforest%2C+followed+by+Peru+with+13%25%2C+Colombia+with+10%25%2C+and+with+minor+amounts+in+Venezuela%2C+Ecuador%2C+Bolivia%2C+Guyana%2C+Suriname+and+French+Guiana.+States+or+departments+in+four+nations+contain+%22Amazonas%22+in+their+names.+The+Amazon+represents+over+half+of+the+planet%27s+remaining+rainforests%2C+and+comprises+the+largest+and+most+biodiverse+tract+of+tropical+rainforest+in+the+world%2C+with+an+estimated+390+billion+individual+trees+divided+into+16%2C000+species) +- [Translation with T5](https://huggingface.co/google-t5/t5-base?text=My+name+is+Wolfgang+and+I+live+in+Berlin) + +In Computer Vision: +- [Image classification with ViT](https://huggingface.co/google/vit-base-patch16-224) +- [Object Detection with DETR](https://huggingface.co/facebook/detr-resnet-50) +- [Semantic Segmentation with SegFormer](https://huggingface.co/nvidia/segformer-b0-finetuned-ade-512-512) +- [Panoptic Segmentation with Mask2Former](https://huggingface.co/facebook/mask2former-swin-large-coco-panoptic) +- [Depth Estimation with Depth Anything](https://huggingface.co/docs/transformers/main/model_doc/depth_anything) +- [Video Classification with VideoMAE](https://huggingface.co/docs/transformers/model_doc/videomae) +- [Universal Segmentation with OneFormer](https://huggingface.co/shi-labs/oneformer_ade20k_dinat_large) + +In Audio: +- [Automatic Speech Recognition with Whisper](https://huggingface.co/openai/whisper-large-v3) +- [Keyword Spotting with Wav2Vec2](https://huggingface.co/superb/wav2vec2-base-superb-ks) +- [Audio Classification with Audio Spectrogram Transformer](https://huggingface.co/MIT/ast-finetuned-audioset-10-10-0.4593) + +In Multimodal tasks: +- [Table Question Answering with TAPAS](https://huggingface.co/google/tapas-base-finetuned-wtq) +- [Visual Question Answering with ViLT](https://huggingface.co/dandelin/vilt-b32-finetuned-vqa) +- [Image captioning with LLaVa](https://huggingface.co/llava-hf/llava-1.5-7b-hf) +- [Zero-shot Image Classification with SigLIP](https://huggingface.co/google/siglip-so400m-patch14-384) +- [Document Question Answering with LayoutLM](https://huggingface.co/impira/layoutlm-document-qa) +- [Zero-shot Video Classification with X-CLIP](https://huggingface.co/docs/transformers/model_doc/xclip) +- [Zero-shot Object Detection with OWLv2](https://huggingface.co/docs/transformers/en/model_doc/owlv2) +- [Zero-shot Image Segmentation with CLIPSeg](https://huggingface.co/docs/transformers/model_doc/clipseg) +- [Automatic Mask Generation with SAM](https://huggingface.co/docs/transformers/model_doc/sam) + + +## 100 projects using Transformers + +Transformers is more than a toolkit to use pretrained models: it's a community of 
projects built around it and the +Hugging Face Hub. We want Transformers to enable developers, researchers, students, professors, engineers, and anyone +else to build their dream projects. + +In order to celebrate the 100,000 stars of transformers, we have decided to put the spotlight on the +community, and we have created the [awesome-transformers](./awesome-transformers.md) page which lists 100 +incredible projects built in the vicinity of transformers. + +If you own or use a project that you believe should be part of the list, please open a PR to add it! + +## If you are looking for custom support from the Hugging Face team + +[Banner: HuggingFace Expert Acceleration Program] +
+ +## Quick tour + +To immediately use a model on a given input (text, image, audio, ...), we provide the `pipeline` API. Pipelines group together a pretrained model with the preprocessing that was used during that model's training. Here is how to quickly use a pipeline to classify positive versus negative texts: + +```python +>>> from transformers import pipeline + +# Allocate a pipeline for sentiment-analysis +>>> classifier = pipeline('sentiment-analysis') +>>> classifier('We are very happy to introduce pipeline to the transformers repository.') +[{'label': 'POSITIVE', 'score': 0.9996980428695679}] +``` + +The second line of code downloads and caches the pretrained model used by the pipeline, while the third evaluates it on the given text. Here, the answer is "positive" with a confidence of 99.97%. + +Many tasks have a pre-trained `pipeline` ready to go, in NLP but also in computer vision and speech. For example, we can easily extract detected objects in an image: + +``` python +>>> import requests +>>> from PIL import Image +>>> from transformers import pipeline + +# Download an image with cute cats +>>> url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample.png" +>>> image_data = requests.get(url, stream=True).raw +>>> image = Image.open(image_data) + +# Allocate a pipeline for object detection +>>> object_detector = pipeline('object-detection') +>>> object_detector(image) +[{'score': 0.9982201457023621, + 'label': 'remote', + 'box': {'xmin': 40, 'ymin': 70, 'xmax': 175, 'ymax': 117}}, + {'score': 0.9960021376609802, + 'label': 'remote', + 'box': {'xmin': 333, 'ymin': 72, 'xmax': 368, 'ymax': 187}}, + {'score': 0.9954745173454285, + 'label': 'couch', + 'box': {'xmin': 0, 'ymin': 1, 'xmax': 639, 'ymax': 473}}, + {'score': 0.9988006353378296, + 'label': 'cat', + 'box': {'xmin': 13, 'ymin': 52, 'xmax': 314, 'ymax': 470}}, + {'score': 0.9986783862113953, + 'label': 'cat', + 'box': {'xmin': 345, 'ymin': 23, 'xmax': 640, 'ymax': 368}}] +``` + +Here, we get a list of objects detected in the image, with a box surrounding the object and a confidence score. Here is the original image on the left, with the predictions displayed on the right: + +

+[Images: the sample photo, and the same photo annotated with the predicted bounding boxes and labels]
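+
+Speech pipelines follow the same pattern. Here is a minimal sketch (the file name `sample.flac` is a hypothetical placeholder for your own recording; decoding a local path like this requires `ffmpeg`, and the transcription returned depends on the default model the pipeline downloads):
+
+```python
+>>> from transformers import pipeline
+
+# Allocate a pipeline for automatic speech recognition
+>>> transcriber = pipeline('automatic-speech-recognition')
+
+# Transcribe a local audio file (hypothetical path); the pipeline decodes
+# the file and returns a dict with the predicted transcription.
+>>> transcriber('sample.flac')
+{'text': '...'}
+```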

+ +You can learn more about the tasks supported by the `pipeline` API in [this tutorial](https://huggingface.co/docs/transformers/task_summary). + +In addition to `pipeline`, to download and use any of the pretrained models on your given task, all it takes is three lines of code. Here is the PyTorch version: +```python +>>> from transformers import AutoTokenizer, AutoModel + +>>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased") +>>> model = AutoModel.from_pretrained("google-bert/bert-base-uncased") + +>>> inputs = tokenizer("Hello world!", return_tensors="pt") +>>> outputs = model(**inputs) +``` + +And here is the equivalent code for TensorFlow: +```python +>>> from transformers import AutoTokenizer, TFAutoModel + +>>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased") +>>> model = TFAutoModel.from_pretrained("google-bert/bert-base-uncased") + +>>> inputs = tokenizer("Hello world!", return_tensors="tf") +>>> outputs = model(**inputs) +``` + +The tokenizer is responsible for all the preprocessing the pretrained model expects and can be called directly on a single string (as in the above examples) or a list. It will output a dictionary that you can use in downstream code or simply directly pass to your model using the ** argument unpacking operator. + +The model itself is a regular [Pytorch `nn.Module`](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) or a [TensorFlow `tf.keras.Model`](https://www.tensorflow.org/api_docs/python/tf/keras/Model) (depending on your backend) which you can use as usual. [This tutorial](https://huggingface.co/docs/transformers/training) explains how to integrate such a model into a classic PyTorch or TensorFlow training loop, or how to use our `Trainer` API to quickly fine-tune on a new dataset. + +## Why should I use transformers? + +1. Easy-to-use state-of-the-art models: + - High performance on natural language understanding & generation, computer vision, and audio tasks. + - Low barrier to entry for educators and practitioners. + - Few user-facing abstractions with just three classes to learn. + - A unified API for using all our pretrained models. + +1. Lower compute costs, smaller carbon footprint: + - Researchers can share trained models instead of always retraining. + - Practitioners can reduce compute time and production costs. + - Dozens of architectures with over 400,000 pretrained models across all modalities. + +1. Choose the right framework for every part of a model's lifetime: + - Train state-of-the-art models in 3 lines of code. + - Move a single model between TF2.0/PyTorch/JAX frameworks at will. + - Seamlessly pick the right framework for training, evaluation, and production. + +1. Easily customize a model or an example to your needs: + - We provide examples for each architecture to reproduce the results published by its original authors. + - Model internals are exposed as consistently as possible. + - Model files can be used independently of the library for quick experiments. + +## Why shouldn't I use transformers? + +- This library is not a modular toolbox of building blocks for neural nets. The code in the model files is not refactored with additional abstractions on purpose, so that researchers can quickly iterate on each of the models without diving into additional abstractions/files. +- The training API is not intended to work on any model but is optimized to work with the models provided by the library. 
For generic machine learning loops, you should use another library (possibly, [Accelerate](https://huggingface.co/docs/accelerate)). +- While we strive to present as many use cases as possible, the scripts in our [examples folder](https://github.com/huggingface/transformers/tree/main/examples) are just that: examples. It is expected that they won't work out-of-the-box on your specific problem and that you will be required to change a few lines of code to adapt them to your needs. + +## Installation + +### With pip + +This repository is tested on Python 3.8+, Flax 0.4.1+, PyTorch 1.11+, and TensorFlow 2.6+. + +You should install 🤗 Transformers in a [virtual environment](https://docs.python.org/3/library/venv.html). If you're unfamiliar with Python virtual environments, check out the [user guide](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/). + +First, create a virtual environment with the version of Python you're going to use and activate it. + +Then, you will need to install at least one of Flax, PyTorch, or TensorFlow. +Please refer to the [TensorFlow installation page](https://www.tensorflow.org/install/), [PyTorch installation page](https://pytorch.org/get-started/locally/#start-locally) and/or [Flax](https://github.com/google/flax#quick-install) and [Jax](https://github.com/google/jax#installation) installation pages for the specific installation command for your platform. + +When one of those backends has been installed, 🤗 Transformers can be installed using pip as follows: + +```bash +pip install transformers +``` + +If you'd like to play with the examples or need the bleeding edge of the code and can't wait for a new release, you must [install the library from source](https://huggingface.co/docs/transformers/installation#installing-from-source). + +### With conda + +🤗 Transformers can be installed using conda as follows: + +```shell script +conda install conda-forge::transformers +``` + +> **_NOTE:_** Installing `transformers` from the `huggingface` channel is deprecated. + +Follow the installation pages of Flax, PyTorch or TensorFlow to see how to install them with conda. + +> **_NOTE:_** On Windows, you may be prompted to activate Developer Mode in order to benefit from caching. If this is not an option for you, please let us know in [this issue](https://github.com/huggingface/huggingface_hub/issues/1062). + +## Model architectures + +**[All the model checkpoints](https://huggingface.co/models)** provided by 🤗 Transformers are seamlessly integrated from the huggingface.co [model hub](https://huggingface.co/models), where they are uploaded directly by [users](https://huggingface.co/users) and [organizations](https://huggingface.co/organizations). + +Current number of checkpoints: ![](https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/models&color=brightgreen) + +🤗 Transformers currently provides the following architectures (see [here](https://huggingface.co/docs/transformers/model_summary) for a high-level summary of each of them): + +1. **[ALBERT](https://huggingface.co/docs/transformers/model_doc/albert)** (from Google Research and the Toyota Technological Institute at Chicago) released with the paper [ALBERT: A Lite BERT for Self-supervised Learning of Language Representations](https://arxiv.org/abs/1909.11942) by Zhenzhong Lan, Mingda Chen, Sebastian Goodman, Kevin Gimpel, Piyush Sharma, Radu Soricut. +1. 
**[ALIGN](https://huggingface.co/docs/transformers/model_doc/align)** (from Google Research) released with the paper [Scaling Up Visual and Vision-Language Representation Learning With Noisy Text Supervision](https://arxiv.org/abs/2102.05918) by Chao Jia, Yinfei Yang, Ye Xia, Yi-Ting Chen, Zarana Parekh, Hieu Pham, Quoc V. Le, Yunhsuan Sung, Zhen Li, Tom Duerig. +1. **[AltCLIP](https://huggingface.co/docs/transformers/model_doc/altclip)** (from BAAI) released with the paper [AltCLIP: Altering the Language Encoder in CLIP for Extended Language Capabilities](https://arxiv.org/abs/2211.06679) by Chen, Zhongzhi and Liu, Guang and Zhang, Bo-Wen and Ye, Fulong and Yang, Qinghong and Wu, Ledell. +1. **[Audio Spectrogram Transformer](https://huggingface.co/docs/transformers/model_doc/audio-spectrogram-transformer)** (from MIT) released with the paper [AST: Audio Spectrogram Transformer](https://arxiv.org/abs/2104.01778) by Yuan Gong, Yu-An Chung, James Glass. +1. **[Autoformer](https://huggingface.co/docs/transformers/model_doc/autoformer)** (from Tsinghua University) released with the paper [Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting](https://arxiv.org/abs/2106.13008) by Haixu Wu, Jiehui Xu, Jianmin Wang, Mingsheng Long. +1. **[Bark](https://huggingface.co/docs/transformers/model_doc/bark)** (from Suno) released in the repository [suno-ai/bark](https://github.com/suno-ai/bark) by Suno AI team. +1. **[BART](https://huggingface.co/docs/transformers/model_doc/bart)** (from Facebook) released with the paper [BART: Denoising Sequence-to-Sequence Pre-training for Natural Language Generation, Translation, and Comprehension](https://arxiv.org/abs/1910.13461) by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov, and Luke Zettlemoyer. +1. **[BARThez](https://huggingface.co/docs/transformers/model_doc/barthez)** (from École polytechnique) released with the paper [BARThez: a Skilled Pretrained French Sequence-to-Sequence Model](https://arxiv.org/abs/2010.12321) by Moussa Kamal Eddine, Antoine J.-P. Tixier, Michalis Vazirgiannis. +1. **[BARTpho](https://huggingface.co/docs/transformers/model_doc/bartpho)** (from VinAI Research) released with the paper [BARTpho: Pre-trained Sequence-to-Sequence Models for Vietnamese](https://arxiv.org/abs/2109.09701) by Nguyen Luong Tran, Duong Minh Le and Dat Quoc Nguyen. +1. **[BEiT](https://huggingface.co/docs/transformers/model_doc/beit)** (from Microsoft) released with the paper [BEiT: BERT Pre-Training of Image Transformers](https://arxiv.org/abs/2106.08254) by Hangbo Bao, Li Dong, Furu Wei. +1. **[BERT](https://huggingface.co/docs/transformers/model_doc/bert)** (from Google) released with the paper [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805) by Jacob Devlin, Ming-Wei Chang, Kenton Lee, and Kristina Toutanova. +1. **[BERT For Sequence Generation](https://huggingface.co/docs/transformers/model_doc/bert-generation)** (from Google) released with the paper [Leveraging Pre-trained Checkpoints for Sequence Generation Tasks](https://arxiv.org/abs/1907.12461) by Sascha Rothe, Shashi Narayan, Aliaksei Severyn. +1. **[BERTweet](https://huggingface.co/docs/transformers/model_doc/bertweet)** (from VinAI Research) released with the paper [BERTweet: A pre-trained language model for English Tweets](https://aclanthology.org/2020.emnlp-demos.2/) by Dat Quoc Nguyen, Thanh Vu and Anh Tuan Nguyen. +1. 
**[BigBird-Pegasus](https://huggingface.co/docs/transformers/model_doc/bigbird_pegasus)** (from Google Research) released with the paper [Big Bird: Transformers for Longer Sequences](https://arxiv.org/abs/2007.14062) by Manzil Zaheer, Guru Guruganesh, Avinava Dubey, Joshua Ainslie, Chris Alberti, Santiago Ontanon, Philip Pham, Anirudh Ravula, Qifan Wang, Li Yang, Amr Ahmed. +1. **[BigBird-RoBERTa](https://huggingface.co/docs/transformers/model_doc/big_bird)** (from Google Research) released with the paper [Big Bird: Transformers for Longer Sequences](https://arxiv.org/abs/2007.14062) by Manzil Zaheer, Guru Guruganesh, Avinava Dubey, Joshua Ainslie, Chris Alberti, Santiago Ontanon, Philip Pham, Anirudh Ravula, Qifan Wang, Li Yang, Amr Ahmed. +1. **[BioGpt](https://huggingface.co/docs/transformers/model_doc/biogpt)** (from Microsoft Research AI4Science) released with the paper [BioGPT: generative pre-trained transformer for biomedical text generation and mining](https://academic.oup.com/bib/advance-article/doi/10.1093/bib/bbac409/6713511?guestAccessKey=a66d9b5d-4f83-4017-bb52-405815c907b9) by Renqian Luo, Liai Sun, Yingce Xia, Tao Qin, Sheng Zhang, Hoifung Poon and Tie-Yan Liu. +1. **[BiT](https://huggingface.co/docs/transformers/model_doc/bit)** (from Google AI) released with the paper [Big Transfer (BiT): General Visual Representation Learning](https://arxiv.org/abs/1912.11370) by Alexander Kolesnikov, Lucas Beyer, Xiaohua Zhai, Joan Puigcerver, Jessica Yung, Sylvain Gelly, Neil Houlsby. +1. **[Blenderbot](https://huggingface.co/docs/transformers/model_doc/blenderbot)** (from Facebook) released with the paper [Recipes for building an open-domain chatbot](https://arxiv.org/abs/2004.13637) by Stephen Roller, Emily Dinan, Naman Goyal, Da Ju, Mary Williamson, Yinhan Liu, Jing Xu, Myle Ott, Kurt Shuster, Eric M. Smith, Y-Lan Boureau, Jason Weston. +1. **[BlenderbotSmall](https://huggingface.co/docs/transformers/model_doc/blenderbot-small)** (from Facebook) released with the paper [Recipes for building an open-domain chatbot](https://arxiv.org/abs/2004.13637) by Stephen Roller, Emily Dinan, Naman Goyal, Da Ju, Mary Williamson, Yinhan Liu, Jing Xu, Myle Ott, Kurt Shuster, Eric M. Smith, Y-Lan Boureau, Jason Weston. +1. **[BLIP](https://huggingface.co/docs/transformers/model_doc/blip)** (from Salesforce) released with the paper [BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation](https://arxiv.org/abs/2201.12086) by Junnan Li, Dongxu Li, Caiming Xiong, Steven Hoi. +1. **[BLIP-2](https://huggingface.co/docs/transformers/model_doc/blip-2)** (from Salesforce) released with the paper [BLIP-2: Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models](https://arxiv.org/abs/2301.12597) by Junnan Li, Dongxu Li, Silvio Savarese, Steven Hoi. +1. **[BLOOM](https://huggingface.co/docs/transformers/model_doc/bloom)** (from BigScience workshop) released by the [BigScience Workshop](https://bigscience.huggingface.co/). +1. **[BORT](https://huggingface.co/docs/transformers/model_doc/bort)** (from Alexa) released with the paper [Optimal Subarchitecture Extraction For BERT](https://arxiv.org/abs/2010.10499) by Adrian de Wynter and Daniel J. Perry. +1. 
**[BridgeTower](https://huggingface.co/docs/transformers/model_doc/bridgetower)** (from Harbin Institute of Technology/Microsoft Research Asia/Intel Labs) released with the paper [BridgeTower: Building Bridges Between Encoders in Vision-Language Representation Learning](https://arxiv.org/abs/2206.08657) by Xiao Xu, Chenfei Wu, Shachar Rosenman, Vasudev Lal, Wanxiang Che, Nan Duan. +1. **[BROS](https://huggingface.co/docs/transformers/model_doc/bros)** (from NAVER CLOVA) released with the paper [BROS: A Pre-trained Language Model Focusing on Text and Layout for Better Key Information Extraction from Documents](https://arxiv.org/abs/2108.04539) by Teakgyu Hong, Donghyun Kim, Mingi Ji, Wonseok Hwang, Daehyun Nam, Sungrae Park. +1. **[ByT5](https://huggingface.co/docs/transformers/model_doc/byt5)** (from Google Research) released with the paper [ByT5: Towards a token-free future with pre-trained byte-to-byte models](https://arxiv.org/abs/2105.13626) by Linting Xue, Aditya Barua, Noah Constant, Rami Al-Rfou, Sharan Narang, Mihir Kale, Adam Roberts, Colin Raffel. +1. **[CamemBERT](https://huggingface.co/docs/transformers/model_doc/camembert)** (from Inria/Facebook/Sorbonne) released with the paper [CamemBERT: a Tasty French Language Model](https://arxiv.org/abs/1911.03894) by Louis Martin*, Benjamin Muller*, Pedro Javier Ortiz Suárez*, Yoann Dupont, Laurent Romary, Éric Villemonte de la Clergerie, Djamé Seddah and Benoît Sagot. +1. **[CANINE](https://huggingface.co/docs/transformers/model_doc/canine)** (from Google Research) released with the paper [CANINE: Pre-training an Efficient Tokenization-Free Encoder for Language Representation](https://arxiv.org/abs/2103.06874) by Jonathan H. Clark, Dan Garrette, Iulia Turc, John Wieting. +1. **[Chinese-CLIP](https://huggingface.co/docs/transformers/model_doc/chinese_clip)** (from OFA-Sys) released with the paper [Chinese CLIP: Contrastive Vision-Language Pretraining in Chinese](https://arxiv.org/abs/2211.01335) by An Yang, Junshu Pan, Junyang Lin, Rui Men, Yichang Zhang, Jingren Zhou, Chang Zhou. +1. **[CLAP](https://huggingface.co/docs/transformers/model_doc/clap)** (from LAION-AI) released with the paper [Large-scale Contrastive Language-Audio Pretraining with Feature Fusion and Keyword-to-Caption Augmentation](https://arxiv.org/abs/2211.06687) by Yusong Wu, Ke Chen, Tianyu Zhang, Yuchen Hui, Taylor Berg-Kirkpatrick, Shlomo Dubnov. +1. **[CLIP](https://huggingface.co/docs/transformers/model_doc/clip)** (from OpenAI) released with the paper [Learning Transferable Visual Models From Natural Language Supervision](https://arxiv.org/abs/2103.00020) by Alec Radford, Jong Wook Kim, Chris Hallacy, Aditya Ramesh, Gabriel Goh, Sandhini Agarwal, Girish Sastry, Amanda Askell, Pamela Mishkin, Jack Clark, Gretchen Krueger, Ilya Sutskever. +1. **[CLIPSeg](https://huggingface.co/docs/transformers/model_doc/clipseg)** (from University of Göttingen) released with the paper [Image Segmentation Using Text and Image Prompts](https://arxiv.org/abs/2112.10003) by Timo Lüddecke and Alexander Ecker. +1. **[CLVP](https://huggingface.co/docs/transformers/model_doc/clvp)** released with the paper [Better speech synthesis through scaling](https://arxiv.org/abs/2305.07243) by James Betker. +1. 
**[CodeGen](https://huggingface.co/docs/transformers/model_doc/codegen)** (from Salesforce) released with the paper [A Conversational Paradigm for Program Synthesis](https://arxiv.org/abs/2203.13474) by Erik Nijkamp, Bo Pang, Hiroaki Hayashi, Lifu Tu, Huan Wang, Yingbo Zhou, Silvio Savarese, Caiming Xiong. +1. **[CodeLlama](https://huggingface.co/docs/transformers/model_doc/llama_code)** (from MetaAI) released with the paper [Code Llama: Open Foundation Models for Code](https://ai.meta.com/research/publications/code-llama-open-foundation-models-for-code/) by Baptiste Rozière, Jonas Gehring, Fabian Gloeckle, Sten Sootla, Itai Gat, Xiaoqing Ellen Tan, Yossi Adi, Jingyu Liu, Tal Remez, Jérémy Rapin, Artyom Kozhevnikov, Ivan Evtimov, Joanna Bitton, Manish Bhatt, Cristian Canton Ferrer, Aaron Grattafiori, Wenhan Xiong, Alexandre Défossez, Jade Copet, Faisal Azhar, Hugo Touvron, Louis Martin, Nicolas Usunier, Thomas Scialom, Gabriel Synnaeve. +1. **[Cohere](https://huggingface.co/docs/transformers/model_doc/cohere)** (from Cohere) released with the paper [Command-R: Retrieval Augmented Generation at Production Scale]() by Cohere. +1. **[Conditional DETR](https://huggingface.co/docs/transformers/model_doc/conditional_detr)** (from Microsoft Research Asia) released with the paper [Conditional DETR for Fast Training Convergence](https://arxiv.org/abs/2108.06152) by Depu Meng, Xiaokang Chen, Zejia Fan, Gang Zeng, Houqiang Li, Yuhui Yuan, Lei Sun, Jingdong Wang. +1. **[ConvBERT](https://huggingface.co/docs/transformers/model_doc/convbert)** (from YituTech) released with the paper [ConvBERT: Improving BERT with Span-based Dynamic Convolution](https://arxiv.org/abs/2008.02496) by Zihang Jiang, Weihao Yu, Daquan Zhou, Yunpeng Chen, Jiashi Feng, Shuicheng Yan. +1. **[ConvNeXT](https://huggingface.co/docs/transformers/model_doc/convnext)** (from Facebook AI) released with the paper [A ConvNet for the 2020s](https://arxiv.org/abs/2201.03545) by Zhuang Liu, Hanzi Mao, Chao-Yuan Wu, Christoph Feichtenhofer, Trevor Darrell, Saining Xie. +1. **[ConvNeXTV2](https://huggingface.co/docs/transformers/model_doc/convnextv2)** (from Facebook AI) released with the paper [ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders](https://arxiv.org/abs/2301.00808) by Sanghyun Woo, Shoubhik Debnath, Ronghang Hu, Xinlei Chen, Zhuang Liu, In So Kweon, Saining Xie. +1. **[CPM](https://huggingface.co/docs/transformers/model_doc/cpm)** (from Tsinghua University) released with the paper [CPM: A Large-scale Generative Chinese Pre-trained Language Model](https://arxiv.org/abs/2012.00413) by Zhengyan Zhang, Xu Han, Hao Zhou, Pei Ke, Yuxian Gu, Deming Ye, Yujia Qin, Yusheng Su, Haozhe Ji, Jian Guan, Fanchao Qi, Xiaozhi Wang, Yanan Zheng, Guoyang Zeng, Huanqi Cao, Shengqi Chen, Daixuan Li, Zhenbo Sun, Zhiyuan Liu, Minlie Huang, Wentao Han, Jie Tang, Juanzi Li, Xiaoyan Zhu, Maosong Sun. +1. **[CPM-Ant](https://huggingface.co/docs/transformers/model_doc/cpmant)** (from OpenBMB) released by the [OpenBMB](https://www.openbmb.org/). +1. **[CTRL](https://huggingface.co/docs/transformers/model_doc/ctrl)** (from Salesforce) released with the paper [CTRL: A Conditional Transformer Language Model for Controllable Generation](https://arxiv.org/abs/1909.05858) by Nitish Shirish Keskar*, Bryan McCann*, Lav R. Varshney, Caiming Xiong and Richard Socher. +1. 
**[CvT](https://huggingface.co/docs/transformers/model_doc/cvt)** (from Microsoft) released with the paper [CvT: Introducing Convolutions to Vision Transformers](https://arxiv.org/abs/2103.15808) by Haiping Wu, Bin Xiao, Noel Codella, Mengchen Liu, Xiyang Dai, Lu Yuan, Lei Zhang. +1. **[Data2Vec](https://huggingface.co/docs/transformers/model_doc/data2vec)** (from Facebook) released with the paper [Data2Vec: A General Framework for Self-supervised Learning in Speech, Vision and Language](https://arxiv.org/abs/2202.03555) by Alexei Baevski, Wei-Ning Hsu, Qiantong Xu, Arun Babu, Jiatao Gu, Michael Auli. +1. **[DBRX](https://huggingface.co/docs/transformers/main/model_doc/dbrx)** (from Databricks) released with the paper [Introducing DBRX: A New State-of-the-Art Open LLM](https://www.databricks.com/blog/introducing-dbrx-new-state-art-open-llm) by the Mosaic Research Team. +1. **[DeBERTa](https://huggingface.co/docs/transformers/model_doc/deberta)** (from Microsoft) released with the paper [DeBERTa: Decoding-enhanced BERT with Disentangled Attention](https://arxiv.org/abs/2006.03654) by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen. +1. **[DeBERTa-v2](https://huggingface.co/docs/transformers/model_doc/deberta-v2)** (from Microsoft) released with the paper [DeBERTa: Decoding-enhanced BERT with Disentangled Attention](https://arxiv.org/abs/2006.03654) by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen. +1. **[Decision Transformer](https://huggingface.co/docs/transformers/model_doc/decision_transformer)** (from Berkeley/Facebook/Google) released with the paper [Decision Transformer: Reinforcement Learning via Sequence Modeling](https://arxiv.org/abs/2106.01345) by Lili Chen, Kevin Lu, Aravind Rajeswaran, Kimin Lee, Aditya Grover, Michael Laskin, Pieter Abbeel, Aravind Srinivas, Igor Mordatch. +1. **[Deformable DETR](https://huggingface.co/docs/transformers/model_doc/deformable_detr)** (from SenseTime Research) released with the paper [Deformable DETR: Deformable Transformers for End-to-End Object Detection](https://arxiv.org/abs/2010.04159) by Xizhou Zhu, Weijie Su, Lewei Lu, Bin Li, Xiaogang Wang, Jifeng Dai. +1. **[DeiT](https://huggingface.co/docs/transformers/model_doc/deit)** (from Facebook) released with the paper [Training data-efficient image transformers & distillation through attention](https://arxiv.org/abs/2012.12877) by Hugo Touvron, Matthieu Cord, Matthijs Douze, Francisco Massa, Alexandre Sablayrolles, Hervé Jégou. +1. **[DePlot](https://huggingface.co/docs/transformers/model_doc/deplot)** (from Google AI) released with the paper [DePlot: One-shot visual language reasoning by plot-to-table translation](https://arxiv.org/abs/2212.10505) by Fangyu Liu, Julian Martin Eisenschlos, Francesco Piccinno, Syrine Krichene, Chenxi Pang, Kenton Lee, Mandar Joshi, Wenhu Chen, Nigel Collier, Yasemin Altun. +1. **[Depth Anything](https://huggingface.co/docs/transformers/model_doc/depth_anything)** (from University of Hong Kong and TikTok) released with the paper [Depth Anything: Unleashing the Power of Large-Scale Unlabeled Data](https://arxiv.org/abs/2401.10891) by Lihe Yang, Bingyi Kang, Zilong Huang, Xiaogang Xu, Jiashi Feng, Hengshuang Zhao. +1. **[DETA](https://huggingface.co/docs/transformers/model_doc/deta)** (from The University of Texas at Austin) released with the paper [NMS Strikes Back](https://arxiv.org/abs/2212.06137) by Jeffrey Ouyang-Zhang, Jang Hyun Cho, Xingyi Zhou, Philipp Krähenbühl. +1. 
**[DETR](https://huggingface.co/docs/transformers/model_doc/detr)** (from Facebook) released with the paper [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872) by Nicolas Carion, Francisco Massa, Gabriel Synnaeve, Nicolas Usunier, Alexander Kirillov, Sergey Zagoruyko. +1. **[DialoGPT](https://huggingface.co/docs/transformers/model_doc/dialogpt)** (from Microsoft Research) released with the paper [DialoGPT: Large-Scale Generative Pre-training for Conversational Response Generation](https://arxiv.org/abs/1911.00536) by Yizhe Zhang, Siqi Sun, Michel Galley, Yen-Chun Chen, Chris Brockett, Xiang Gao, Jianfeng Gao, Jingjing Liu, Bill Dolan. +1. **[DiNAT](https://huggingface.co/docs/transformers/model_doc/dinat)** (from SHI Labs) released with the paper [Dilated Neighborhood Attention Transformer](https://arxiv.org/abs/2209.15001) by Ali Hassani and Humphrey Shi. +1. **[DINOv2](https://huggingface.co/docs/transformers/model_doc/dinov2)** (from Meta AI) released with the paper [DINOv2: Learning Robust Visual Features without Supervision](https://arxiv.org/abs/2304.07193) by Maxime Oquab, Timothée Darcet, Théo Moutakanni, Huy Vo, Marc Szafraniec, Vasil Khalidov, Pierre Fernandez, Daniel Haziza, Francisco Massa, Alaaeldin El-Nouby, Mahmoud Assran, Nicolas Ballas, Wojciech Galuba, Russell Howes, Po-Yao Huang, Shang-Wen Li, Ishan Misra, Michael Rabbat, Vasu Sharma, Gabriel Synnaeve, Hu Xu, Hervé Jegou, Julien Mairal, Patrick Labatut, Armand Joulin, Piotr Bojanowski. +1. **[DistilBERT](https://huggingface.co/docs/transformers/model_doc/distilbert)** (from HuggingFace), released together with the paper [DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter](https://arxiv.org/abs/1910.01108) by Victor Sanh, Lysandre Debut and Thomas Wolf. The same method has been applied to compress GPT2 into [DistilGPT2](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation), RoBERTa into [DistilRoBERTa](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation), Multilingual BERT into [DistilmBERT](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation) and a German version of DistilBERT. +1. **[DiT](https://huggingface.co/docs/transformers/model_doc/dit)** (from Microsoft Research) released with the paper [DiT: Self-supervised Pre-training for Document Image Transformer](https://arxiv.org/abs/2203.02378) by Junlong Li, Yiheng Xu, Tengchao Lv, Lei Cui, Cha Zhang, Furu Wei. +1. **[Donut](https://huggingface.co/docs/transformers/model_doc/donut)** (from NAVER), released together with the paper [OCR-free Document Understanding Transformer](https://arxiv.org/abs/2111.15664) by Geewook Kim, Teakgyu Hong, Moonbin Yim, Jeongyeon Nam, Jinyoung Park, Jinyeong Yim, Wonseok Hwang, Sangdoo Yun, Dongyoon Han, Seunghyun Park. +1. **[DPR](https://huggingface.co/docs/transformers/model_doc/dpr)** (from Facebook) released with the paper [Dense Passage Retrieval for Open-Domain Question Answering](https://arxiv.org/abs/2004.04906) by Vladimir Karpukhin, Barlas Oğuz, Sewon Min, Patrick Lewis, Ledell Wu, Sergey Edunov, Danqi Chen, and Wen-tau Yih. +1. **[DPT](https://huggingface.co/docs/transformers/master/model_doc/dpt)** (from Intel Labs) released with the paper [Vision Transformers for Dense Prediction](https://arxiv.org/abs/2103.13413) by René Ranftl, Alexey Bochkovskiy, Vladlen Koltun. +1. 
**[EfficientFormer](https://huggingface.co/docs/transformers/model_doc/efficientformer)** (from Snap Research) released with the paper [EfficientFormer: Vision Transformers at MobileNet Speed](https://arxiv.org/abs/2206.01191) by Yanyu Li, Geng Yuan, Yang Wen, Ju Hu, Georgios Evangelidis, Sergey Tulyakov, Yanzhi Wang, Jian Ren. +1. **[EfficientNet](https://huggingface.co/docs/transformers/model_doc/efficientnet)** (from Google Brain) released with the paper [EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks](https://arxiv.org/abs/1905.11946) by Mingxing Tan, Quoc V. Le. +1. **[ELECTRA](https://huggingface.co/docs/transformers/model_doc/electra)** (from Google Research/Stanford University) released with the paper [ELECTRA: Pre-training text encoders as discriminators rather than generators](https://arxiv.org/abs/2003.10555) by Kevin Clark, Minh-Thang Luong, Quoc V. Le, Christopher D. Manning. +1. **[EnCodec](https://huggingface.co/docs/transformers/model_doc/encodec)** (from Meta AI) released with the paper [High Fidelity Neural Audio Compression](https://arxiv.org/abs/2210.13438) by Alexandre Défossez, Jade Copet, Gabriel Synnaeve, Yossi Adi. +1. **[EncoderDecoder](https://huggingface.co/docs/transformers/model_doc/encoder-decoder)** (from Google Research) released with the paper [Leveraging Pre-trained Checkpoints for Sequence Generation Tasks](https://arxiv.org/abs/1907.12461) by Sascha Rothe, Shashi Narayan, Aliaksei Severyn. +1. **[ERNIE](https://huggingface.co/docs/transformers/model_doc/ernie)** (from Baidu) released with the paper [ERNIE: Enhanced Representation through Knowledge Integration](https://arxiv.org/abs/1904.09223) by Yu Sun, Shuohuan Wang, Yukun Li, Shikun Feng, Xuyi Chen, Han Zhang, Xin Tian, Danxiang Zhu, Hao Tian, Hua Wu. +1. **[ErnieM](https://huggingface.co/docs/transformers/model_doc/ernie_m)** (from Baidu) released with the paper [ERNIE-M: Enhanced Multilingual Representation by Aligning Cross-lingual Semantics with Monolingual Corpora](https://arxiv.org/abs/2012.15674) by Xuan Ouyang, Shuohuan Wang, Chao Pang, Yu Sun, Hao Tian, Hua Wu, Haifeng Wang. +1. **[ESM](https://huggingface.co/docs/transformers/model_doc/esm)** (from Meta AI) are transformer protein language models. **ESM-1b** was released with the paper [Biological structure and function emerge from scaling unsupervised learning to 250 million protein sequences](https://www.pnas.org/content/118/15/e2016239118) by Alexander Rives, Joshua Meier, Tom Sercu, Siddharth Goyal, Zeming Lin, Jason Liu, Demi Guo, Myle Ott, C. Lawrence Zitnick, Jerry Ma, and Rob Fergus. **ESM-1v** was released with the paper [Language models enable zero-shot prediction of the effects of mutations on protein function](https://doi.org/10.1101/2021.07.09.450648) by Joshua Meier, Roshan Rao, Robert Verkuil, Jason Liu, Tom Sercu and Alexander Rives. **ESM-2 and ESMFold** were released with the paper [Language models of protein sequences at the scale of evolution enable accurate structure prediction](https://doi.org/10.1101/2022.07.20.500902) by Zeming Lin, Halil Akin, Roshan Rao, Brian Hie, Zhongkai Zhu, Wenting Lu, Allan dos Santos Costa, Maryam Fazel-Zarandi, Tom Sercu, Sal Candido, Alexander Rives. +1. 
**[Falcon](https://huggingface.co/docs/transformers/model_doc/falcon)** (from Technology Innovation Institute) by Almazrouei, Ebtesam and Alobeidli, Hamza and Alshamsi, Abdulaziz and Cappelli, Alessandro and Cojocaru, Ruxandra and Debbah, Merouane and Goffinet, Etienne and Heslow, Daniel and Launay, Julien and Malartic, Quentin and Noune, Badreddine and Pannier, Baptiste and Penedo, Guilherme. +1. **[FastSpeech2Conformer](https://huggingface.co/docs/transformers/model_doc/fastspeech2_conformer)** (from ESPnet) released with the paper [Recent Developments On Espnet Toolkit Boosted By Conformer](https://arxiv.org/abs/2010.13956) by Pengcheng Guo, Florian Boyer, Xuankai Chang, Tomoki Hayashi, Yosuke Higuchi, Hirofumi Inaguma, Naoyuki Kamo, Chenda Li, Daniel Garcia-Romero, Jiatong Shi, Jing Shi, Shinji Watanabe, Kun Wei, Wangyou Zhang, and Yuekai Zhang. +1. **[FLAN-T5](https://huggingface.co/docs/transformers/model_doc/flan-t5)** (from Google AI) released in the repository [google-research/t5x](https://github.com/google-research/t5x/blob/main/docs/models.md#flan-t5-checkpoints) by Hyung Won Chung, Le Hou, Shayne Longpre, Barret Zoph, Yi Tay, William Fedus, Eric Li, Xuezhi Wang, Mostafa Dehghani, Siddhartha Brahma, Albert Webson, Shixiang Shane Gu, Zhuyun Dai, Mirac Suzgun, Xinyun Chen, Aakanksha Chowdhery, Sharan Narang, Gaurav Mishra, Adams Yu, Vincent Zhao, Yanping Huang, Andrew Dai, Hongkun Yu, Slav Petrov, Ed H. Chi, Jeff Dean, Jacob Devlin, Adam Roberts, Denny Zhou, Quoc V. Le, and Jason Wei +1. **[FLAN-UL2](https://huggingface.co/docs/transformers/model_doc/flan-ul2)** (from Google AI) released in the repository [google-research/t5x](https://github.com/google-research/t5x/blob/main/docs/models.md#flan-ul2-checkpoints) by Hyung Won Chung, Le Hou, Shayne Longpre, Barret Zoph, Yi Tay, William Fedus, Eric Li, Xuezhi Wang, Mostafa Dehghani, Siddhartha Brahma, Albert Webson, Shixiang Shane Gu, Zhuyun Dai, Mirac Suzgun, Xinyun Chen, Aakanksha Chowdhery, Sharan Narang, Gaurav Mishra, Adams Yu, Vincent Zhao, Yanping Huang, Andrew Dai, Hongkun Yu, Slav Petrov, Ed H. Chi, Jeff Dean, Jacob Devlin, Adam Roberts, Denny Zhou, Quoc V. Le, and Jason Wei +1. **[FlauBERT](https://huggingface.co/docs/transformers/model_doc/flaubert)** (from CNRS) released with the paper [FlauBERT: Unsupervised Language Model Pre-training for French](https://arxiv.org/abs/1912.05372) by Hang Le, Loïc Vial, Jibril Frej, Vincent Segonne, Maximin Coavoux, Benjamin Lecouteux, Alexandre Allauzen, Benoît Crabbé, Laurent Besacier, Didier Schwab. +1. **[FLAVA](https://huggingface.co/docs/transformers/model_doc/flava)** (from Facebook AI) released with the paper [FLAVA: A Foundational Language And Vision Alignment Model](https://arxiv.org/abs/2112.04482) by Amanpreet Singh, Ronghang Hu, Vedanuj Goswami, Guillaume Couairon, Wojciech Galuba, Marcus Rohrbach, and Douwe Kiela. +1. **[FNet](https://huggingface.co/docs/transformers/model_doc/fnet)** (from Google Research) released with the paper [FNet: Mixing Tokens with Fourier Transforms](https://arxiv.org/abs/2105.03824) by James Lee-Thorp, Joshua Ainslie, Ilya Eckstein, Santiago Ontanon. +1. **[FocalNet](https://huggingface.co/docs/transformers/model_doc/focalnet)** (from Microsoft Research) released with the paper [Focal Modulation Networks](https://arxiv.org/abs/2203.11926) by Jianwei Yang, Chunyuan Li, Xiyang Dai, Lu Yuan, Jianfeng Gao. +1. 
**[Funnel Transformer](https://huggingface.co/docs/transformers/model_doc/funnel)** (from CMU/Google Brain) released with the paper [Funnel-Transformer: Filtering out Sequential Redundancy for Efficient Language Processing](https://arxiv.org/abs/2006.03236) by Zihang Dai, Guokun Lai, Yiming Yang, Quoc V. Le. +1. **[Fuyu](https://huggingface.co/docs/transformers/model_doc/fuyu)** (from ADEPT) Rohan Bavishi, Erich Elsen, Curtis Hawthorne, Maxwell Nye, Augustus Odena, Arushi Somani, Sağnak Taşırlar. Released in a [blog post](https://www.adept.ai/blog/fuyu-8b). +1. **[Gemma](https://huggingface.co/docs/transformers/model_doc/gemma)** (from Google) released with the paper [Gemma: Open Models Based on Gemini Technology and Research](https://blog.google/technology/developers/gemma-open-models/) by the Gemma Google team. +1. **[GIT](https://huggingface.co/docs/transformers/model_doc/git)** (from Microsoft Research) released with the paper [GIT: A Generative Image-to-text Transformer for Vision and Language](https://arxiv.org/abs/2205.14100) by Jianfeng Wang, Zhengyuan Yang, Xiaowei Hu, Linjie Li, Kevin Lin, Zhe Gan, Zicheng Liu, Ce Liu, Lijuan Wang. +1. **[GLPN](https://huggingface.co/docs/transformers/model_doc/glpn)** (from KAIST) released with the paper [Global-Local Path Networks for Monocular Depth Estimation with Vertical CutDepth](https://arxiv.org/abs/2201.07436) by Doyeon Kim, Woonghyun Ga, Pyungwhan Ahn, Donggyu Joo, Sehwan Chun, Junmo Kim. +1. **[GPT](https://huggingface.co/docs/transformers/model_doc/openai-gpt)** (from OpenAI) released with the paper [Improving Language Understanding by Generative Pre-Training](https://openai.com/research/language-unsupervised/) by Alec Radford, Karthik Narasimhan, Tim Salimans and Ilya Sutskever. +1. **[GPT Neo](https://huggingface.co/docs/transformers/model_doc/gpt_neo)** (from EleutherAI) released in the repository [EleutherAI/gpt-neo](https://github.com/EleutherAI/gpt-neo) by Sid Black, Stella Biderman, Leo Gao, Phil Wang and Connor Leahy. +1. **[GPT NeoX](https://huggingface.co/docs/transformers/model_doc/gpt_neox)** (from EleutherAI) released with the paper [GPT-NeoX-20B: An Open-Source Autoregressive Language Model](https://arxiv.org/abs/2204.06745) by Sid Black, Stella Biderman, Eric Hallahan, Quentin Anthony, Leo Gao, Laurence Golding, Horace He, Connor Leahy, Kyle McDonell, Jason Phang, Michael Pieler, USVSN Sai Prashanth, Shivanshu Purohit, Laria Reynolds, Jonathan Tow, Ben Wang, Samuel Weinbach +1. **[GPT NeoX Japanese](https://huggingface.co/docs/transformers/model_doc/gpt_neox_japanese)** (from ABEJA) released by Shinya Otani, Takayoshi Makabe, Anuj Arora, and Kyo Hattori. +1. **[GPT-2](https://huggingface.co/docs/transformers/model_doc/gpt2)** (from OpenAI) released with the paper [Language Models are Unsupervised Multitask Learners](https://openai.com/research/better-language-models/) by Alec Radford, Jeffrey Wu, Rewon Child, David Luan, Dario Amodei and Ilya Sutskever. +1. **[GPT-J](https://huggingface.co/docs/transformers/model_doc/gptj)** (from EleutherAI) released in the repository [kingoflolz/mesh-transformer-jax](https://github.com/kingoflolz/mesh-transformer-jax/) by Ben Wang and Aran Komatsuzaki. +1. 
**[GPT-Sw3](https://huggingface.co/docs/transformers/model_doc/gpt-sw3)** (from AI-Sweden) released with the paper [Lessons Learned from GPT-SW3: Building the First Large-Scale Generative Language Model for Swedish](http://www.lrec-conf.org/proceedings/lrec2022/pdf/2022.lrec-1.376.pdf) by Ariel Ekgren, Amaru Cuba Gyllensten, Evangelia Gogoulou, Alice Heiman, Severine Verlinden, Joey Öhman, Fredrik Carlsson, Magnus Sahlgren. +1. **[GPTBigCode](https://huggingface.co/docs/transformers/model_doc/gpt_bigcode)** (from BigCode) released with the paper [SantaCoder: don't reach for the stars!](https://arxiv.org/abs/2301.03988) by Loubna Ben Allal, Raymond Li, Denis Kocetkov, Chenghao Mou, Christopher Akiki, Carlos Munoz Ferrandis, Niklas Muennighoff, Mayank Mishra, Alex Gu, Manan Dey, Logesh Kumar Umapathi, Carolyn Jane Anderson, Yangtian Zi, Joel Lamy Poirier, Hailey Schoelkopf, Sergey Troshin, Dmitry Abulkhanov, Manuel Romero, Michael Lappert, Francesco De Toni, Bernardo García del Río, Qian Liu, Shamik Bose, Urvashi Bhattacharyya, Terry Yue Zhuo, Ian Yu, Paulo Villegas, Marco Zocca, Sourab Mangrulkar, David Lansky, Huu Nguyen, Danish Contractor, Luis Villa, Jia Li, Dzmitry Bahdanau, Yacine Jernite, Sean Hughes, Daniel Fried, Arjun Guha, Harm de Vries, Leandro von Werra. +1. **[GPTSAN-japanese](https://huggingface.co/docs/transformers/model_doc/gptsan-japanese)** released in the repository [tanreinama/GPTSAN](https://github.com/tanreinama/GPTSAN/blob/main/report/model.md) by Toshiyuki Sakamoto(tanreinama). +1. **[Graphormer](https://huggingface.co/docs/transformers/model_doc/graphormer)** (from Microsoft) released with the paper [Do Transformers Really Perform Bad for Graph Representation?](https://arxiv.org/abs/2106.05234) by Chengxuan Ying, Tianle Cai, Shengjie Luo, Shuxin Zheng, Guolin Ke, Di He, Yanming Shen, Tie-Yan Liu. +1. **[Grounding DINO](https://huggingface.co/docs/transformers/model_doc/grounding-dino)** (from Institute for AI, Tsinghua-Bosch Joint Center for ML, Tsinghua University, IDEA Research and others) released with the paper [Grounding DINO: Marrying DINO with Grounded Pre-Training for Open-Set Object Detection](https://arxiv.org/abs/2303.05499) by Shilong Liu, Zhaoyang Zeng, Tianhe Ren, Feng Li, Hao Zhang, Jie Yang, Chunyuan Li, Jianwei Yang, Hang Su, Jun Zhu, Lei Zhang. +1. **[GroupViT](https://huggingface.co/docs/transformers/model_doc/groupvit)** (from UCSD, NVIDIA) released with the paper [GroupViT: Semantic Segmentation Emerges from Text Supervision](https://arxiv.org/abs/2202.11094) by Jiarui Xu, Shalini De Mello, Sifei Liu, Wonmin Byeon, Thomas Breuel, Jan Kautz, Xiaolong Wang. +1. **[HerBERT](https://huggingface.co/docs/transformers/model_doc/herbert)** (from Allegro.pl, AGH University of Science and Technology) released with the paper [KLEJ: Comprehensive Benchmark for Polish Language Understanding](https://www.aclweb.org/anthology/2020.acl-main.111.pdf) by Piotr Rybak, Robert Mroczkowski, Janusz Tracz, Ireneusz Gawlik. +1. **[Hubert](https://huggingface.co/docs/transformers/model_doc/hubert)** (from Facebook) released with the paper [HuBERT: Self-Supervised Speech Representation Learning by Masked Prediction of Hidden Units](https://arxiv.org/abs/2106.07447) by Wei-Ning Hsu, Benjamin Bolte, Yao-Hung Hubert Tsai, Kushal Lakhotia, Ruslan Salakhutdinov, Abdelrahman Mohamed. +1. 
**[I-BERT](https://huggingface.co/docs/transformers/model_doc/ibert)** (from Berkeley) released with the paper [I-BERT: Integer-only BERT Quantization](https://arxiv.org/abs/2101.01321) by Sehoon Kim, Amir Gholami, Zhewei Yao, Michael W. Mahoney, Kurt Keutzer. +1. **[IDEFICS](https://huggingface.co/docs/transformers/model_doc/idefics)** (from HuggingFace) released with the paper [OBELICS: An Open Web-Scale Filtered Dataset of Interleaved Image-Text Documents](https://huggingface.co/papers/2306.16527) by Hugo Laurençon, Lucile Saulnier, Léo Tronchon, Stas Bekman, Amanpreet Singh, Anton Lozhkov, Thomas Wang, Siddharth Karamcheti, Alexander M. Rush, Douwe Kiela, Matthieu Cord, Victor Sanh. +1. **[Idefics2](https://huggingface.co/docs/transformers/model_doc/idefics2)** (from Hugging Face) released with the blog [IDEFICS2](https://huggingface.co/blog/idefics2) by Léo Tronchon, Hugo Laurencon, Victor Sanh. +1. **[ImageGPT](https://huggingface.co/docs/transformers/model_doc/imagegpt)** (from OpenAI) released with the paper [Generative Pretraining from Pixels](https://openai.com/blog/image-gpt/) by Mark Chen, Alec Radford, Rewon Child, Jeffrey Wu, Heewoo Jun, David Luan, Ilya Sutskever. +1. **[Informer](https://huggingface.co/docs/transformers/model_doc/informer)** (from Beihang University, UC Berkeley, Rutgers University, SEDD Company) released with the paper [Informer: Beyond Efficient Transformer for Long Sequence Time-Series Forecasting](https://arxiv.org/abs/2012.07436) by Haoyi Zhou, Shanghang Zhang, Jieqi Peng, Shuai Zhang, Jianxin Li, Hui Xiong, and Wancai Zhang. +1. **[InstructBLIP](https://huggingface.co/docs/transformers/model_doc/instructblip)** (from Salesforce) released with the paper [InstructBLIP: Towards General-purpose Vision-Language Models with Instruction Tuning](https://arxiv.org/abs/2305.06500) by Wenliang Dai, Junnan Li, Dongxu Li, Anthony Meng Huat Tiong, Junqi Zhao, Weisheng Wang, Boyang Li, Pascale Fung, Steven Hoi. +1. **[Jamba](https://huggingface.co/docs/transformers/model_doc/jamba)** (from AI21 Labs Ltd.) released with the paper [Jamba: A Hybrid Transformer-Mamba Language Model](https://arxiv.org/abs/2403.19887) by Opher Lieber, Barak Lenz, Hofit Bata, Gal Cohen, Jhonathan Osin, Itay Dalmedigos, Erez Safahi, Shaked Meirom, Yonatan Belinkov, Shai Shalev-Shwartz, Omri Abend, Raz Alon, Tomer Asida, Amir Bergman, Roman Glozman, Michael Gokhman, Avshalom Manevich, Nir Ratner, Noam Rozen, Erez Shwartz, Mor Zusman, Yoav Shoham. +1. **[Jukebox](https://huggingface.co/docs/transformers/model_doc/jukebox)** (from OpenAI) released with the paper [Jukebox: A Generative Model for Music](https://arxiv.org/pdf/2005.00341.pdf) by Prafulla Dhariwal, Heewoo Jun, Christine Payne, Jong Wook Kim, Alec Radford, Ilya Sutskever. +1. **[KOSMOS-2](https://huggingface.co/docs/transformers/model_doc/kosmos-2)** (from Microsoft Research Asia) released with the paper [Kosmos-2: Grounding Multimodal Large Language Models to the World](https://arxiv.org/abs/2306.14824) by Zhiliang Peng, Wenhui Wang, Li Dong, Yaru Hao, Shaohan Huang, Shuming Ma, Furu Wei. +1. **[LayoutLM](https://huggingface.co/docs/transformers/model_doc/layoutlm)** (from Microsoft Research Asia) released with the paper [LayoutLM: Pre-training of Text and Layout for Document Image Understanding](https://arxiv.org/abs/1912.13318) by Yiheng Xu, Minghao Li, Lei Cui, Shaohan Huang, Furu Wei, Ming Zhou. +1. 
**[LayoutLMv2](https://huggingface.co/docs/transformers/model_doc/layoutlmv2)** (from Microsoft Research Asia) released with the paper [LayoutLMv2: Multi-modal Pre-training for Visually-Rich Document Understanding](https://arxiv.org/abs/2012.14740) by Yang Xu, Yiheng Xu, Tengchao Lv, Lei Cui, Furu Wei, Guoxin Wang, Yijuan Lu, Dinei Florencio, Cha Zhang, Wanxiang Che, Min Zhang, Lidong Zhou. +1. **[LayoutLMv3](https://huggingface.co/docs/transformers/model_doc/layoutlmv3)** (from Microsoft Research Asia) released with the paper [LayoutLMv3: Pre-training for Document AI with Unified Text and Image Masking](https://arxiv.org/abs/2204.08387) by Yupan Huang, Tengchao Lv, Lei Cui, Yutong Lu, Furu Wei. +1. **[LayoutXLM](https://huggingface.co/docs/transformers/model_doc/layoutxlm)** (from Microsoft Research Asia) released with the paper [LayoutXLM: Multimodal Pre-training for Multilingual Visually-rich Document Understanding](https://arxiv.org/abs/2104.08836) by Yiheng Xu, Tengchao Lv, Lei Cui, Guoxin Wang, Yijuan Lu, Dinei Florencio, Cha Zhang, Furu Wei. +1. **[LED](https://huggingface.co/docs/transformers/model_doc/led)** (from AllenAI) released with the paper [Longformer: The Long-Document Transformer](https://arxiv.org/abs/2004.05150) by Iz Beltagy, Matthew E. Peters, Arman Cohan. +1. **[LeViT](https://huggingface.co/docs/transformers/model_doc/levit)** (from Meta AI) released with the paper [LeViT: A Vision Transformer in ConvNet's Clothing for Faster Inference](https://arxiv.org/abs/2104.01136) by Ben Graham, Alaaeldin El-Nouby, Hugo Touvron, Pierre Stock, Armand Joulin, Hervé Jégou, Matthijs Douze. +1. **[LiLT](https://huggingface.co/docs/transformers/model_doc/lilt)** (from South China University of Technology) released with the paper [LiLT: A Simple yet Effective Language-Independent Layout Transformer for Structured Document Understanding](https://arxiv.org/abs/2202.13669) by Jiapeng Wang, Lianwen Jin, Kai Ding. +1. **[LLaMA](https://huggingface.co/docs/transformers/model_doc/llama)** (from The FAIR team of Meta AI) released with the paper [LLaMA: Open and Efficient Foundation Language Models](https://arxiv.org/abs/2302.13971) by Hugo Touvron, Thibaut Lavril, Gautier Izacard, Xavier Martinet, Marie-Anne Lachaux, Timothée Lacroix, Baptiste Rozière, Naman Goyal, Eric Hambro, Faisal Azhar, Aurelien Rodriguez, Armand Joulin, Edouard Grave, Guillaume Lample. +1. 
**[Llama2](https://huggingface.co/docs/transformers/model_doc/llama2)** (from The FAIR team of Meta AI) released with the paper [Llama2: Open Foundation and Fine-Tuned Chat Models](https://ai.meta.com/research/publications/llama-2-open-foundation-and-fine-tuned-chat-models/) by Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez, Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier Martinet, Todor Mihaylov, Pushkar Mishra, Igor Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing Ellen Tan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu, Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert Stojnic, Sergey Edunov, Thomas Scialom. +1. **[LLaVa](https://huggingface.co/docs/transformers/model_doc/llava)** (from Microsoft Research & University of Wisconsin-Madison) released with the paper [Visual Instruction Tuning](https://arxiv.org/abs/2304.08485) by Haotian Liu, Chunyuan Li, Yuheng Li and Yong Jae Lee. +1. **[LLaVA-NeXT](https://huggingface.co/docs/transformers/model_doc/llava_next)** (from Microsoft Research & University of Wisconsin-Madison) released with the paper [Improved Baselines with Visual Instruction Tuning](https://arxiv.org/abs/2310.03744) by Haotian Liu, Chunyuan Li, Yuheng Li and Yong Jae Lee. +1. **[Longformer](https://huggingface.co/docs/transformers/model_doc/longformer)** (from AllenAI) released with the paper [Longformer: The Long-Document Transformer](https://arxiv.org/abs/2004.05150) by Iz Beltagy, Matthew E. Peters, Arman Cohan. +1. **[LongT5](https://huggingface.co/docs/transformers/model_doc/longt5)** (from Google AI) released with the paper [LongT5: Efficient Text-To-Text Transformer for Long Sequences](https://arxiv.org/abs/2112.07916) by Mandy Guo, Joshua Ainslie, David Uthus, Santiago Ontanon, Jianmo Ni, Yun-Hsuan Sung, Yinfei Yang. +1. **[LUKE](https://huggingface.co/docs/transformers/model_doc/luke)** (from Studio Ousia) released with the paper [LUKE: Deep Contextualized Entity Representations with Entity-aware Self-attention](https://arxiv.org/abs/2010.01057) by Ikuya Yamada, Akari Asai, Hiroyuki Shindo, Hideaki Takeda, Yuji Matsumoto. +1. **[LXMERT](https://huggingface.co/docs/transformers/model_doc/lxmert)** (from UNC Chapel Hill) released with the paper [LXMERT: Learning Cross-Modality Encoder Representations from Transformers for Open-Domain Question Answering](https://arxiv.org/abs/1908.07490) by Hao Tan and Mohit Bansal. +1. **[M-CTC-T](https://huggingface.co/docs/transformers/model_doc/mctct)** (from Facebook) released with the paper [Pseudo-Labeling For Massively Multilingual Speech Recognition](https://arxiv.org/abs/2111.00161) by Loren Lugosch, Tatiana Likhomanenko, Gabriel Synnaeve, and Ronan Collobert. +1. 
**[M2M100](https://huggingface.co/docs/transformers/model_doc/m2m_100)** (from Facebook) released with the paper [Beyond English-Centric Multilingual Machine Translation](https://arxiv.org/abs/2010.11125) by Angela Fan, Shruti Bhosale, Holger Schwenk, Zhiyi Ma, Ahmed El-Kishky, Siddharth Goyal, Mandeep Baines, Onur Celebi, Guillaume Wenzek, Vishrav Chaudhary, Naman Goyal, Tom Birch, Vitaliy Liptchinsky, Sergey Edunov, Edouard Grave, Michael Auli, Armand Joulin. +1. **[MADLAD-400](https://huggingface.co/docs/transformers/model_doc/madlad-400)** (from Google) released with the paper [MADLAD-400: A Multilingual And Document-Level Large Audited Dataset](https://arxiv.org/abs/2309.04662) by Sneha Kudugunta, Isaac Caswell, Biao Zhang, Xavier Garcia, Christopher A. Choquette-Choo, Katherine Lee, Derrick Xin, Aditya Kusupati, Romi Stella, Ankur Bapna, Orhan Firat. +1. **[Mamba](https://huggingface.co/docs/transformers/model_doc/mamba)** (from Albert Gu and Tri Dao) released with the paper [Mamba: Linear-Time Sequence Modeling with Selective State Spaces](https://arxiv.org/abs/2312.00752) by Albert Gu and Tri Dao. +1. **[MarianMT](https://huggingface.co/docs/transformers/model_doc/marian)** Machine translation models trained using [OPUS](http://opus.nlpl.eu/) data by Jörg Tiedemann. The [Marian Framework](https://marian-nmt.github.io/) is being developed by the Microsoft Translator Team. +1. **[MarkupLM](https://huggingface.co/docs/transformers/model_doc/markuplm)** (from Microsoft Research Asia) released with the paper [MarkupLM: Pre-training of Text and Markup Language for Visually-rich Document Understanding](https://arxiv.org/abs/2110.08518) by Junlong Li, Yiheng Xu, Lei Cui, Furu Wei. +1. **[Mask2Former](https://huggingface.co/docs/transformers/model_doc/mask2former)** (from FAIR and UIUC) released with the paper [Masked-attention Mask Transformer for Universal Image Segmentation](https://arxiv.org/abs/2112.01527) by Bowen Cheng, Ishan Misra, Alexander G. Schwing, Alexander Kirillov, Rohit Girdhar. +1. **[MaskFormer](https://huggingface.co/docs/transformers/model_doc/maskformer)** (from Meta and UIUC) released with the paper [Per-Pixel Classification is Not All You Need for Semantic Segmentation](https://arxiv.org/abs/2107.06278) by Bowen Cheng, Alexander G. Schwing, Alexander Kirillov. +1. **[MatCha](https://huggingface.co/docs/transformers/model_doc/matcha)** (from Google AI) released with the paper [MatCha: Enhancing Visual Language Pretraining with Math Reasoning and Chart Derendering](https://arxiv.org/abs/2212.09662) by Fangyu Liu, Francesco Piccinno, Syrine Krichene, Chenxi Pang, Kenton Lee, Mandar Joshi, Yasemin Altun, Nigel Collier, Julian Martin Eisenschlos. +1. **[mBART](https://huggingface.co/docs/transformers/model_doc/mbart)** (from Facebook) released with the paper [Multilingual Denoising Pre-training for Neural Machine Translation](https://arxiv.org/abs/2001.08210) by Yinhan Liu, Jiatao Gu, Naman Goyal, Xian Li, Sergey Edunov, Marjan Ghazvininejad, Mike Lewis, Luke Zettlemoyer. +1. **[mBART-50](https://huggingface.co/docs/transformers/model_doc/mbart)** (from Facebook) released with the paper [Multilingual Translation with Extensible Multilingual Pretraining and Finetuning](https://arxiv.org/abs/2008.00401) by Yuqing Tang, Chau Tran, Xian Li, Peng-Jen Chen, Naman Goyal, Vishrav Chaudhary, Jiatao Gu, Angela Fan. +1. 
**[MEGA](https://huggingface.co/docs/transformers/model_doc/mega)** (from Meta/USC/CMU/SJTU) released with the paper [Mega: Moving Average Equipped Gated Attention](https://arxiv.org/abs/2209.10655) by Xuezhe Ma, Chunting Zhou, Xiang Kong, Junxian He, Liangke Gui, Graham Neubig, Jonathan May, and Luke Zettlemoyer. +1. **[Megatron-BERT](https://huggingface.co/docs/transformers/model_doc/megatron-bert)** (from NVIDIA) released with the paper [Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism](https://arxiv.org/abs/1909.08053) by Mohammad Shoeybi, Mostofa Patwary, Raul Puri, Patrick LeGresley, Jared Casper and Bryan Catanzaro. +1. **[Megatron-GPT2](https://huggingface.co/docs/transformers/model_doc/megatron_gpt2)** (from NVIDIA) released with the paper [Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism](https://arxiv.org/abs/1909.08053) by Mohammad Shoeybi, Mostofa Patwary, Raul Puri, Patrick LeGresley, Jared Casper and Bryan Catanzaro. +1. **[MGP-STR](https://huggingface.co/docs/transformers/model_doc/mgp-str)** (from Alibaba Research) released with the paper [Multi-Granularity Prediction for Scene Text Recognition](https://arxiv.org/abs/2209.03592) by Peng Wang, Cheng Da, and Cong Yao. +1. **[Mistral](https://huggingface.co/docs/transformers/model_doc/mistral)** (from Mistral AI) by the [Mistral AI](https://mistral.ai) team: Albert Jiang, Alexandre Sablayrolles, Arthur Mensch, Chris Bamford, Devendra Singh Chaplot, Diego de las Casas, Florian Bressand, Gianna Lengyel, Guillaume Lample, Lélio Renard Lavaud, Lucile Saulnier, Marie-Anne Lachaux, Pierre Stock, Teven Le Scao, Thibaut Lavril, Thomas Wang, Timothée Lacroix, William El Sayed. +1. **[Mixtral](https://huggingface.co/docs/transformers/model_doc/mixtral)** (from Mistral AI) by the [Mistral AI](https://mistral.ai) team: Albert Jiang, Alexandre Sablayrolles, Arthur Mensch, Chris Bamford, Devendra Singh Chaplot, Diego de las Casas, Florian Bressand, Gianna Lengyel, Guillaume Lample, Lélio Renard Lavaud, Lucile Saulnier, Marie-Anne Lachaux, Pierre Stock, Teven Le Scao, Thibaut Lavril, Thomas Wang, Timothée Lacroix, William El Sayed. +1. **[mLUKE](https://huggingface.co/docs/transformers/model_doc/mluke)** (from Studio Ousia) released with the paper [mLUKE: The Power of Entity Representations in Multilingual Pretrained Language Models](https://arxiv.org/abs/2110.08151) by Ryokan Ri, Ikuya Yamada, and Yoshimasa Tsuruoka. +1. **[MMS](https://huggingface.co/docs/transformers/model_doc/mms)** (from Facebook) released with the paper [Scaling Speech Technology to 1,000+ Languages](https://arxiv.org/abs/2305.13516) by Vineel Pratap, Andros Tjandra, Bowen Shi, Paden Tomasello, Arun Babu, Sayani Kundu, Ali Elkahky, Zhaoheng Ni, Apoorv Vyas, Maryam Fazel-Zarandi, Alexei Baevski, Yossi Adi, Xiaohui Zhang, Wei-Ning Hsu, Alexis Conneau, Michael Auli. +1. **[MobileBERT](https://huggingface.co/docs/transformers/model_doc/mobilebert)** (from CMU/Google Brain) released with the paper [MobileBERT: a Compact Task-Agnostic BERT for Resource-Limited Devices](https://arxiv.org/abs/2004.02984) by Zhiqing Sun, Hongkun Yu, Xiaodan Song, Renjie Liu, Yiming Yang, and Denny Zhou. +1. **[MobileNetV1](https://huggingface.co/docs/transformers/model_doc/mobilenet_v1)** (from Google Inc.) released with the paper [MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications](https://arxiv.org/abs/1704.04861) by Andrew G. 
Howard, Menglong Zhu, Bo Chen, Dmitry Kalenichenko, Weijun Wang, Tobias Weyand, Marco Andreetto, Hartwig Adam. +1. **[MobileNetV2](https://huggingface.co/docs/transformers/model_doc/mobilenet_v2)** (from Google Inc.) released with the paper [MobileNetV2: Inverted Residuals and Linear Bottlenecks](https://arxiv.org/abs/1801.04381) by Mark Sandler, Andrew Howard, Menglong Zhu, Andrey Zhmoginov, Liang-Chieh Chen. +1. **[MobileViT](https://huggingface.co/docs/transformers/model_doc/mobilevit)** (from Apple) released with the paper [MobileViT: Light-weight, General-purpose, and Mobile-friendly Vision Transformer](https://arxiv.org/abs/2110.02178) by Sachin Mehta and Mohammad Rastegari. +1. **[MobileViTV2](https://huggingface.co/docs/transformers/model_doc/mobilevitv2)** (from Apple) released with the paper [Separable Self-attention for Mobile Vision Transformers](https://arxiv.org/abs/2206.02680) by Sachin Mehta and Mohammad Rastegari. +1. **[MPNet](https://huggingface.co/docs/transformers/model_doc/mpnet)** (from Microsoft Research) released with the paper [MPNet: Masked and Permuted Pre-training for Language Understanding](https://arxiv.org/abs/2004.09297) by Kaitao Song, Xu Tan, Tao Qin, Jianfeng Lu, Tie-Yan Liu. +1. **[MPT](https://huggingface.co/docs/transformers/model_doc/mpt)** (from MosaicML) released with the repository [llm-foundry](https://github.com/mosaicml/llm-foundry/) by the MosaicML NLP Team. +1. **[MRA](https://huggingface.co/docs/transformers/model_doc/mra)** (from the University of Wisconsin - Madison) released with the paper [Multi Resolution Analysis (MRA) for Approximate Self-Attention](https://arxiv.org/abs/2207.10284) by Zhanpeng Zeng, Sourav Pal, Jeffery Kline, Glenn M Fung, Vikas Singh. +1. **[MT5](https://huggingface.co/docs/transformers/model_doc/mt5)** (from Google AI) released with the paper [mT5: A massively multilingual pre-trained text-to-text transformer](https://arxiv.org/abs/2010.11934) by Linting Xue, Noah Constant, Adam Roberts, Mihir Kale, Rami Al-Rfou, Aditya Siddhant, Aditya Barua, Colin Raffel. +1. **[MusicGen](https://huggingface.co/docs/transformers/model_doc/musicgen)** (from Meta) released with the paper [Simple and Controllable Music Generation](https://arxiv.org/abs/2306.05284) by Jade Copet, Felix Kreuk, Itai Gat, Tal Remez, David Kant, Gabriel Synnaeve, Yossi Adi and Alexandre Défossez. +1. **[MusicGen Melody](https://huggingface.co/docs/transformers/model_doc/musicgen_melody)** (from Meta) released with the paper [Simple and Controllable Music Generation](https://arxiv.org/abs/2306.05284) by Jade Copet, Felix Kreuk, Itai Gat, Tal Remez, David Kant, Gabriel Synnaeve, Yossi Adi and Alexandre Défossez. +1. **[MVP](https://huggingface.co/docs/transformers/model_doc/mvp)** (from RUC AI Box) released with the paper [MVP: Multi-task Supervised Pre-training for Natural Language Generation](https://arxiv.org/abs/2206.12131) by Tianyi Tang, Junyi Li, Wayne Xin Zhao and Ji-Rong Wen. +1. **[NAT](https://huggingface.co/docs/transformers/model_doc/nat)** (from SHI Labs) released with the paper [Neighborhood Attention Transformer](https://arxiv.org/abs/2204.07143) by Ali Hassani, Steven Walton, Jiachen Li, Shen Li, and Humphrey Shi. +1. 
**[Nezha](https://huggingface.co/docs/transformers/model_doc/nezha)** (from Huawei Noah’s Ark Lab) released with the paper [NEZHA: Neural Contextualized Representation for Chinese Language Understanding](https://arxiv.org/abs/1909.00204) by Junqiu Wei, Xiaozhe Ren, Xiaoguang Li, Wenyong Huang, Yi Liao, Yasheng Wang, Jiashu Lin, Xin Jiang, Xiao Chen and Qun Liu. +1. **[NLLB](https://huggingface.co/docs/transformers/model_doc/nllb)** (from Meta) released with the paper [No Language Left Behind: Scaling Human-Centered Machine Translation](https://arxiv.org/abs/2207.04672) by the NLLB team. +1. **[NLLB-MOE](https://huggingface.co/docs/transformers/model_doc/nllb-moe)** (from Meta) released with the paper [No Language Left Behind: Scaling Human-Centered Machine Translation](https://arxiv.org/abs/2207.04672) by the NLLB team. +1. **[Nougat](https://huggingface.co/docs/transformers/model_doc/nougat)** (from Meta AI) released with the paper [Nougat: Neural Optical Understanding for Academic Documents](https://arxiv.org/abs/2308.13418) by Lukas Blecher, Guillem Cucurull, Thomas Scialom, Robert Stojnic. +1. **[Nyströmformer](https://huggingface.co/docs/transformers/model_doc/nystromformer)** (from the University of Wisconsin - Madison) released with the paper [Nyströmformer: A Nyström-Based Algorithm for Approximating Self-Attention](https://arxiv.org/abs/2102.03902) by Yunyang Xiong, Zhanpeng Zeng, Rudrasis Chakraborty, Mingxing Tan, Glenn Fung, Yin Li, Vikas Singh. +1. **[OLMo](https://huggingface.co/docs/transformers/model_doc/olmo)** (from AI2) released with the paper [OLMo: Accelerating the Science of Language Models](https://arxiv.org/abs/2402.00838) by Dirk Groeneveld, Iz Beltagy, Pete Walsh, Akshita Bhagia, Rodney Kinney, Oyvind Tafjord, Ananya Harsh Jha, Hamish Ivison, Ian Magnusson, Yizhong Wang, Shane Arora, David Atkinson, Russell Authur, Khyathi Raghavi Chandu, Arman Cohan, Jennifer Dumas, Yanai Elazar, Yuling Gu, Jack Hessel, Tushar Khot, William Merrill, Jacob Morrison, Niklas Muennighoff, Aakanksha Naik, Crystal Nam, Matthew E. Peters, Valentina Pyatkin, Abhilasha Ravichander, Dustin Schwenk, Saurabh Shah, Will Smith, Emma Strubell, Nishant Subramani, Mitchell Wortsman, Pradeep Dasigi, Nathan Lambert, Kyle Richardson, Luke Zettlemoyer, Jesse Dodge, Kyle Lo, Luca Soldaini, Noah A. Smith, Hannaneh Hajishirzi. +1. **[OneFormer](https://huggingface.co/docs/transformers/model_doc/oneformer)** (from SHI Labs) released with the paper [OneFormer: One Transformer to Rule Universal Image Segmentation](https://arxiv.org/abs/2211.06220) by Jitesh Jain, Jiachen Li, MangTik Chiu, Ali Hassani, Nikita Orlov, Humphrey Shi. +1. **[OpenLlama](https://huggingface.co/docs/transformers/model_doc/open-llama)** (from [s-JoL](https://huggingface.co/s-JoL)) released on GitHub (now removed). +1. **[OPT](https://huggingface.co/docs/transformers/master/model_doc/opt)** (from Meta AI) released with the paper [OPT: Open Pre-trained Transformer Language Models](https://arxiv.org/abs/2205.01068) by Susan Zhang, Stephen Roller, Naman Goyal, Mikel Artetxe, Moya Chen, Shuohui Chen et al. +1. 
**[OWL-ViT](https://huggingface.co/docs/transformers/model_doc/owlvit)** (from Google AI) released with the paper [Simple Open-Vocabulary Object Detection with Vision Transformers](https://arxiv.org/abs/2205.06230) by Matthias Minderer, Alexey Gritsenko, Austin Stone, Maxim Neumann, Dirk Weissenborn, Alexey Dosovitskiy, Aravindh Mahendran, Anurag Arnab, Mostafa Dehghani, Zhuoran Shen, Xiao Wang, Xiaohua Zhai, Thomas Kipf, and Neil Houlsby. +1. **[OWLv2](https://huggingface.co/docs/transformers/model_doc/owlv2)** (from Google AI) released with the paper [Scaling Open-Vocabulary Object Detection](https://arxiv.org/abs/2306.09683) by Matthias Minderer, Alexey Gritsenko, Neil Houlsby. +1. **[PatchTSMixer](https://huggingface.co/docs/transformers/model_doc/patchtsmixer)** (from IBM Research) released with the paper [TSMixer: Lightweight MLP-Mixer Model for Multivariate Time Series Forecasting](https://arxiv.org/pdf/2306.09364.pdf) by Vijay Ekambaram, Arindam Jati, Nam Nguyen, Phanwadee Sinthong, Jayant Kalagnanam. +1. **[PatchTST](https://huggingface.co/docs/transformers/model_doc/patchtst)** (from IBM) released with the paper [A Time Series is Worth 64 Words: Long-term Forecasting with Transformers](https://arxiv.org/abs/2211.14730) by Yuqi Nie, Nam H. Nguyen, Phanwadee Sinthong, Jayant Kalagnanam. +1. **[Pegasus](https://huggingface.co/docs/transformers/model_doc/pegasus)** (from Google) released with the paper [PEGASUS: Pre-training with Extracted Gap-sentences for Abstractive Summarization](https://arxiv.org/abs/1912.08777) by Jingqing Zhang, Yao Zhao, Mohammad Saleh and Peter J. Liu. +1. **[PEGASUS-X](https://huggingface.co/docs/transformers/model_doc/pegasus_x)** (from Google) released with the paper [Investigating Efficiently Extending Transformers for Long Input Summarization](https://arxiv.org/abs/2208.04347) by Jason Phang, Yao Zhao, and Peter J. Liu. +1. **[Perceiver IO](https://huggingface.co/docs/transformers/model_doc/perceiver)** (from Deepmind) released with the paper [Perceiver IO: A General Architecture for Structured Inputs & Outputs](https://arxiv.org/abs/2107.14795) by Andrew Jaegle, Sebastian Borgeaud, Jean-Baptiste Alayrac, Carl Doersch, Catalin Ionescu, David Ding, Skanda Koppula, Daniel Zoran, Andrew Brock, Evan Shelhamer, Olivier Hénaff, Matthew M. Botvinick, Andrew Zisserman, Oriol Vinyals, João Carreira. +1. **[Persimmon](https://huggingface.co/docs/transformers/model_doc/persimmon)** (from ADEPT) released in a [blog post](https://www.adept.ai/blog/persimmon-8b) by Erich Elsen, Augustus Odena, Maxwell Nye, Sağnak Taşırlar, Tri Dao, Curtis Hawthorne, Deepak Moparthi, Arushi Somani. +1. **[Phi](https://huggingface.co/docs/transformers/model_doc/phi)** (from Microsoft) released with the papers [Textbooks Are All You Need](https://arxiv.org/abs/2306.11644) by Suriya Gunasekar, Yi Zhang, Jyoti Aneja, Caio César Teodoro Mendes, Allie Del Giorno, Sivakanth Gopi, Mojan Javaheripi, Piero Kauffmann, Gustavo de Rosa, Olli Saarikivi, Adil Salim, Shital Shah, Harkirat Singh Behl, Xin Wang, Sébastien Bubeck, Ronen Eldan, Adam Tauman Kalai, Yin Tat Lee and Yuanzhi Li, and [Textbooks Are All You Need II: phi-1.5 technical report](https://arxiv.org/abs/2309.05463) by Yuanzhi Li, Sébastien Bubeck, Ronen Eldan, Allie Del Giorno, Suriya Gunasekar and Yin Tat Lee. +1. 
**[PhoBERT](https://huggingface.co/docs/transformers/model_doc/phobert)** (from VinAI Research) released with the paper [PhoBERT: Pre-trained language models for Vietnamese](https://www.aclweb.org/anthology/2020.findings-emnlp.92/) by Dat Quoc Nguyen and Anh Tuan Nguyen. +1. **[Pix2Struct](https://huggingface.co/docs/transformers/model_doc/pix2struct)** (from Google) released with the paper [Pix2Struct: Screenshot Parsing as Pretraining for Visual Language Understanding](https://arxiv.org/abs/2210.03347) by Kenton Lee, Mandar Joshi, Iulia Turc, Hexiang Hu, Fangyu Liu, Julian Eisenschlos, Urvashi Khandelwal, Peter Shaw, Ming-Wei Chang, Kristina Toutanova. +1. **[PLBart](https://huggingface.co/docs/transformers/model_doc/plbart)** (from UCLA NLP) released with the paper [Unified Pre-training for Program Understanding and Generation](https://arxiv.org/abs/2103.06333) by Wasi Uddin Ahmad, Saikat Chakraborty, Baishakhi Ray, Kai-Wei Chang. +1. **[PoolFormer](https://huggingface.co/docs/transformers/model_doc/poolformer)** (from Sea AI Labs) released with the paper [MetaFormer is Actually What You Need for Vision](https://arxiv.org/abs/2111.11418) by Yu, Weihao and Luo, Mi and Zhou, Pan and Si, Chenyang and Zhou, Yichen and Wang, Xinchao and Feng, Jiashi and Yan, Shuicheng. +1. **[Pop2Piano](https://huggingface.co/docs/transformers/model_doc/pop2piano)** released with the paper [Pop2Piano : Pop Audio-based Piano Cover Generation](https://arxiv.org/abs/2211.00895) by Jongho Choi and Kyogu Lee. +1. **[ProphetNet](https://huggingface.co/docs/transformers/model_doc/prophetnet)** (from Microsoft Research) released with the paper [ProphetNet: Predicting Future N-gram for Sequence-to-Sequence Pre-training](https://arxiv.org/abs/2001.04063) by Yu Yan, Weizhen Qi, Yeyun Gong, Dayiheng Liu, Nan Duan, Jiusheng Chen, Ruofei Zhang and Ming Zhou. +1. **[PVT](https://huggingface.co/docs/transformers/model_doc/pvt)** (from Nanjing University, The University of Hong Kong etc.) released with the paper [Pyramid Vision Transformer: A Versatile Backbone for Dense Prediction without Convolutions](https://arxiv.org/pdf/2102.12122.pdf) by Wenhai Wang, Enze Xie, Xiang Li, Deng-Ping Fan, Kaitao Song, Ding Liang, Tong Lu, Ping Luo, Ling Shao. +1. **[PVTv2](https://huggingface.co/docs/transformers/model_doc/pvt_v2)** (from Shanghai AI Laboratory, Nanjing University, The University of Hong Kong etc.) released with the paper [PVT v2: Improved Baselines with Pyramid Vision Transformer](https://arxiv.org/abs/2106.13797) by Wenhai Wang, Enze Xie, Xiang Li, Deng-Ping Fan, Kaitao Song, Ding Liang, Tong Lu, Ping Luo, Ling Shao. +1. **[QDQBert](https://huggingface.co/docs/transformers/model_doc/qdqbert)** (from NVIDIA) released with the paper [Integer Quantization for Deep Learning Inference: Principles and Empirical Evaluation](https://arxiv.org/abs/2004.09602) by Hao Wu, Patrick Judd, Xiaojie Zhang, Mikhail Isaev and Paulius Micikevicius. +1. 
**[Qwen2](https://huggingface.co/docs/transformers/model_doc/qwen2)** (from the Qwen team, Alibaba Group) released with the paper [Qwen Technical Report](https://arxiv.org/abs/2309.16609) by Jinze Bai, Shuai Bai, Yunfei Chu, Zeyu Cui, Kai Dang, Xiaodong Deng, Yang Fan, Wenbin Ge, Yu Han, Fei Huang, Binyuan Hui, Luo Ji, Mei Li, Junyang Lin, Runji Lin, Dayiheng Liu, Gao Liu, Chengqiang Lu, Keming Lu, Jianxin Ma, Rui Men, Xingzhang Ren, Xuancheng Ren, Chuanqi Tan, Sinan Tan, Jianhong Tu, Peng Wang, Shijie Wang, Wei Wang, Shengguang Wu, Benfeng Xu, Jin Xu, An Yang, Hao Yang, Jian Yang, Shusheng Yang, Yang Yao, Bowen Yu, Hongyi Yuan, Zheng Yuan, Jianwei Zhang, Xingxuan Zhang, Yichang Zhang, Zhenru Zhang, Chang Zhou, Jingren Zhou, Xiaohuan Zhou and Tianhang Zhu. +1. **[Qwen2MoE](https://huggingface.co/docs/transformers/model_doc/qwen2_moe)** (from the Qwen team, Alibaba Group) released with a [blog post](https://qwenlm.github.io/blog/qwen-moe/) by Bo Zheng, Dayiheng Liu, Rui Men, Junyang Lin, Zhou San, Bowen Yu, An Yang, Mingfeng Xue, Fei Huang, Binyuan Hui, Mei Li, Tianyu Liu, Xingzhang Ren, Xuancheng Ren, Kexin Yang, Chang Zhou, Jingren Zhou. +1. **[RAG](https://huggingface.co/docs/transformers/model_doc/rag)** (from Facebook) released with the paper [Retrieval-Augmented Generation for Knowledge-Intensive NLP Tasks](https://arxiv.org/abs/2005.11401) by Patrick Lewis, Ethan Perez, Aleksandra Piktus, Fabio Petroni, Vladimir Karpukhin, Naman Goyal, Heinrich Küttler, Mike Lewis, Wen-tau Yih, Tim Rocktäschel, Sebastian Riedel, Douwe Kiela. +1. **[REALM](https://huggingface.co/docs/transformers/model_doc/realm.html)** (from Google Research) released with the paper [REALM: Retrieval-Augmented Language Model Pre-Training](https://arxiv.org/abs/2002.08909) by Kelvin Guu, Kenton Lee, Zora Tung, Panupong Pasupat and Ming-Wei Chang. +1. **[RecurrentGemma](https://huggingface.co/docs/transformers/model_doc/recurrent-gemma)** (from Google) released with the paper [RecurrentGemma: Moving Past Transformers for Efficient Open Language Models](https://storage.googleapis.com/deepmind-media/gemma/recurrentgemma-report.pdf) by the Griffin, RLHF and Gemma Teams. +1. **[Reformer](https://huggingface.co/docs/transformers/model_doc/reformer)** (from Google Research) released with the paper [Reformer: The Efficient Transformer](https://arxiv.org/abs/2001.04451) by Nikita Kitaev, Łukasz Kaiser, Anselm Levskaya. +1. **[RegNet](https://huggingface.co/docs/transformers/model_doc/regnet)** (from Meta Platforms) released with the paper [Designing Network Design Spaces](https://arxiv.org/abs/2003.13678) by Ilija Radosavovic, Raj Prateek Kosaraju, Ross Girshick, Kaiming He, Piotr Dollár. +1. **[RemBERT](https://huggingface.co/docs/transformers/model_doc/rembert)** (from Google Research) released with the paper [Rethinking embedding coupling in pre-trained language models](https://arxiv.org/abs/2010.12821) by Hyung Won Chung, Thibault Févry, Henry Tsai, M. Johnson, Sebastian Ruder. +1. **[ResNet](https://huggingface.co/docs/transformers/model_doc/resnet)** (from Microsoft Research) released with the paper [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) by Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun. +1. 
**[RoBERTa](https://huggingface.co/docs/transformers/model_doc/roberta)** (from Facebook), released together with the paper [RoBERTa: A Robustly Optimized BERT Pretraining Approach](https://arxiv.org/abs/1907.11692) by Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer, Veselin Stoyanov. +1. **[RoBERTa-PreLayerNorm](https://huggingface.co/docs/transformers/model_doc/roberta-prelayernorm)** (from Facebook) released with the paper [fairseq: A Fast, Extensible Toolkit for Sequence Modeling](https://arxiv.org/abs/1904.01038) by Myle Ott, Sergey Edunov, Alexei Baevski, Angela Fan, Sam Gross, Nathan Ng, David Grangier, Michael Auli. +1. **[RoCBert](https://huggingface.co/docs/transformers/model_doc/roc_bert)** (from WeChatAI) released with the paper [RoCBert: Robust Chinese Bert with Multimodal Contrastive Pretraining](https://aclanthology.org/2022.acl-long.65.pdf) by Hui Su, Weiwei Shi, Xiaoyu Shen, Xiao Zhou, Tuo Ji, Jiarui Fang, Jie Zhou. +1. **[RoFormer](https://huggingface.co/docs/transformers/model_doc/roformer)** (from ZhuiyiTechnology), released together with the paper [RoFormer: Enhanced Transformer with Rotary Position Embedding](https://arxiv.org/abs/2104.09864) by Jianlin Su and Yu Lu and Shengfeng Pan and Bo Wen and Yunfeng Liu. +1. **[RWKV](https://huggingface.co/docs/transformers/model_doc/rwkv)** (from Bo Peng), released on [this repo](https://github.com/BlinkDL/RWKV-LM) by Bo Peng. +1. **[SeamlessM4T](https://huggingface.co/docs/transformers/model_doc/seamless_m4t)** (from Meta AI) released with the paper [SeamlessM4T — Massively Multilingual & Multimodal Machine Translation](https://dl.fbaipublicfiles.com/seamless/seamless_m4t_paper.pdf) by the Seamless Communication team. +1. **[SeamlessM4Tv2](https://huggingface.co/docs/transformers/model_doc/seamless_m4t_v2)** (from Meta AI) released with the paper [Seamless: Multilingual Expressive and Streaming Speech Translation](https://ai.meta.com/research/publications/seamless-multilingual-expressive-and-streaming-speech-translation/) by the Seamless Communication team. +1. **[SegFormer](https://huggingface.co/docs/transformers/model_doc/segformer)** (from NVIDIA) released with the paper [SegFormer: Simple and Efficient Design for Semantic Segmentation with Transformers](https://arxiv.org/abs/2105.15203) by Enze Xie, Wenhai Wang, Zhiding Yu, Anima Anandkumar, Jose M. Alvarez, Ping Luo. +1. **[SegGPT](https://huggingface.co/docs/transformers/model_doc/seggpt)** (from Beijing Academy of Artificial Intelligence (BAAI)) released with the paper [SegGPT: Segmenting Everything In Context](https://arxiv.org/abs/2304.03284) by Xinlong Wang, Xiaosong Zhang, Yue Cao, Wen Wang, Chunhua Shen, Tiejun Huang. +1. **[Segment Anything](https://huggingface.co/docs/transformers/model_doc/sam)** (from Meta AI) released with the paper [Segment Anything](https://arxiv.org/pdf/2304.02643v1.pdf) by Alexander Kirillov, Eric Mintun, Nikhila Ravi, Hanzi Mao, Chloe Rolland, Laura Gustafson, Tete Xiao, Spencer Whitehead, Alex Berg, Wan-Yen Lo, Piotr Dollar, Ross Girshick. +1. **[SEW](https://huggingface.co/docs/transformers/model_doc/sew)** (from ASAPP) released with the paper [Performance-Efficiency Trade-offs in Unsupervised Pre-training for Speech Recognition](https://arxiv.org/abs/2109.06870) by Felix Wu, Kwangyoun Kim, Jing Pan, Kyu Han, Kilian Q. Weinberger, Yoav Artzi. +1. 
**[SEW-D](https://huggingface.co/docs/transformers/model_doc/sew_d)** (from ASAPP) released with the paper [Performance-Efficiency Trade-offs in Unsupervised Pre-training for Speech Recognition](https://arxiv.org/abs/2109.06870) by Felix Wu, Kwangyoun Kim, Jing Pan, Kyu Han, Kilian Q. Weinberger, Yoav Artzi. +1. **[SigLIP](https://huggingface.co/docs/transformers/model_doc/siglip)** (from Google AI) released with the paper [Sigmoid Loss for Language Image Pre-Training](https://arxiv.org/abs/2303.15343) by Xiaohua Zhai, Basil Mustafa, Alexander Kolesnikov, Lucas Beyer. +1. **[SpeechT5](https://huggingface.co/docs/transformers/model_doc/speecht5)** (from Microsoft Research) released with the paper [SpeechT5: Unified-Modal Encoder-Decoder Pre-Training for Spoken Language Processing](https://arxiv.org/abs/2110.07205) by Junyi Ao, Rui Wang, Long Zhou, Chengyi Wang, Shuo Ren, Yu Wu, Shujie Liu, Tom Ko, Qing Li, Yu Zhang, Zhihua Wei, Yao Qian, Jinyu Li, Furu Wei. +1. **[SpeechToTextTransformer](https://huggingface.co/docs/transformers/model_doc/speech_to_text)** (from Facebook), released together with the paper [fairseq S2T: Fast Speech-to-Text Modeling with fairseq](https://arxiv.org/abs/2010.05171) by Changhan Wang, Yun Tang, Xutai Ma, Anne Wu, Dmytro Okhonko, Juan Pino. +1. **[SpeechToTextTransformer2](https://huggingface.co/docs/transformers/model_doc/speech_to_text_2)** (from Facebook), released together with the paper [Large-Scale Self- and Semi-Supervised Learning for Speech Translation](https://arxiv.org/abs/2104.06678) by Changhan Wang, Anne Wu, Juan Pino, Alexei Baevski, Michael Auli, Alexis Conneau. +1. **[Splinter](https://huggingface.co/docs/transformers/model_doc/splinter)** (from Tel Aviv University), released together with the paper [Few-Shot Question Answering by Pretraining Span Selection](https://arxiv.org/abs/2101.00438) by Ori Ram, Yuval Kirstain, Jonathan Berant, Amir Globerson, Omer Levy. +1. **[SqueezeBERT](https://huggingface.co/docs/transformers/model_doc/squeezebert)** (from Berkeley) released with the paper [SqueezeBERT: What can computer vision teach NLP about efficient neural networks?](https://arxiv.org/abs/2006.11316) by Forrest N. Iandola, Albert E. Shaw, Ravi Krishna, and Kurt W. Keutzer. +1. **[StableLm](https://huggingface.co/docs/transformers/model_doc/stablelm)** (from Stability AI) released with the paper [StableLM 3B 4E1T (Technical Report)](https://stability.wandb.io/stability-llm/stable-lm/reports/StableLM-3B-4E1T--VmlldzoyMjU4?accessToken=u3zujipenkx5g7rtcj9qojjgxpconyjktjkli2po09nffrffdhhchq045vp0wyfo) by Jonathan Tow, Marco Bellagente, Dakota Mahan, Carlos Riquelme Ruiz, Duy Phung, Maksym Zhuravinskyi, Nathan Cooper, Nikhil Pinnaparaju, Reshinth Adithyan, and James Baicoianu. +1. 
**[Starcoder2](https://huggingface.co/docs/transformers/model_doc/starcoder2)** (from BigCode team) released with the paper [StarCoder 2 and The Stack v2: The Next Generation](https://arxiv.org/abs/2402.19173) by Anton Lozhkov, Raymond Li, Loubna Ben Allal, Federico Cassano, Joel Lamy-Poirier, Nouamane Tazi, Ao Tang, Dmytro Pykhtar, Jiawei Liu, Yuxiang Wei, Tianyang Liu, Max Tian, Denis Kocetkov, Arthur Zucker, Younes Belkada, Zijian Wang, Qian Liu, Dmitry Abulkhanov, Indraneil Paul, Zhuang Li, Wen-Ding Li, Megan Risdal, Jia Li, Jian Zhu, Terry Yue Zhuo, Evgenii Zheltonozhskii, Nii Osae Osae Dade, Wenhao Yu, Lucas Krauß, Naman Jain, Yixuan Su, Xuanli He, Manan Dey, Edoardo Abati, Yekun Chai, Niklas Muennighoff, Xiangru Tang, Muhtasham Oblokulov, Christopher Akiki, Marc Marone, Chenghao Mou, Mayank Mishra, Alex Gu, Binyuan Hui, Tri Dao, Armel Zebaze, Olivier Dehaene, Nicolas Patry, Canwen Xu, Julian McAuley, Han Hu, Torsten Scholak, Sebastien Paquet, Jennifer Robinson, Carolyn Jane Anderson, Nicolas Chapados, Mostofa Patwary, Nima Tajbakhsh, Yacine Jernite, Carlos Muñoz Ferrandis, Lingming Zhang, Sean Hughes, Thomas Wolf, Arjun Guha, Leandro von Werra, and Harm de Vries. +1. **[SuperPoint](https://huggingface.co/docs/transformers/model_doc/superpoint)** (from MagicLeap) released with the paper [SuperPoint: Self-Supervised Interest Point Detection and Description](https://arxiv.org/abs/1712.07629) by Daniel DeTone, Tomasz Malisiewicz and Andrew Rabinovich. +1. **[SwiftFormer](https://huggingface.co/docs/transformers/model_doc/swiftformer)** (from MBZUAI) released with the paper [SwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications](https://arxiv.org/abs/2303.15446) by Abdelrahman Shaker, Muhammad Maaz, Hanoona Rasheed, Salman Khan, Ming-Hsuan Yang, Fahad Shahbaz Khan. +1. **[Swin Transformer](https://huggingface.co/docs/transformers/model_doc/swin)** (from Microsoft) released with the paper [Swin Transformer: Hierarchical Vision Transformer using Shifted Windows](https://arxiv.org/abs/2103.14030) by Ze Liu, Yutong Lin, Yue Cao, Han Hu, Yixuan Wei, Zheng Zhang, Stephen Lin, Baining Guo. +1. **[Swin Transformer V2](https://huggingface.co/docs/transformers/model_doc/swinv2)** (from Microsoft) released with the paper [Swin Transformer V2: Scaling Up Capacity and Resolution](https://arxiv.org/abs/2111.09883) by Ze Liu, Han Hu, Yutong Lin, Zhuliang Yao, Zhenda Xie, Yixuan Wei, Jia Ning, Yue Cao, Zheng Zhang, Li Dong, Furu Wei, Baining Guo. +1. **[Swin2SR](https://huggingface.co/docs/transformers/model_doc/swin2sr)** (from University of Würzburg) released with the paper [Swin2SR: SwinV2 Transformer for Compressed Image Super-Resolution and Restoration](https://arxiv.org/abs/2209.11345) by Marcos V. Conde, Ui-Jin Choi, Maxime Burchi, Radu Timofte. +1. **[SwitchTransformers](https://huggingface.co/docs/transformers/model_doc/switch_transformers)** (from Google) released with the paper [Switch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity](https://arxiv.org/abs/2101.03961) by William Fedus, Barret Zoph, Noam Shazeer. +1. **[T5](https://huggingface.co/docs/transformers/model_doc/t5)** (from Google AI) released with the paper [Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer](https://arxiv.org/abs/1910.10683) by Colin Raffel and Noam Shazeer and Adam Roberts and Katherine Lee and Sharan Narang and Michael Matena and Yanqi Zhou and Wei Li and Peter J. Liu. +1. 
**[T5v1.1](https://huggingface.co/docs/transformers/model_doc/t5v1.1)** (from Google AI) released in the repository [google-research/text-to-text-transfer-transformer](https://github.com/google-research/text-to-text-transfer-transformer/blob/main/released_checkpoints.md#t511) by Colin Raffel and Noam Shazeer and Adam Roberts and Katherine Lee and Sharan Narang and Michael Matena and Yanqi Zhou and Wei Li and Peter J. Liu. +1. **[Table Transformer](https://huggingface.co/docs/transformers/model_doc/table-transformer)** (from Microsoft Research) released with the paper [PubTables-1M: Towards Comprehensive Table Extraction From Unstructured Documents](https://arxiv.org/abs/2110.00061) by Brandon Smock, Rohith Pesala, Robin Abraham. +1. **[TAPAS](https://huggingface.co/docs/transformers/model_doc/tapas)** (from Google AI) released with the paper [TAPAS: Weakly Supervised Table Parsing via Pre-training](https://arxiv.org/abs/2004.02349) by Jonathan Herzig, Paweł Krzysztof Nowak, Thomas Müller, Francesco Piccinno and Julian Martin Eisenschlos. +1. **[TAPEX](https://huggingface.co/docs/transformers/model_doc/tapex)** (from Microsoft Research) released with the paper [TAPEX: Table Pre-training via Learning a Neural SQL Executor](https://arxiv.org/abs/2107.07653) by Qian Liu, Bei Chen, Jiaqi Guo, Morteza Ziyadi, Zeqi Lin, Weizhu Chen, Jian-Guang Lou. +1. **[Time Series Transformer](https://huggingface.co/docs/transformers/model_doc/time_series_transformer)** (from Hugging Face). +1. **[TimeSformer](https://huggingface.co/docs/transformers/model_doc/timesformer)** (from Facebook) released with the paper [Is Space-Time Attention All You Need for Video Understanding?](https://arxiv.org/abs/2102.05095) by Gedas Bertasius, Heng Wang, Lorenzo Torresani. +1. **[Trajectory Transformer](https://huggingface.co/docs/transformers/model_doc/trajectory_transformers)** (from the University of California at Berkeley) released with the paper [Offline Reinforcement Learning as One Big Sequence Modeling Problem](https://arxiv.org/abs/2106.02039) by Michael Janner, Qiyang Li, Sergey Levine. +1. **[Transformer-XL](https://huggingface.co/docs/transformers/model_doc/transfo-xl)** (from Google/CMU) released with the paper [Transformer-XL: Attentive Language Models Beyond a Fixed-Length Context](https://arxiv.org/abs/1901.02860) by Zihang Dai*, Zhilin Yang*, Yiming Yang, Jaime Carbonell, Quoc V. Le, Ruslan Salakhutdinov. +1. **[TrOCR](https://huggingface.co/docs/transformers/model_doc/trocr)** (from Microsoft), released together with the paper [TrOCR: Transformer-based Optical Character Recognition with Pre-trained Models](https://arxiv.org/abs/2109.10282) by Minghao Li, Tengchao Lv, Lei Cui, Yijuan Lu, Dinei Florencio, Cha Zhang, Zhoujun Li, Furu Wei. +1. **[TVLT](https://huggingface.co/docs/transformers/model_doc/tvlt)** (from UNC Chapel Hill) released with the paper [TVLT: Textless Vision-Language Transformer](https://arxiv.org/abs/2209.14156) by Zineng Tang, Jaemin Cho, Yixin Nie, Mohit Bansal. +1. **[TVP](https://huggingface.co/docs/transformers/model_doc/tvp)** (from Intel) released with the paper [Text-Visual Prompting for Efficient 2D Temporal Video Grounding](https://arxiv.org/abs/2303.04995) by Yimeng Zhang, Xin Chen, Jinghan Jia, Sijia Liu, Ke Ding. +1. 
**[UDOP](https://huggingface.co/docs/transformers/model_doc/udop)** (from Microsoft Research) released with the paper [Unifying Vision, Text, and Layout for Universal Document Processing](https://arxiv.org/abs/2212.02623) by Zineng Tang, Ziyi Yang, Guoxin Wang, Yuwei Fang, Yang Liu, Chenguang Zhu, Michael Zeng, Cha Zhang, Mohit Bansal. +1. **[UL2](https://huggingface.co/docs/transformers/model_doc/ul2)** (from Google Research) released with the paper [Unifying Language Learning Paradigms](https://arxiv.org/abs/2205.05131v1) by Yi Tay, Mostafa Dehghani, Vinh Q. Tran, Xavier Garcia, Dara Bahri, Tal Schuster, Huaixiu Steven Zheng, Neil Houlsby, Donald Metzler. +1. **[UMT5](https://huggingface.co/docs/transformers/model_doc/umt5)** (from Google Research) released with the paper [UniMax: Fairer and More Effective Language Sampling for Large-Scale Multilingual Pretraining](https://openreview.net/forum?id=kXwdL1cWOAi) by Hyung Won Chung, Xavier Garcia, Adam Roberts, Yi Tay, Orhan Firat, Sharan Narang, Noah Constant. +1. **[UniSpeech](https://huggingface.co/docs/transformers/model_doc/unispeech)** (from Microsoft Research) released with the paper [UniSpeech: Unified Speech Representation Learning with Labeled and Unlabeled Data](https://arxiv.org/abs/2101.07597) by Chengyi Wang, Yu Wu, Yao Qian, Kenichi Kumatani, Shujie Liu, Furu Wei, Michael Zeng, Xuedong Huang. +1. **[UniSpeechSat](https://huggingface.co/docs/transformers/model_doc/unispeech-sat)** (from Microsoft Research) released with the paper [UNISPEECH-SAT: UNIVERSAL SPEECH REPRESENTATION LEARNING WITH SPEAKER AWARE PRE-TRAINING](https://arxiv.org/abs/2110.05752) by Sanyuan Chen, Yu Wu, Chengyi Wang, Zhengyang Chen, Zhuo Chen, Shujie Liu, Jian Wu, Yao Qian, Furu Wei, Jinyu Li, Xiangzhan Yu. +1. **[UnivNet](https://huggingface.co/docs/transformers/model_doc/univnet)** (from Kakao Corporation) released with the paper [UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation](https://arxiv.org/abs/2106.07889) by Won Jang, Dan Lim, Jaesam Yoon, Bongwan Kim, and Juntae Kim. +1. **[UPerNet](https://huggingface.co/docs/transformers/model_doc/upernet)** (from Peking University) released with the paper [Unified Perceptual Parsing for Scene Understanding](https://arxiv.org/abs/1807.10221) by Tete Xiao, Yingcheng Liu, Bolei Zhou, Yuning Jiang, Jian Sun. +1. **[VAN](https://huggingface.co/docs/transformers/model_doc/van)** (from Tsinghua University and Nankai University) released with the paper [Visual Attention Network](https://arxiv.org/abs/2202.09741) by Meng-Hao Guo, Cheng-Ze Lu, Zheng-Ning Liu, Ming-Ming Cheng, Shi-Min Hu. +1. **[VideoMAE](https://huggingface.co/docs/transformers/model_doc/videomae)** (from Multimedia Computing Group, Nanjing University) released with the paper [VideoMAE: Masked Autoencoders are Data-Efficient Learners for Self-Supervised Video Pre-Training](https://arxiv.org/abs/2203.12602) by Zhan Tong, Yibing Song, Jue Wang, Limin Wang. +1. **[ViLT](https://huggingface.co/docs/transformers/model_doc/vilt)** (from NAVER AI Lab/Kakao Enterprise/Kakao Brain) released with the paper [ViLT: Vision-and-Language Transformer Without Convolution or Region Supervision](https://arxiv.org/abs/2102.03334) by Wonjae Kim, Bokyung Son, Ildoo Kim. +1. 
**[VipLlava](https://huggingface.co/docs/transformers/model_doc/vipllava)** (from University of Wisconsin–Madison) released with the paper [Making Large Multimodal Models Understand Arbitrary Visual Prompts](https://arxiv.org/abs/2312.00784) by Mu Cai, Haotian Liu, Siva Karthik Mustikovela, Gregory P. Meyer, Yuning Chai, Dennis Park, Yong Jae Lee. +1. **[Vision Transformer (ViT)](https://huggingface.co/docs/transformers/model_doc/vit)** (from Google AI) released with the paper [An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale](https://arxiv.org/abs/2010.11929) by Alexey Dosovitskiy, Lucas Beyer, Alexander Kolesnikov, Dirk Weissenborn, Xiaohua Zhai, Thomas Unterthiner, Mostafa Dehghani, Matthias Minderer, Georg Heigold, Sylvain Gelly, Jakob Uszkoreit, Neil Houlsby. +1. **[VisualBERT](https://huggingface.co/docs/transformers/model_doc/visual_bert)** (from UCLA NLP) released with the paper [VisualBERT: A Simple and Performant Baseline for Vision and Language](https://arxiv.org/pdf/1908.03557) by Liunian Harold Li, Mark Yatskar, Da Yin, Cho-Jui Hsieh, Kai-Wei Chang. +1. **[ViT Hybrid](https://huggingface.co/docs/transformers/model_doc/vit_hybrid)** (from Google AI) released with the paper [An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale](https://arxiv.org/abs/2010.11929) by Alexey Dosovitskiy, Lucas Beyer, Alexander Kolesnikov, Dirk Weissenborn, Xiaohua Zhai, Thomas Unterthiner, Mostafa Dehghani, Matthias Minderer, Georg Heigold, Sylvain Gelly, Jakob Uszkoreit, Neil Houlsby. +1. **[VitDet](https://huggingface.co/docs/transformers/model_doc/vitdet)** (from Meta AI) released with the paper [Exploring Plain Vision Transformer Backbones for Object Detection](https://arxiv.org/abs/2203.16527) by Yanghao Li, Hanzi Mao, Ross Girshick, Kaiming He. +1. **[ViTMAE](https://huggingface.co/docs/transformers/model_doc/vit_mae)** (from Meta AI) released with the paper [Masked Autoencoders Are Scalable Vision Learners](https://arxiv.org/abs/2111.06377) by Kaiming He, Xinlei Chen, Saining Xie, Yanghao Li, Piotr Dollár, Ross Girshick. +1. **[ViTMatte](https://huggingface.co/docs/transformers/model_doc/vitmatte)** (from HUST-VL) released with the paper [ViTMatte: Boosting Image Matting with Pretrained Plain Vision Transformers](https://arxiv.org/abs/2305.15272) by Jingfeng Yao, Xinggang Wang, Shusheng Yang, Baoyuan Wang. +1. **[ViTMSN](https://huggingface.co/docs/transformers/model_doc/vit_msn)** (from Meta AI) released with the paper [Masked Siamese Networks for Label-Efficient Learning](https://arxiv.org/abs/2204.07141) by Mahmoud Assran, Mathilde Caron, Ishan Misra, Piotr Bojanowski, Florian Bordes, Pascal Vincent, Armand Joulin, Michael Rabbat, Nicolas Ballas. +1. **[VITS](https://huggingface.co/docs/transformers/model_doc/vits)** (from Kakao Enterprise) released with the paper [Conditional Variational Autoencoder with Adversarial Learning for End-to-End Text-to-Speech](https://arxiv.org/abs/2106.06103) by Jaehyeon Kim, Jungil Kong, Juhee Son. +1. **[ViViT](https://huggingface.co/docs/transformers/model_doc/vivit)** (from Google Research) released with the paper [ViViT: A Video Vision Transformer](https://arxiv.org/abs/2103.15691) by Anurag Arnab, Mostafa Dehghani, Georg Heigold, Chen Sun, Mario Lučić, Cordelia Schmid. +1. 
**[Wav2Vec2](https://huggingface.co/docs/transformers/model_doc/wav2vec2)** (from Facebook AI) released with the paper [wav2vec 2.0: A Framework for Self-Supervised Learning of Speech Representations](https://arxiv.org/abs/2006.11477) by Alexei Baevski, Henry Zhou, Abdelrahman Mohamed, Michael Auli. +1. **[Wav2Vec2-BERT](https://huggingface.co/docs/transformers/model_doc/wav2vec2-bert)** (from Meta AI) released with the paper [Seamless: Multilingual Expressive and Streaming Speech Translation](https://ai.meta.com/research/publications/seamless-multilingual-expressive-and-streaming-speech-translation/) by the Seamless Communication team. +1. **[Wav2Vec2-Conformer](https://huggingface.co/docs/transformers/model_doc/wav2vec2-conformer)** (from Facebook AI) released with the paper [FAIRSEQ S2T: Fast Speech-to-Text Modeling with FAIRSEQ](https://arxiv.org/abs/2010.05171) by Changhan Wang, Yun Tang, Xutai Ma, Anne Wu, Sravya Popuri, Dmytro Okhonko, Juan Pino. +1. **[Wav2Vec2Phoneme](https://huggingface.co/docs/transformers/model_doc/wav2vec2_phoneme)** (from Facebook AI) released with the paper [Simple and Effective Zero-shot Cross-lingual Phoneme Recognition](https://arxiv.org/abs/2109.11680) by Qiantong Xu, Alexei Baevski, Michael Auli. +1. **[WavLM](https://huggingface.co/docs/transformers/model_doc/wavlm)** (from Microsoft Research) released with the paper [WavLM: Large-Scale Self-Supervised Pre-Training for Full Stack Speech Processing](https://arxiv.org/abs/2110.13900) by Sanyuan Chen, Chengyi Wang, Zhengyang Chen, Yu Wu, Shujie Liu, Zhuo Chen, Jinyu Li, Naoyuki Kanda, Takuya Yoshioka, Xiong Xiao, Jian Wu, Long Zhou, Shuo Ren, Yanmin Qian, Yao Qian, Jian Wu, Michael Zeng, Furu Wei. +1. **[Whisper](https://huggingface.co/docs/transformers/model_doc/whisper)** (from OpenAI) released with the paper [Robust Speech Recognition via Large-Scale Weak Supervision](https://cdn.openai.com/papers/whisper.pdf) by Alec Radford, Jong Wook Kim, Tao Xu, Greg Brockman, Christine McLeavey, Ilya Sutskever. +1. **[X-CLIP](https://huggingface.co/docs/transformers/model_doc/xclip)** (from Microsoft Research) released with the paper [Expanding Language-Image Pretrained Models for General Video Recognition](https://arxiv.org/abs/2208.02816) by Bolin Ni, Houwen Peng, Minghao Chen, Songyang Zhang, Gaofeng Meng, Jianlong Fu, Shiming Xiang, Haibin Ling. +1. **[X-MOD](https://huggingface.co/docs/transformers/model_doc/xmod)** (from Meta AI) released with the paper [Lifting the Curse of Multilinguality by Pre-training Modular Transformers](http://dx.doi.org/10.18653/v1/2022.naacl-main.255) by Jonas Pfeiffer, Naman Goyal, Xi Lin, Xian Li, James Cross, Sebastian Riedel, Mikel Artetxe. +1. **[XGLM](https://huggingface.co/docs/transformers/model_doc/xglm)** (from Facebook AI) released with the paper [Few-shot Learning with Multilingual Language Models](https://arxiv.org/abs/2112.10668) by Xi Victoria Lin, Todor Mihaylov, Mikel Artetxe, Tianlu Wang, Shuohui Chen, Daniel Simig, Myle Ott, Naman Goyal, Shruti Bhosale, Jingfei Du, Ramakanth Pasunuru, Sam Shleifer, Punit Singh Koura, Vishrav Chaudhary, Brian O'Horo, Jeff Wang, Luke Zettlemoyer, Zornitsa Kozareva, Mona Diab, Veselin Stoyanov, Xian Li. +1. **[XLM](https://huggingface.co/docs/transformers/model_doc/xlm)** (from Facebook) released together with the paper [Cross-lingual Language Model Pretraining](https://arxiv.org/abs/1901.07291) by Guillaume Lample and Alexis Conneau. +1. 
**[XLM-ProphetNet](https://huggingface.co/docs/transformers/model_doc/xlm-prophetnet)** (from Microsoft Research) released with the paper [ProphetNet: Predicting Future N-gram for Sequence-to-Sequence Pre-training](https://arxiv.org/abs/2001.04063) by Yu Yan, Weizhen Qi, Yeyun Gong, Dayiheng Liu, Nan Duan, Jiusheng Chen, Ruofei Zhang and Ming Zhou. +1. **[XLM-RoBERTa](https://huggingface.co/docs/transformers/model_doc/xlm-roberta)** (from Facebook AI), released together with the paper [Unsupervised Cross-lingual Representation Learning at Scale](https://arxiv.org/abs/1911.02116) by Alexis Conneau*, Kartikay Khandelwal*, Naman Goyal, Vishrav Chaudhary, Guillaume Wenzek, Francisco Guzmán, Edouard Grave, Myle Ott, Luke Zettlemoyer and Veselin Stoyanov. +1. **[XLM-RoBERTa-XL](https://huggingface.co/docs/transformers/model_doc/xlm-roberta-xl)** (from Facebook AI), released together with the paper [Larger-Scale Transformers for Multilingual Masked Language Modeling](https://arxiv.org/abs/2105.00572) by Naman Goyal, Jingfei Du, Myle Ott, Giri Anantharaman, Alexis Conneau. +1. **[XLM-V](https://huggingface.co/docs/transformers/model_doc/xlm-v)** (from Meta AI) released with the paper [XLM-V: Overcoming the Vocabulary Bottleneck in Multilingual Masked Language Models](https://arxiv.org/abs/2301.10472) by Davis Liang, Hila Gonen, Yuning Mao, Rui Hou, Naman Goyal, Marjan Ghazvininejad, Luke Zettlemoyer, Madian Khabsa. +1. **[XLNet](https://huggingface.co/docs/transformers/model_doc/xlnet)** (from Google/CMU) released with the paper [XLNet: Generalized Autoregressive Pretraining for Language Understanding](https://arxiv.org/abs/1906.08237) by Zhilin Yang*, Zihang Dai*, Yiming Yang, Jaime Carbonell, Ruslan Salakhutdinov, Quoc V. Le. +1. **[XLS-R](https://huggingface.co/docs/transformers/model_doc/xls_r)** (from Facebook AI) released with the paper [XLS-R: Self-supervised Cross-lingual Speech Representation Learning at Scale](https://arxiv.org/abs/2111.09296) by Arun Babu, Changhan Wang, Andros Tjandra, Kushal Lakhotia, Qiantong Xu, Naman Goyal, Kritika Singh, Patrick von Platen, Yatharth Saraf, Juan Pino, Alexei Baevski, Alexis Conneau, Michael Auli. +1. **[XLSR-Wav2Vec2](https://huggingface.co/docs/transformers/model_doc/xlsr_wav2vec2)** (from Facebook AI) released with the paper [Unsupervised Cross-Lingual Representation Learning For Speech Recognition](https://arxiv.org/abs/2006.13979) by Alexis Conneau, Alexei Baevski, Ronan Collobert, Abdelrahman Mohamed, Michael Auli. +1. **[YOLOS](https://huggingface.co/docs/transformers/model_doc/yolos)** (from Huazhong University of Science & Technology) released with the paper [You Only Look at One Sequence: Rethinking Transformer in Vision through Object Detection](https://arxiv.org/abs/2106.00666) by Yuxin Fang, Bencheng Liao, Xinggang Wang, Jiemin Fang, Jiyang Qi, Rui Wu, Jianwei Niu, Wenyu Liu. +1. **[YOSO](https://huggingface.co/docs/transformers/model_doc/yoso)** (from the University of Wisconsin - Madison) released with the paper [You Only Sample (Almost) Once: Linear Cost Self-Attention Via Bernoulli Sampling](https://arxiv.org/abs/2111.09714) by Zhanpeng Zeng, Yunyang Xiong, Sathya N. Ravi, Shailesh Acharya, Glenn Fung, Vikas Singh. +1. Want to contribute a new model? We have added a **detailed guide and templates** to guide you in the process of adding a new model. You can find them in the [`templates`](./templates) folder of the repository. 
Be sure to check the [contributing guidelines](./CONTRIBUTING.md) and contact the maintainers or open an issue to collect feedback before starting your PR. + +To check if each model has an implementation in Flax, PyTorch or TensorFlow, or has an associated tokenizer backed by the 🤗 Tokenizers library, refer to [this table](https://huggingface.co/docs/transformers/index#supported-frameworks). + +These implementations have been tested on several datasets (see the example scripts) and should match the performance of the original implementations. You can find more details on performance in the Examples section of the [documentation](https://github.com/huggingface/transformers/tree/main/examples). + + +## Learn more + +| Section | Description | +|-|-| +| [Documentation](https://huggingface.co/docs/transformers/) | Full API documentation and tutorials | +| [Task summary](https://huggingface.co/docs/transformers/task_summary) | Tasks supported by 🤗 Transformers | +| [Preprocessing tutorial](https://huggingface.co/docs/transformers/preprocessing) | Using the `Tokenizer` class to prepare data for the models | +| [Training and fine-tuning](https://huggingface.co/docs/transformers/training) | Using the models provided by 🤗 Transformers in a PyTorch/TensorFlow training loop and the `Trainer` API | +| [Quick tour: Fine-tuning/usage scripts](https://github.com/huggingface/transformers/tree/main/examples) | Example scripts for fine-tuning models on a wide range of tasks | +| [Model sharing and uploading](https://huggingface.co/docs/transformers/model_sharing) | Upload and share your fine-tuned models with the community | + +## Citation + +We now have a [paper](https://www.aclweb.org/anthology/2020.emnlp-demos.6/) you can cite for the 🤗 Transformers library: +```bibtex +@inproceedings{wolf-etal-2020-transformers, + title = "Transformers: State-of-the-Art Natural Language Processing", + author = "Thomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. 
Rush", + booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations", + month = oct, + year = "2020", + address = "Online", + publisher = "Association for Computational Linguistics", + url = "https://www.aclweb.org/anthology/2020.emnlp-demos.6", + pages = "38--45" +} +``` diff --git a/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..c7986054dfce84e9e60253b38139052f6f79883b --- /dev/null +++ b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/RECORD @@ -0,0 +1,3309 @@ +../../../bin/transformers-cli,sha256=R6msCH788bH-_4bWmK9GgcJsiiIcz5BfrZR42wc7zF4,266 +transformers-4.40.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +transformers-4.40.1.dist-info/LICENSE,sha256=d_1HEN757DwPYiWADgI18VpCWr1KiwNVkSf814JhIEk,11418 +transformers-4.40.1.dist-info/METADATA,sha256=Dh24LVBcPQN9VxgenNOjQPbsSU0ty_YErjGeWqrR8nA,137978 +transformers-4.40.1.dist-info/RECORD,, +transformers-4.40.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +transformers-4.40.1.dist-info/entry_points.txt,sha256=kgdW_0F_tXNrWKSZXKWKeUD_LqVgcji9j7atGXve8z4,81 +transformers-4.40.1.dist-info/top_level.txt,sha256=GLBaeTo_CSdhnHvbxQ0kzpEHdlLuA_33foIogaWxntI,13 +transformers/__init__.py,sha256=I9TlFMq2iw-kseOF5e2ojv2eT-9RX_1Ppyph-Z33pVc,331699 +transformers/__pycache__/__init__.cpython-310.pyc,, +transformers/__pycache__/activations.cpython-310.pyc,, +transformers/__pycache__/activations_tf.cpython-310.pyc,, +transformers/__pycache__/audio_utils.cpython-310.pyc,, +transformers/__pycache__/cache_utils.cpython-310.pyc,, +transformers/__pycache__/configuration_utils.cpython-310.pyc,, +transformers/__pycache__/convert_graph_to_onnx.cpython-310.pyc,, +transformers/__pycache__/convert_pytorch_checkpoint_to_tf2.cpython-310.pyc,, +transformers/__pycache__/convert_slow_tokenizer.cpython-310.pyc,, +transformers/__pycache__/convert_slow_tokenizers_checkpoints_to_fast.cpython-310.pyc,, +transformers/__pycache__/convert_tf_hub_seq_to_seq_bert_to_pytorch.cpython-310.pyc,, +transformers/__pycache__/debug_utils.cpython-310.pyc,, +transformers/__pycache__/deepspeed.cpython-310.pyc,, +transformers/__pycache__/dependency_versions_check.cpython-310.pyc,, +transformers/__pycache__/dependency_versions_table.cpython-310.pyc,, +transformers/__pycache__/dynamic_module_utils.cpython-310.pyc,, +transformers/__pycache__/feature_extraction_sequence_utils.cpython-310.pyc,, +transformers/__pycache__/feature_extraction_utils.cpython-310.pyc,, +transformers/__pycache__/file_utils.cpython-310.pyc,, +transformers/__pycache__/hf_argparser.cpython-310.pyc,, +transformers/__pycache__/hyperparameter_search.cpython-310.pyc,, +transformers/__pycache__/image_processing_utils.cpython-310.pyc,, +transformers/__pycache__/image_transforms.cpython-310.pyc,, +transformers/__pycache__/image_utils.cpython-310.pyc,, +transformers/__pycache__/keras_callbacks.cpython-310.pyc,, +transformers/__pycache__/modelcard.cpython-310.pyc,, +transformers/__pycache__/modeling_attn_mask_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_outputs.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_outputs.cpython-310.pyc,, 
+transformers/__pycache__/modeling_tf_outputs.cpython-310.pyc,, +transformers/__pycache__/modeling_tf_pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_tf_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_utils.cpython-310.pyc,, +transformers/__pycache__/optimization.cpython-310.pyc,, +transformers/__pycache__/optimization_tf.cpython-310.pyc,, +transformers/__pycache__/processing_utils.cpython-310.pyc,, +transformers/__pycache__/pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/safetensors_conversion.cpython-310.pyc,, +transformers/__pycache__/testing_utils.cpython-310.pyc,, +transformers/__pycache__/tf_utils.cpython-310.pyc,, +transformers/__pycache__/time_series_utils.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils_base.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils_fast.cpython-310.pyc,, +transformers/__pycache__/trainer.cpython-310.pyc,, +transformers/__pycache__/trainer_callback.cpython-310.pyc,, +transformers/__pycache__/trainer_pt_utils.cpython-310.pyc,, +transformers/__pycache__/trainer_seq2seq.cpython-310.pyc,, +transformers/__pycache__/trainer_utils.cpython-310.pyc,, +transformers/__pycache__/training_args.cpython-310.pyc,, +transformers/__pycache__/training_args_seq2seq.cpython-310.pyc,, +transformers/__pycache__/training_args_tf.cpython-310.pyc,, +transformers/activations.py,sha256=EMN-kVzitS1TmltS7Kr2ROKwxW0oLbAHeAmNdDQuvu4,8177 +transformers/activations_tf.py,sha256=u2Y9dgDRgW-YbN_J-xmd05EK4p24rV8ZkzrQzpz4lCI,4689 +transformers/audio_utils.py,sha256=QhEp44hIpjSaSR3hPUKpEmyNXhgcxK8-2kd9Wt5BjdU,36788 +transformers/benchmark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/benchmark/__pycache__/__init__.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args_tf.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args_utils.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_tf.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_utils.cpython-310.pyc,, +transformers/benchmark/benchmark.py,sha256=q2Jk1RyHOtzNe7vDSVjkL9Kf1jkMiGZsJPDmsACnxxY,10752 +transformers/benchmark/benchmark_args.py,sha256=djFAjBC11MnI-auxByCWSVVAqRqXGV650Leosd60VmA,4050 +transformers/benchmark/benchmark_args_tf.py,sha256=bAcsgf7bOUyoo8AGFSiQhciR8S5wMJqnL5iVlvbQzow,4735 +transformers/benchmark/benchmark_args_utils.py,sha256=pkgvor3IuC5v9BubOCFVuwbgGHsoGkNp1CDdgJlyBi4,6499 +transformers/benchmark/benchmark_tf.py,sha256=aEjclKepsQhn6vjxVJ5l2ho0ptUJuvaSYfuP4rJE6MQ,13251 +transformers/benchmark/benchmark_utils.py,sha256=f9fv_EF1GwfK6A9wS6O-AYDrjI_cBflTbffL32iFTY0,37600 +transformers/cache_utils.py,sha256=KktrOY-OqGMUUKhxsKG7xrlyIUiXBkjGeHRaBlc78Yw,20155 +transformers/commands/__init__.py,sha256=aFO3I7C6G9OLA9JZSc_yMaZl0glOQtjNPjqMFfu9wfQ,923 +transformers/commands/__pycache__/__init__.cpython-310.pyc,, +transformers/commands/__pycache__/add_new_model.cpython-310.pyc,, +transformers/commands/__pycache__/add_new_model_like.cpython-310.pyc,, +transformers/commands/__pycache__/convert.cpython-310.pyc,, +transformers/commands/__pycache__/download.cpython-310.pyc,, +transformers/commands/__pycache__/env.cpython-310.pyc,, +transformers/commands/__pycache__/lfs.cpython-310.pyc,, +transformers/commands/__pycache__/pt_to_tf.cpython-310.pyc,, 
+transformers/commands/__pycache__/run.cpython-310.pyc,, +transformers/commands/__pycache__/serving.cpython-310.pyc,, +transformers/commands/__pycache__/train.cpython-310.pyc,, +transformers/commands/__pycache__/transformers_cli.cpython-310.pyc,, +transformers/commands/__pycache__/user.cpython-310.pyc,, +transformers/commands/add_new_model.py,sha256=H8_UkJ8TYyC8sEMqE4Iu-Izq3lJi93l813oU-LI2XyY,11062 +transformers/commands/add_new_model_like.py,sha256=kI87CNAiy_WPsOMJopWmS3ZHQtw95ULORRCJcFRDcww,70870 +transformers/commands/convert.py,sha256=lHz2sQti9HubMNwObLCc_sw9Y7L-IPcaYJMSJR_AVWM,7068 +transformers/commands/download.py,sha256=GKPadx-YGBL7dHJSEcUp-QNOP3R2L71-gPGP0z6NNQI,2395 +transformers/commands/env.py,sha256=q21O011lwdgGX862xAxH1Pjhd53uuxgB3g6C8cfNGV4,5316 +transformers/commands/lfs.py,sha256=4QDGBbJxBcRpgmhHXvigZQUsXuTPwrRY60t1qGjzfWU,8001 +transformers/commands/pt_to_tf.py,sha256=qVXHzdjjik3n_y8Ci8A6Wg6ag0eX0T6Dj36-sSv18Xg,20540 +transformers/commands/run.py,sha256=nyEe2lOoj6e0EOxjKeF08hdW9WVWa101r9hWXl9v3Jo,4249 +transformers/commands/serving.py,sha256=CnNHFVM_SK_-aNxEJnq7vJK5dBqDBw7bxxQiv5truEU,8027 +transformers/commands/train.py,sha256=FKlH-IYr3mVc7_mS5ObCyJaHs9JincYLg3Zt6WQz1ag,6341 +transformers/commands/transformers_cli.py,sha256=QimzKwJXAzZ9da0NDFrupqnATqP8MQ7upoj9TspwnKA,2047 +transformers/commands/user.py,sha256=t35-l945UBen5uYR_KsbhtNOqdHXrfdpHrhTbR3-YXc,7124 +transformers/configuration_utils.py,sha256=P3_uwVe3AUONwXqEYMuhFPxosvPEnGs6H9M4r9FmM0c,56494 +transformers/convert_graph_to_onnx.py,sha256=rJmIK0Rs5WPsOiRGWmgN9q4A5W5gqUDB7OmcRkTqvJY,20151 +transformers/convert_pytorch_checkpoint_to_tf2.py,sha256=bxCJ6CUpi63x9K-Hr2up0W0VPIEhuuceM0siBse8brs,14655 +transformers/convert_slow_tokenizer.py,sha256=Prb4ZdYi1_Gw6HVxPgPGqSfEIVdCA2UNuZEtbuDgUPM,56210 +transformers/convert_slow_tokenizers_checkpoints_to_fast.py,sha256=mIX3e0r7Dci5lahBf0iO4C2rvj0OzwkJbmw5lmgiG0Q,4982 +transformers/convert_tf_hub_seq_to_seq_bert_to_pytorch.py,sha256=so9OnNT3TmdTbRMGbuepLY0zCMNfB6huaLg38aDVWOU,2911 +transformers/data/__init__.py,sha256=JWIY7GLKedWilK2mpd_qtVeXLQK2ZXki6ISkRUua09Y,1423 +transformers/data/__pycache__/__init__.cpython-310.pyc,, +transformers/data/__pycache__/data_collator.cpython-310.pyc,, +transformers/data/data_collator.py,sha256=EQgDVvrLxXzDZqoMAHwVd6wkFMf0pjdCYERwlEb_L-w,78254 +transformers/data/datasets/__init__.py,sha256=PGzUJjdmTPOPMyjV4-Tj3sNrmmh-lspjyxrVbrfJoX8,909 +transformers/data/datasets/__pycache__/__init__.cpython-310.pyc,, +transformers/data/datasets/__pycache__/glue.cpython-310.pyc,, +transformers/data/datasets/__pycache__/language_modeling.cpython-310.pyc,, +transformers/data/datasets/__pycache__/squad.cpython-310.pyc,, +transformers/data/datasets/glue.py,sha256=K3h2KxjIg0kWegPCw6ikbOL-lCFbKoQewb7R8wLZoIc,6163 +transformers/data/datasets/language_modeling.py,sha256=E-VGwuyb09J4KmV8v37bNH5in90wDPuZHCYsqGdT7W0,23721 +transformers/data/datasets/squad.py,sha256=OUTQDd687SQns7HRWDCgAjnuo_ZXihifLS6jF2bhUhc,9219 +transformers/data/metrics/__init__.py,sha256=o9t_VTQtqU3lEhqvocDzFMm7OvAKD-uxrjPWy0r74BI,3632 +transformers/data/metrics/__pycache__/__init__.cpython-310.pyc,, +transformers/data/metrics/__pycache__/squad_metrics.cpython-310.pyc,, +transformers/data/metrics/squad_metrics.py,sha256=pMwqcTg9KnCvmhLzAy1VJHRgJOEx6lLD105d-JcnWfg,29698 +transformers/data/processors/__init__.py,sha256=lvN5mp9mdrr5v6QvZT6VcoZ78zZUvXiumTm6Gdvlgvo,1014 +transformers/data/processors/__pycache__/__init__.cpython-310.pyc,, +transformers/data/processors/__pycache__/glue.cpython-310.pyc,, 
+transformers/data/processors/__pycache__/squad.cpython-310.pyc,, +transformers/data/processors/__pycache__/utils.cpython-310.pyc,, +transformers/data/processors/__pycache__/xnli.cpython-310.pyc,, +transformers/data/processors/glue.py,sha256=hhY12jdX1WnZ3_E3vSv-0rmF53F56c_2gQeW8dTwYb4,23219 +transformers/data/processors/squad.py,sha256=_4WNLcZA6TAy7uNZO46948tmL5ngVF0LSB0y8nUn6rs,33153 +transformers/data/processors/utils.py,sha256=GSaZbJ--XYq57vqyRVx_5LHSR4tklzFyR7ZKHGWsTAs,13829 +transformers/data/processors/xnli.py,sha256=i03-c8vaQVYKpR7r4B8PsF6_CXXHxB7N-YHdzxs-APU,3489 +transformers/debug_utils.py,sha256=6q8ArB104GdcIC2qfBQzKLxO7PfXmHEKdYtfL2FOK2w,12907 +transformers/deepspeed.py,sha256=6C1uUQ84ImJPYu3WqZ-o6uOGPa7IHzD0MkP7DgnQxJY,1478 +transformers/dependency_versions_check.py,sha256=6HbgtT2Wp-QZGOAdyUOklHvNA4rOVITGHrX34dtMOqg,2115 +transformers/dependency_versions_table.py,sha256=cGScoQVuynb1ell9UhMZAaebjVrwEiUPCQ9yAobKUxE,3182 +transformers/dynamic_module_utils.py,sha256=oG4PmP0MBRal3PhhOYaw2nWroucDhawU77s0vgdKUbM,27468 +transformers/feature_extraction_sequence_utils.py,sha256=dPKvTC29tNn8xK_dxZSeDbhNRK2s8VHu2EZIEKesEAs,18307 +transformers/feature_extraction_utils.py,sha256=XaRKR3ez3AyK67ntVMsBTHUPdvv5p7YLF9vk7SvrZMM,29527 +transformers/file_utils.py,sha256=qI7cWTYpFy0v9HZSRBASv2yvD2U1OJgYShIOsQ7cCUg,3744 +transformers/generation/__init__.py,sha256=Ox2TuVV2Eg-5Lir9UztnDLf-lyYhh_RevPAtIQbkajs,11214 +transformers/generation/__pycache__/__init__.cpython-310.pyc,, +transformers/generation/__pycache__/beam_constraints.cpython-310.pyc,, +transformers/generation/__pycache__/beam_search.cpython-310.pyc,, +transformers/generation/__pycache__/candidate_generator.cpython-310.pyc,, +transformers/generation/__pycache__/configuration_utils.cpython-310.pyc,, +transformers/generation/__pycache__/flax_logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/flax_utils.cpython-310.pyc,, +transformers/generation/__pycache__/logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/stopping_criteria.cpython-310.pyc,, +transformers/generation/__pycache__/streamers.cpython-310.pyc,, +transformers/generation/__pycache__/tf_logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/tf_utils.cpython-310.pyc,, +transformers/generation/__pycache__/utils.cpython-310.pyc,, +transformers/generation/beam_constraints.py,sha256=GefqriO2jWruyhdZI9pyGz4yZ-W9AYmzZueSWITgok4,19105 +transformers/generation/beam_search.py,sha256=d6ZduwortYoRu6d0uCWfz1ivHqeQAxdA_lDrRA0kUOU,48812 +transformers/generation/candidate_generator.py,sha256=uyroHZgYZAD3zxat1XYH_dvD7nDql1c4xei5KcYPDXQ,20792 +transformers/generation/configuration_utils.py,sha256=vatVMXQvHtDOQW_4gJ7jrADmvbE8yaADlsZaJp0pUsc,57916 +transformers/generation/flax_logits_process.py,sha256=JBFbiYJANPD_2LeY2hghPHfrerBwr4ZEGds4kcmaS1k,23005 +transformers/generation/flax_utils.py,sha256=x4J5blTRgFVp8o0lK-UvjOYzpeTP54kdy5m5eK8apzQ,50078 +transformers/generation/logits_process.py,sha256=XFBNkklG1DhG-Py1rffCj0WnnnO7fgn7h0Dd3WEXAw0,106675 +transformers/generation/stopping_criteria.py,sha256=jMkMaMeBKO7Vw-Cn8or1kd7PlRsNV-pwnBRPcU2fZZs,8590 +transformers/generation/streamers.py,sha256=ArJCKAVRKIKALqdGBAsQu038-BwZbo05tzOXZWP9yng,9213 +transformers/generation/tf_logits_process.py,sha256=ZsIBDrFJ3egkk8aWYKtCvqH4M7INnlBa2zoCAIT5MR0,28114 +transformers/generation/tf_utils.py,sha256=dUFykUJNLGm5gYMadkcJgoHK5y1zw2pCa3Vm0HcdRbI,175623 +transformers/generation/utils.py,sha256=sO4vvRA950NtMdxcPRA0_2w4rLdpQYidV6-OrtM2bSM,271310 
+transformers/hf_argparser.py,sha256=t6EC7gJ6yWJPCDScgrppfgXOAkjZxEJJO6pe8W-aK_0,19823 +transformers/hyperparameter_search.py,sha256=wmfAWk_NTUQj3MezO_6CaDaJyUt9pbARcs-tbo_BdeM,4171 +transformers/image_processing_utils.py,sha256=YrlSb_pIVAneGj-YaKjAiO6h5XSGIfQ93biaGCnXT-k,36375 +transformers/image_transforms.py,sha256=q1pV5pPS32f9i5m2NXObPp50nqAMiARQduiuvHgCruY,34203 +transformers/image_utils.py,sha256=o2ajRGC2m0wQsu1UljF0d2l_TJ_G0RQtglI2lTHC-0M,30007 +transformers/integrations/__init__.py,sha256=fyK711qayQzwCj1pXHOgDi3aBFWaLThXWrV1bQkbAVc,4832 +transformers/integrations/__pycache__/__init__.cpython-310.pyc,, +transformers/integrations/__pycache__/aqlm.cpython-310.pyc,, +transformers/integrations/__pycache__/awq.cpython-310.pyc,, +transformers/integrations/__pycache__/bitsandbytes.cpython-310.pyc,, +transformers/integrations/__pycache__/deepspeed.cpython-310.pyc,, +transformers/integrations/__pycache__/integration_utils.cpython-310.pyc,, +transformers/integrations/__pycache__/peft.cpython-310.pyc,, +transformers/integrations/__pycache__/quanto.cpython-310.pyc,, +transformers/integrations/__pycache__/tpu.cpython-310.pyc,, +transformers/integrations/aqlm.py,sha256=wpVq2OAdGDMTywT-_rpH6vpRQEhUH4hLTi13jACFDCg,4462 +transformers/integrations/awq.py,sha256=vA5OO8hjOL1LxTnCDFBnZGLUe8HgcH51O5WQ8eNYMek,18380 +transformers/integrations/bitsandbytes.py,sha256=i0oRUxEbpqlsFA3-3K0G2A_mCPeZ9zFZINloQvr-jZE,15305 +transformers/integrations/deepspeed.py,sha256=toOGbMcsGg_TACcGQ5dEqjS9x34Ix-7uhjhelC2dxHA,18703 +transformers/integrations/integration_utils.py,sha256=gMUtD9L91UScWCjvUqmp9STBlmCTBLKMzoyL8mQLW30,85781 +transformers/integrations/peft.py,sha256=_1zABToVWSH9U7XoPG5cJVmAT_5jbSbMDUADHvGiAXE,22620 +transformers/integrations/quanto.py,sha256=VR7GV9KG6mFweixYDaUdhYzfGoLZVurwXWU24Idg32w,4250 +transformers/integrations/tpu.py,sha256=Y8YMwIrEgh1s-OCNbOQZFD1_3Tvqpo3e1H6eECTceSU,1392 +transformers/keras_callbacks.py,sha256=i95nrEd_QsEo10x3T9RqZf3xGzfPiMOhmU1Ef_HvnGE,20675 +transformers/kernels/deformable_detr/cpu/ms_deform_attn_cpu.cpp,sha256=VcCGm9IrvgVvmyZt0KyP16Q-ONmbeg6bKwccP6KadL0,1255 +transformers/kernels/deformable_detr/cpu/ms_deform_attn_cpu.h,sha256=nvVsKj9nabQ7IaNY4di5xVx6u-0lIifQvLg2JCoxiik,1138 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.cu,sha256=l7UQ6zn1qbeve1meY0QLq2RKk3X6fGpp2UfKt4aEYJ4,7466 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.cuh,sha256=HD7bMWLoGrDKw7XUPPgILCAdOSo1IC8RIv_KyKAnLb0,61539 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.h,sha256=xxP17aer-SiU9J5ASLHdtLIyhFmHC5iLcPIPNW2xkrg,1694 +transformers/kernels/deformable_detr/cuda/ms_deform_im2col_cuda.cuh,sha256=BRN8-yfSHY8ChLij8jFl2_z2LL0LEFKuVF6Byi-YLAY,54695 +transformers/kernels/deformable_detr/ms_deform_attn.h,sha256=H2bBXGyl0R-v2DqGVz11asoRvxbjZ9iWB9djomZTpgY,1837 +transformers/kernels/deformable_detr/vision.cpp,sha256=8RvZy7P_MMx5QEszo_MwNODddJLQ8mKcmmMfgLYC_HA,798 +transformers/kernels/deta/cpu/ms_deform_attn_cpu.cpp,sha256=VcCGm9IrvgVvmyZt0KyP16Q-ONmbeg6bKwccP6KadL0,1255 +transformers/kernels/deta/cpu/ms_deform_attn_cpu.h,sha256=nvVsKj9nabQ7IaNY4di5xVx6u-0lIifQvLg2JCoxiik,1138 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cu,sha256=M5-bW9g5z-upTFMNPIfnyLAqKTxGMCjAPqBr0GmWHX8,7360 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cuh,sha256=hygB20Vh3RttOSdCuTFz8V0d3CXNp-Q89x22rYmD258,61433 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.h,sha256=rPWOOMo3QyFdB5kMiexpApLFZ4dnRtx4CluEAGwsfO8,1139 
+transformers/kernels/deta/cuda/ms_deform_im2col_cuda.cuh,sha256=BRN8-yfSHY8ChLij8jFl2_z2LL0LEFKuVF6Byi-YLAY,54695 +transformers/kernels/deta/ms_deform_attn.h,sha256=H2bBXGyl0R-v2DqGVz11asoRvxbjZ9iWB9djomZTpgY,1837 +transformers/kernels/deta/vision.cpp,sha256=8RvZy7P_MMx5QEszo_MwNODddJLQ8mKcmmMfgLYC_HA,798 +transformers/kernels/mra/cuda_kernel.cu,sha256=LxxRYTymSoBEQpWXHA0PMzwZwpolcwX7mFAjwU8-ZMc,11678 +transformers/kernels/mra/cuda_kernel.h,sha256=UJvYq_MDzhcp07bZpYcOBn8ZGFcf_Ax1dynuiVTBvmA,1682 +transformers/kernels/mra/cuda_launch.cu,sha256=Ox5MTACriC30CGyn-g1Kb5EgQSMAZSaN6fpit3xLFWc,4072 +transformers/kernels/mra/cuda_launch.h,sha256=RVCkN_euasvgPK0zADNRvRYGWd4ah5l9X-7UG_AcdH8,707 +transformers/kernels/mra/torch_extension.cpp,sha256=N0YdBLVX0lZabckJzV_RYTHS2atCNvn13E4Ivobt25g,1405 +transformers/kernels/rwkv/wkv_cuda.cu,sha256=EvaUrEnw_qr2EjMKP-Pq7VPzFfGlMJnFhdHNLtn1fPU,6219 +transformers/kernels/rwkv/wkv_cuda_bf16.cu,sha256=DG9hTtOAlrnpDFahjt-MmnOxjMuhGU55GPsmV21HtrQ,6633 +transformers/kernels/rwkv/wkv_op.cpp,sha256=qSExhKdT6p3hyaTv5SypCnH_c7EmaX6HbhTcCntvZWg,4022 +transformers/kernels/yoso/common.h,sha256=Tq2rOUtE8Y4DRAUrRISvwIwVI3u8JBf21WgWSAYiDlQ,273 +transformers/kernels/yoso/common_cuda.h,sha256=Sji70AuVcuZSotLF7Gotmun9MJuOHo8wEkxizKXLRtc,258 +transformers/kernels/yoso/common_cuda_device.h,sha256=y6WUgAiapnMKqthRMS5s-DMSWNVkar_i8g4KPFvqiuk,2063 +transformers/kernels/yoso/fast_lsh_cumulation.cu,sha256=LA4LGNgyXT3osIyQtFBcRanSyNQWm8yqmpz7AeLP7cw,19061 +transformers/kernels/yoso/fast_lsh_cumulation.h,sha256=1cTWZjOm751HGiEB5P-UPJ8SE1VO7XRyXmBgyxYDyjI,1575 +transformers/kernels/yoso/fast_lsh_cumulation_cuda.cu,sha256=HKGLWl-WFz5BXjaAPHTNTbG6IUkJjhBdvFf2K7hrDVQ,32870 +transformers/kernels/yoso/fast_lsh_cumulation_cuda.h,sha256=_KGI8HQbVFtCN5KAcSGpyiJ2foGi26RKen138CUc2fY,5490 +transformers/kernels/yoso/fast_lsh_cumulation_torch.cpp,sha256=-Rh7o39Z3rtOPwNnEM-c51TCqywpVdK0WVaA7VRrXbQ,3154 +transformers/modelcard.py,sha256=zeGRoH_h9x3BNmXiG_YhZ69pCxp8YSgzt2tMooaszGQ,35155 +transformers/modeling_attn_mask_utils.py,sha256=-hc3S9YP3oQb2NZd4HLQhVBSQ8yHXYgphfzTYizwjls,22276 +transformers/modeling_flax_outputs.py,sha256=wXse1g9VyQyVOZ9DrbPALeoZBdS45fsBA9fNrGnwaZc,41961 +transformers/modeling_flax_pytorch_utils.py,sha256=UL5zridIWWbmo5vZ6uVoRcF6kIuEN4jthQ4q8uRKgRQ,21886 +transformers/modeling_flax_utils.py,sha256=UCYFom8AM-0nN0o6jwheWrXEVs9nGmtYTrb0_3q6kBs,61404 +transformers/modeling_outputs.py,sha256=CYpjijqZNOVUc-kixDLI-jMFru9MhpDQvnncSfp0wb4,112567 +transformers/modeling_tf_outputs.py,sha256=nXCMOmFZ7IZFVuiQr7EU2ciV9QqwOYPYld_r2jBxVpE,56074 +transformers/modeling_tf_pytorch_utils.py,sha256=5V6hH7KnfdP1f-R8s09GEyOKXSuoZVwDISxPmFZnhjw,27908 +transformers/modeling_tf_utils.py,sha256=muR9u6vciEekJG939anukWeBROWD6YeAkfUZEwtqnDM,166736 +transformers/modeling_utils.py,sha256=upsOIqBVhgHmVpHfsQIL492QL4dbwgcjHUmL6IBSxlE,237486 +transformers/models/__init__.py,sha256=G2S4oftKfd2YTDX4Q8QcGC1ISDnw6-SdVMTjKzx6eL8,4222 +transformers/models/__pycache__/__init__.cpython-310.pyc,, +transformers/models/albert/__init__.py,sha256=eXW8msH9V8No-Tb5R28tdpXQbOnnSG77L_TVEwCRf9o,5482 +transformers/models/albert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/albert/__pycache__/configuration_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/convert_albert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/albert/__pycache__/modeling_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/modeling_flax_albert.cpython-310.pyc,, 
+transformers/models/albert/__pycache__/modeling_tf_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/tokenization_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/tokenization_albert_fast.cpython-310.pyc,, +transformers/models/albert/configuration_albert.py,sha256=orosaIXnT6NdUCmKUv9J3BPByewA1BKe8OD15DLoMWA,8184 +transformers/models/albert/convert_albert_original_tf_checkpoint_to_pytorch.py,sha256=nTwtVg0AZgG4QnG9K361HM37gxGegQvD-ymZWuhic7s,2162 +transformers/models/albert/modeling_albert.py,sha256=PhiNBl2jRMs8OJdhL3Go8Nx2ry5qKeYHvIqd9X9f-bg,60519 +transformers/models/albert/modeling_flax_albert.py,sha256=u2EEkckxVFt5WA8oQNbLJGcV5mhHGIJ6DMS867O150U,40739 +transformers/models/albert/modeling_tf_albert.py,sha256=CYfemwBDzPw9fjsy36jrmAjJMI7yzBFsiF8RBOdfjJg,68950 +transformers/models/albert/tokenization_albert.py,sha256=ysc5uU3xkgZFpAS4EGyYjEvfw9MHy6bx4kbsIfL-fnE,14423 +transformers/models/albert/tokenization_albert_fast.py,sha256=FhSf6cK3YdDHdHJqtar_mMPeSkoOCCEJouFMl6jWwwU,8832 +transformers/models/align/__init__.py,sha256=DWtMJsXbmRuoSAwLLOy6aXKY65IT1TDV4ifwBmApkM0,2064 +transformers/models/align/__pycache__/__init__.cpython-310.pyc,, +transformers/models/align/__pycache__/configuration_align.cpython-310.pyc,, +transformers/models/align/__pycache__/convert_align_tf_to_hf.cpython-310.pyc,, +transformers/models/align/__pycache__/modeling_align.cpython-310.pyc,, +transformers/models/align/__pycache__/processing_align.cpython-310.pyc,, +transformers/models/align/configuration_align.py,sha256=a8fS9v05sS50o11bk0aRY-RD3FbhXYqpElyew61WwWw,18194 +transformers/models/align/convert_align_tf_to_hf.py,sha256=tzPoEMyLV_ckVngYdvJ6uAFZ6RgsuX55JYjEkIMtPTg,15536 +transformers/models/align/modeling_align.py,sha256=Z27Ux-Mym96AbnHZoiZPcQhG0DKnSHz71NPJi5PiQXs,71777 +transformers/models/align/processing_align.py,sha256=nd9rZAv_C3Xz7Zbv3SBkmCy-pZ6ChimNzEBf33zzdrM,6110 +transformers/models/altclip/__init__.py,sha256=bvOH6rQhnWm4shjpJ51SPs0uxlDdPrViBxQqTt3gRik,2126 +transformers/models/altclip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/altclip/__pycache__/configuration_altclip.cpython-310.pyc,, +transformers/models/altclip/__pycache__/modeling_altclip.cpython-310.pyc,, +transformers/models/altclip/__pycache__/processing_altclip.cpython-310.pyc,, +transformers/models/altclip/configuration_altclip.py,sha256=cDETvjuzn-FFtHMIZMACD-HYCRYB8eCnniWdKkLQxj4,19801 +transformers/models/altclip/modeling_altclip.py,sha256=4VuL9K0aaKbigpYaqRmRC_4l05mDR-lEyyNG1rIv9ww,78250 +transformers/models/altclip/processing_altclip.py,sha256=LHCFcwZHPlRnVYzIONbEtm60ZRHzJeTQq2o7akvAM_g,6396 +transformers/models/audio_spectrogram_transformer/__init__.py,sha256=-LyBP9am8Di97o7CZupQyqD1-2bYHKLcUqVWTZBHVs8,2159 +transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/configuration_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/convert_audio_spectrogram_transformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/feature_extraction_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/modeling_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/configuration_audio_spectrogram_transformer.py,sha256=xEHkE6ckUKtuSLQrhSnmh8wVTu0oDsf_zBVxFYscP1Y,5549 
+transformers/models/audio_spectrogram_transformer/convert_audio_spectrogram_transformer_original_to_pytorch.py,sha256=Csn0NnGlPMLUehRWvgU1cW49EzTNZ7p0COxWNIqQIp8,11052 +transformers/models/audio_spectrogram_transformer/feature_extraction_audio_spectrogram_transformer.py,sha256=CLMcdUUk8ehA2PC9wEBwvWd68tIMFtZswNhVbVwXWc8,9908 +transformers/models/audio_spectrogram_transformer/modeling_audio_spectrogram_transformer.py,sha256=FRTHbROpF95XpMUfzrucRZ1_RWYSLSEXd5Blf2EzKq0,25924 +transformers/models/auto/__init__.py,sha256=pkAEEIEmLLFzRM_jTAP42u15RL8dwmJc4xY2op7NwPg,16840 +transformers/models/auto/__pycache__/__init__.cpython-310.pyc,, +transformers/models/auto/__pycache__/auto_factory.cpython-310.pyc,, +transformers/models/auto/__pycache__/configuration_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/feature_extraction_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/image_processing_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_flax_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_tf_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/processing_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/tokenization_auto.cpython-310.pyc,, +transformers/models/auto/auto_factory.py,sha256=m9d07NNVM9JDRHyRCsi4q5DhwgbNvrhIozsbSa948-k,43218 +transformers/models/auto/configuration_auto.py,sha256=mB9qTmRzBjXePSpnpwq8CK7PfxbzlQS2ppT4wYb_zwg,38372 +transformers/models/auto/feature_extraction_auto.py,sha256=YfgaeHgaDj8qtVEHSLdq9Xjit6c_O5c1wO_pRykwGrY,19509 +transformers/models/auto/image_processing_auto.py,sha256=IpCXZv1iwq2hAQLYdbE1bey1ASOdkD8mEGtoTl7e2vE,21868 +transformers/models/auto/modeling_auto.py,sha256=nePbz4WQBdzaaQB9lmX6Adi54VGndxuvPk9PGaYoCbk,68407 +transformers/models/auto/modeling_flax_auto.py,sha256=WKcWOmDTq2kwtFYGHccSyV3o8yUtvHlCgVRlh_5K2OI,14475 +transformers/models/auto/modeling_tf_auto.py,sha256=fB3ufe0eyB2DzDupxt_EBfDUybgUz3HdT6qhF7DAUu8,28077 +transformers/models/auto/processing_auto.py,sha256=0u7tVknEh1L6WIyY_ztD5Zi-6K4E8YP3KbGQBKloCZg,17056 +transformers/models/auto/tokenization_auto.py,sha256=kLADVhqr2UbmfMBvJuOixPwFS-0N7EOBYCrvcNfpDHk,46856 +transformers/models/autoformer/__init__.py,sha256=wNFDMEr-Yo9Bt33bP5qqiC5dWKXOnWQPFg4C_ewyfGU,1914 +transformers/models/autoformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/autoformer/__pycache__/configuration_autoformer.cpython-310.pyc,, +transformers/models/autoformer/__pycache__/modeling_autoformer.cpython-310.pyc,, +transformers/models/autoformer/configuration_autoformer.py,sha256=j1rKPbxRBrljEb0k9f0LrcW56s98dadHTWfd__cbTlU,12244 +transformers/models/autoformer/modeling_autoformer.py,sha256=vs6vneh5RLDK4pC6s2BrkoWmIzl0hZCaGYGPOhyEOTQ,108848 +transformers/models/bark/__init__.py,sha256=o6hWj_LrFLp-JSNY04tbWewQyrA44B0mhLUDpyv4jVw,2212 +transformers/models/bark/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bark/__pycache__/configuration_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/convert_suno_to_hf.cpython-310.pyc,, +transformers/models/bark/__pycache__/generation_configuration_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/modeling_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/processing_bark.cpython-310.pyc,, +transformers/models/bark/configuration_bark.py,sha256=nDioA5tQVccq1raX9PUewVvbAbVGXMpOlAY9XUpE6Fk,12836 
+transformers/models/bark/convert_suno_to_hf.py,sha256=O1OYzKyTr-9snPYUAw09GmVwb76UmiQGi3C2WfEIwTw,9373 +transformers/models/bark/generation_configuration_bark.py,sha256=80ZI8x5r8JH26siXfm_c8NkuaRTUUzcxiMrtfIKDoSg,14992 +transformers/models/bark/modeling_bark.py,sha256=DKdikVinEBLfbcNvr2OoRdx3rgGEbGdBLB92xSWlTms,86628 +transformers/models/bark/processing_bark.py,sha256=PgoptE_6V_ESvgXhGrRfVa68pTjJHXv1j9YwV24W9HA,13312 +transformers/models/bart/__init__.py,sha256=FH8iETt_U4YAIIjo-Oap-WtQsBZqsaxGr9028KnrDEQ,4397 +transformers/models/bart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bart/__pycache__/configuration_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/convert_bart_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_flax_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_tf_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/tokenization_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/tokenization_bart_fast.cpython-310.pyc,, +transformers/models/bart/configuration_bart.py,sha256=TDtjKSQs7UaY6TZFDDhTqIbdqXa-lTdtNrm2LWikJv0,18783 +transformers/models/bart/convert_bart_original_pytorch_checkpoint_to_pytorch.py,sha256=VIRm-jWP4PNWN0Japr8yCJAJAAPVkJpJmEzYnHexU88,6055 +transformers/models/bart/modeling_bart.py,sha256=iPA4Z8NMV5OLezna3y8BTzCAm9-a6AjTkP-Gzf3zJc4,109215 +transformers/models/bart/modeling_flax_bart.py,sha256=JH4YXctmpkynng1wP-50Vn4t8vEuhEmFfsfQZu1-lFI,82707 +transformers/models/bart/modeling_tf_bart.py,sha256=SCaGH910Egz8gtbPF8Kg38uTG5KwPilRQTG4CMLvTaU,80773 +transformers/models/bart/tokenization_bart.py,sha256=fqHZCAZwmM4QoFAk6unDGwrbXtZkV1kPWMsVIu7HrPg,16250 +transformers/models/bart/tokenization_bart_fast.py,sha256=h-DIyLB7ii7QjTUFF78nU8e5fPVEMUCXaPKkHqMI71E,11723 +transformers/models/barthez/__init__.py,sha256=7IXg6okZoJ10NCYRWn0GvoWWUvGUN27eIw7CzJ5CVGA,1848 +transformers/models/barthez/__pycache__/__init__.cpython-310.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez.cpython-310.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez_fast.cpython-310.pyc,, +transformers/models/barthez/tokenization_barthez.py,sha256=i_gcpfOaZ8IY9l5kqNMhChOo336CEyJ2i7ldKn3e_kg,12066 +transformers/models/barthez/tokenization_barthez_fast.py,sha256=HQ-622iS3S_XjkSTWyx-gfMhpFAOfHe7POWqqnALrhA,7838 +transformers/models/bartpho/__init__.py,sha256=Q0mAOPJGQaHHigdajLg5-2TPOw9NWw5uIRQlmfhh8Ds,1362 +transformers/models/bartpho/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bartpho/__pycache__/tokenization_bartpho.cpython-310.pyc,, +transformers/models/bartpho/tokenization_bartpho.py,sha256=1qUC54eDZVK7cpBqlkFSM5xM8c6JCFBPKXxFMPKitC0,13525 +transformers/models/beit/__init__.py,sha256=T88Lwe4Y0tQmdrOpVnewjuHJoW_DZEbRmbTZDU2oAR0,3339 +transformers/models/beit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/beit/__pycache__/configuration_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/convert_beit_unilm_to_pytorch.cpython-310.pyc,, +transformers/models/beit/__pycache__/feature_extraction_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/image_processing_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/modeling_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/modeling_flax_beit.cpython-310.pyc,, +transformers/models/beit/configuration_beit.py,sha256=Gmv_5upSDT5CQV_fFqa5R1pJztvBWdpxCSEjZLjUS9A,11698 
+transformers/models/beit/convert_beit_unilm_to_pytorch.py,sha256=CndMgSTJoOik5LPH3YVLnQ6IR7IqfCsEN0KPUR43jHA,16578 +transformers/models/beit/feature_extraction_beit.py,sha256=C9wchKLt3K__wzqOkDWsbK0hMPzVn9HZtm5KPI5Oq2s,1172 +transformers/models/beit/image_processing_beit.py,sha256=y83OF3kyhKd6ODDq2IkZAyZhGNw7SzBhvz5v0k7U37U,25074 +transformers/models/beit/modeling_beit.py,sha256=67Sc74br06LoRjUnREs1bWdQdvQl6JSL1CXnBCNhBzU,59791 +transformers/models/beit/modeling_flax_beit.py,sha256=9_xkFN7xtiLrxbShhpX8EgpY8kuOKIui-OlRidmNUAI,36996 +transformers/models/bert/__init__.py,sha256=Tj3tueT-1FoWBmNNZXGGnytzeoLeEcjviP32uyfU1rw,6057 +transformers/models/bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert/__pycache__/configuration_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_original_tf2_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_pytorch_checkpoint_to_original_tf.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_token_dropping_original_tf2_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_flax_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_tf_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_fast.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_tf.cpython-310.pyc,, +transformers/models/bert/configuration_bert.py,sha256=7NgpZT7tKTkmYNrewwywV36M8vWmvHiAJV0D9MJk7O0,7340 +transformers/models/bert/convert_bert_original_tf2_checkpoint_to_pytorch.py,sha256=niQmTMwlmUA0aII1Zzg2OiJSpFljzwLCeJYotJ4tKOY,10490 +transformers/models/bert/convert_bert_original_tf_checkpoint_to_pytorch.py,sha256=Hq-TMOnQnfpZOh0m9GHoykkogg0-HgLAmSiFvK8E6K4,2159 +transformers/models/bert/convert_bert_pytorch_checkpoint_to_original_tf.py,sha256=6nISsCdgO_sJFFiLpnkGGsmTqC9Yp-gzDPDM-EafVXA,4112 +transformers/models/bert/convert_bert_token_dropping_original_tf2_checkpoint_to_pytorch.py,sha256=5kYqUUc-RGck4D0OUTlLDnyIPb_OIJ1NWboYRJ-7H0c,7606 +transformers/models/bert/modeling_bert.py,sha256=Hw94jzZlKj33oYz1_VFCJ5s4-M0fuM2QfxH9jHhd7Rg,83243 +transformers/models/bert/modeling_flax_bert.py,sha256=UMRUMxvvwu8oIzkLfVjXWP9Y47WolZPtZFELypsG-pg,63672 +transformers/models/bert/modeling_tf_bert.py,sha256=wgt4VwesdaX5kXwSuBCLMTA1nqAEUxmANmEmDygSlo4,94392 +transformers/models/bert/tokenization_bert.py,sha256=5PvCwO7TdeghUBTOMEoBEiQ2WEawchcHLHrRykJaGjI,20528 +transformers/models/bert/tokenization_bert_fast.py,sha256=FoAutpMtmt_D77Z82RtBcttl8Cl5P2Rdt_HFIKUT2m8,7652 +transformers/models/bert/tokenization_bert_tf.py,sha256=1zWzz3FPrh5zWqRG7YVY_wIVCzzB8iNGR6MGx48ke3c,11895 +transformers/models/bert_generation/__init__.py,sha256=2XUvSVePne5Hspjzn6l_PonKfZ9WXjRBub9bevOv8R4,2275 +transformers/models/bert_generation/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/configuration_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/modeling_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/tokenization_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/configuration_bert_generation.py,sha256=DIEAcuNI_Ufp7hPLN-nDuvJLYDYgr9gNphiroKv-4qY,6342 
+transformers/models/bert_generation/modeling_bert_generation.py,sha256=XwCC1kp-Sr2QssLGXpH4wds3Y8J80Xz8MNHHj2_w9j4,48087 +transformers/models/bert_generation/tokenization_bert_generation.py,sha256=jOLb4GKOuFKlqxBDgtJ3Ii4iH3ovUGONeoUCwEHElck,7078 +transformers/models/bert_japanese/__init__.py,sha256=6prQNXS2J4cWXqAqkqDyxNmzx-vaFQtOjJQio-ZUc4g,1053 +transformers/models/bert_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert_japanese/__pycache__/tokenization_bert_japanese.cpython-310.pyc,, +transformers/models/bert_japanese/tokenization_bert_japanese.py,sha256=tc6yQ6PwwyXpwsgQC61VePBfSQ-GwwG_eLiT91o-gCo,39028 +transformers/models/bertweet/__init__.py,sha256=sXE2NweoWp8UIaJkuSaLSw4EaSEzpWwBe3pegec_Kj0,959 +transformers/models/bertweet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bertweet/__pycache__/tokenization_bertweet.cpython-310.pyc,, +transformers/models/bertweet/tokenization_bertweet.py,sha256=lC_rUAk5FTs4tgDyIjBoe6Eel_DlrsV3GW3pa7hnE9I,26988 +transformers/models/big_bird/__init__.py,sha256=XaBDMkK9Dhqc9pVSqqn2xFCNYInFMsBpPOP8GZ0F04Q,4574 +transformers/models/big_bird/__pycache__/__init__.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/configuration_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/convert_bigbird_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/modeling_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/modeling_flax_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/tokenization_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/tokenization_big_bird_fast.cpython-310.pyc,, +transformers/models/big_bird/configuration_big_bird.py,sha256=9go6AGwK90fiINxUr2QVpRa84NmdtBMdE1yRaQdeIFE,7932 +transformers/models/big_bird/convert_bigbird_original_tf_checkpoint_to_pytorch.py,sha256=Y75oSwtX-d2wwOSwLo6LlUlZ9uzSEVtWwzwiJYcrXyg,2493 +transformers/models/big_bird/modeling_big_bird.py,sha256=mwU2ApXuiKEQ4pGw41M9FTzFYf95-X5Asi1Vlh_do_E,142329 +transformers/models/big_bird/modeling_flax_big_bird.py,sha256=ePVW-6VwD8sgJYIlX4eWv0EVNaInVosJW_CtqlyzpGs,109510 +transformers/models/big_bird/tokenization_big_bird.py,sha256=ieqxWuknmGjrFCdbn4eHBtW20D-8X1RTEuhy8ExX5A0,14218 +transformers/models/big_bird/tokenization_big_bird_fast.py,sha256=IdSpX06f6d92UnjbR5oXHaoUQTit4yVeYps8ebvLaKc,10168 +transformers/models/bigbird_pegasus/__init__.py,sha256=lTnaYtQ3nRjYYND5G3wilFyh6VOOWlKjNXbsmJTo-A4,2316 +transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/configuration_bigbird_pegasus.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/convert_bigbird_pegasus_tf_to_pytorch.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/modeling_bigbird_pegasus.cpython-310.pyc,, +transformers/models/bigbird_pegasus/configuration_bigbird_pegasus.py,sha256=9MOM5DGsV6qMwCgBTlw766O26R-WlpQn_6akpBHgdR4,19323 +transformers/models/bigbird_pegasus/convert_bigbird_pegasus_tf_to_pytorch.py,sha256=Wc7aoNvtzxt-DPi655Kl30CgDgq_hp08psISb8dWpLU,6288 +transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py,sha256=M9Xr4ukUftzShmCZZT7lJjtmcuHj2aHhmwOWZaEADcM,145911 +transformers/models/biogpt/__init__.py,sha256=dV4wh5lT3U-EYdvjCy6b9lI4Lr2zIN1RqSs6Rsuc6Sg,2058 +transformers/models/biogpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/configuration_biogpt.cpython-310.pyc,, 
+transformers/models/biogpt/__pycache__/convert_biogpt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/modeling_biogpt.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/tokenization_biogpt.cpython-310.pyc,, +transformers/models/biogpt/configuration_biogpt.py,sha256=k2iyeXyZa50vG6BDeZCoryOL8R5gjRupfny3O4qSRtk,6277 +transformers/models/biogpt/convert_biogpt_original_pytorch_checkpoint_to_pytorch.py,sha256=5zNYzaEy7QPc99LCHTcofXSCI3tr0pzlIpFpwT1ZgN0,10578 +transformers/models/biogpt/modeling_biogpt.py,sha256=uK6gC28UpvxBf283QmsQ68QbCjuW3y-R0r1JyOp9bls,41084 +transformers/models/biogpt/tokenization_biogpt.py,sha256=xrM8Q7_nUFI8MFELCXSiQspn1y7pM5-hHy2hvGQBSKU,13256 +transformers/models/bit/__init__.py,sha256=g9Upc1daCF75FealBk9SK9FMQ-wkJMQxtjoN5mDk4cI,2244 +transformers/models/bit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bit/__pycache__/configuration_bit.cpython-310.pyc,, +transformers/models/bit/__pycache__/convert_bit_to_pytorch.cpython-310.pyc,, +transformers/models/bit/__pycache__/image_processing_bit.cpython-310.pyc,, +transformers/models/bit/__pycache__/modeling_bit.cpython-310.pyc,, +transformers/models/bit/configuration_bit.py,sha256=upReAwVSescKNWbsk0naktI-fR8anH1SfBhiWUYYA1g,6365 +transformers/models/bit/convert_bit_to_pytorch.py,sha256=Z50gXtfe6Tj44cPdIvrFRqjHPdWHdeka5oAqsTuK_ig,5955 +transformers/models/bit/image_processing_bit.py,sha256=NjlrvLfIuCExl48RLRO-5kft5NwqwhZPjex7qBjDSr8,16395 +transformers/models/bit/modeling_bit.py,sha256=yrYTWfA4I4amTBbf0LEx1B9JYQHuEqiduj-P769tyv8,31814 +transformers/models/blenderbot/__init__.py,sha256=nB9V1KQEetB0dazUyJ_KWDJscltclpJ6fJ746wy6zuU,4031 +transformers/models/blenderbot/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/configuration_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/convert_blenderbot_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_flax_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_tf_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/tokenization_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/tokenization_blenderbot_fast.cpython-310.pyc,, +transformers/models/blenderbot/configuration_blenderbot.py,sha256=JvjK0b-KKOwlYH3CEVrqh60eFiiM50kDJDo8wY84EpQ,18884 +transformers/models/blenderbot/convert_blenderbot_original_pytorch_checkpoint_to_pytorch.py,sha256=86QBWYTeyJvxMUOfxqmGHwpDneadfqbEGSujMYw3yuU,3702 +transformers/models/blenderbot/modeling_blenderbot.py,sha256=p3OFM5CHEgSFexKbmvtA9vUB4QsGmdxeI0NspQPFzxU,75689 +transformers/models/blenderbot/modeling_flax_blenderbot.py,sha256=-2C6LxBSnWTRtoaOHDJrt9pGPLqo-7nGwCYQkJdQ4Js,64985 +transformers/models/blenderbot/modeling_tf_blenderbot.py,sha256=YROTUbcA-LZRlKB0Fuo1_glkTd-Vuu45h6YOrx9ti4U,72696 +transformers/models/blenderbot/tokenization_blenderbot.py,sha256=ts-1ntfTkffcPvfKhfs7V6R8S3wfTe7mz1_HSQHbJyU,19075 +transformers/models/blenderbot/tokenization_blenderbot_fast.py,sha256=RJXNwnxtwIAPAR4LAFfhDRhW4-Ecm5RcH7copzj93ys,13877 +transformers/models/blenderbot_small/__init__.py,sha256=O-iMMZ9xZdyvP2PV4QYvxFcCaY6jEpKt5iyDzI_mrfM,4263 +transformers/models/blenderbot_small/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/configuration_blenderbot_small.cpython-310.pyc,, 
+transformers/models/blenderbot_small/__pycache__/modeling_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/modeling_flax_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/modeling_tf_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small_fast.cpython-310.pyc,, +transformers/models/blenderbot_small/configuration_blenderbot_small.py,sha256=GspvDUXyjyfh4a0lyxXtl2sJQJyNP-epvHgo-703ay0,18321 +transformers/models/blenderbot_small/modeling_blenderbot_small.py,sha256=WxrZrwkUa8jovo31-ZhueZ9T-kD-Mz1kaMY-2RjkhCk,74555 +transformers/models/blenderbot_small/modeling_flax_blenderbot_small.py,sha256=7S4Aw5OKwRuUErJrna1O5LNERPCtclQ4p_bFbApnLOI,65946 +transformers/models/blenderbot_small/modeling_tf_blenderbot_small.py,sha256=fJBdZGkwA1VejvUF8iPZj8gUFN6Bt3knuxU-C6NsGQI,71608 +transformers/models/blenderbot_small/tokenization_blenderbot_small.py,sha256=NXWfTtB-D9UmwpaGFsMt22UqGG5_RUVXmpR9QyhGo-8,8911 +transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py,sha256=jVbE-wwb3zhY2uCp27EgcXoyLhNP57iTMCx73Wwb39A,4309 +transformers/models/blip/__init__.py,sha256=1OJOhjlrdGG1mkS-46qni8DdTosNMNVWZlR9QTe1K2I,3692 +transformers/models/blip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blip/__pycache__/configuration_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/convert_blip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/blip/__pycache__/image_processing_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_blip_text.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_tf_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_tf_blip_text.cpython-310.pyc,, +transformers/models/blip/__pycache__/processing_blip.cpython-310.pyc,, +transformers/models/blip/configuration_blip.py,sha256=BM2UyTXiZNMMHkyfOQyMoirYEuTPTLzDDTU9BgOFZlQ,16571 +transformers/models/blip/convert_blip_original_pytorch_to_hf.py,sha256=olLA10DbRUnCUOY2uHxF70u3W9wY2EBwm7eyAGfm8nM,6992 +transformers/models/blip/image_processing_blip.py,sha256=hn7D0Svr6hfuggT9wPSgXuHkEg_bjjpHZL4oVM7d1So,15692 +transformers/models/blip/modeling_blip.py,sha256=qBARXCfB0l-rfqpZtKIkxYwbs65V9Yzeir4L3yKmdZ8,61466 +transformers/models/blip/modeling_blip_text.py,sha256=MMmp7Is3B_dluI1QIqGI6_yQ8EQHY34_cJBB-aQN4kE,43781 +transformers/models/blip/modeling_tf_blip.py,sha256=oq2iOlJnUyv0K7znD8gqi7EBID54YHH3bkJOZ6yb3Uo,71414 +transformers/models/blip/modeling_tf_blip_text.py,sha256=iJiYcnZpqJhoNrfUcxPxtokT_qMJGgLyz1hAcAWZ-t4,49972 +transformers/models/blip/processing_blip.py,sha256=oU2XUYUq7FZy_9TiJFzlsojF0P-hTd9o93f4TNtSxxo,6205 +transformers/models/blip_2/__init__.py,sha256=uEo0Z9nF4AxtGnnMSZPEvbdImyy24KR_F1YtOJj_mvY,2153 +transformers/models/blip_2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/configuration_blip_2.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/convert_blip_2_original_to_pytorch.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/modeling_blip_2.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/processing_blip_2.cpython-310.pyc,, +transformers/models/blip_2/configuration_blip_2.py,sha256=8T-Go6bUcycWouvObZoGz7hrTZ2MMJDtggmoLHF70oc,16587 
+transformers/models/blip_2/convert_blip_2_original_to_pytorch.py,sha256=0343xouUoM4JqP29bgDyCbNIJfSl8BO-e278133ytSA,12276 +transformers/models/blip_2/modeling_blip_2.py,sha256=8LRioqvJpRuMiP_bpdQT1fAvffoMz9wikgbs2olfIH4,82756 +transformers/models/blip_2/processing_blip_2.py,sha256=4HnjqBRHKwuEH6NKGv0s27Tx3-alA0DYoWXJtM2gZ2I,6699 +transformers/models/bloom/__init__.py,sha256=21dUYJI8_NttCwbHTXqYSl6VcqLj_PoHPPr5NRRu49E,3098 +transformers/models/bloom/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bloom/__pycache__/configuration_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/convert_bloom_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bloom/__pycache__/modeling_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/modeling_flax_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/tokenization_bloom_fast.cpython-310.pyc,, +transformers/models/bloom/configuration_bloom.py,sha256=wAgm59dCFBCemfBFmREfAbVtw-ACvEXagW3nlS94LAk,10235 +transformers/models/bloom/convert_bloom_original_checkpoint_to_pytorch.py,sha256=WvxNS5YRu84Ek1ieKkyHRKcakRbZFJr5989nEjI6qQs,10302 +transformers/models/bloom/modeling_bloom.py,sha256=Hv_vRyXVDoc4UG2veZ5R82XCLRZ18cVx6awFYfGZUrE,54994 +transformers/models/bloom/modeling_flax_bloom.py,sha256=zBWwHZI6OBs9S1h9JSSAaEnskPKpa8jHn5AROhbLXpw,30092 +transformers/models/bloom/tokenization_bloom_fast.py,sha256=7kdn8u4ZTwk7IKEK8HTpJ-XtTtDy9pYmE7cE8ZYWybA,7033 +transformers/models/bridgetower/__init__.py,sha256=hqrBKe3gtOVATPn1QP5BEpqSVNhJZ2x_Cg11t0Bv-lc,2864 +transformers/models/bridgetower/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/configuration_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/image_processing_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/modeling_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/processing_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/configuration_bridgetower.py,sha256=ugoxQU3dN5BvV-xCraSR_f_a5gNZ1EMYXaABP6Gz2sI,16313 +transformers/models/bridgetower/image_processing_bridgetower.py,sha256=MS7LDFMYTUJHF-WIbxpcJAVPuhZGFkQCC8f7qgKkLxk,26821 +transformers/models/bridgetower/modeling_bridgetower.py,sha256=-zRrxV9HZvoleRqP5WC9U_d05_UL1RpzvhhpBKi3qRA,88197 +transformers/models/bridgetower/processing_bridgetower.py,sha256=FriChYR6CPgyDBUwOJrDlCJBuHo9RBIWXwN_NxgSGN8,5057 +transformers/models/bros/__init__.py,sha256=T1UKhF6X3-gs8q9-oIzspFbX-kmnMVirfNN1yZyCT2o,2445 +transformers/models/bros/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bros/__pycache__/configuration_bros.cpython-310.pyc,, +transformers/models/bros/__pycache__/convert_bros_to_pytorch.cpython-310.pyc,, +transformers/models/bros/__pycache__/modeling_bros.cpython-310.pyc,, +transformers/models/bros/__pycache__/processing_bros.cpython-310.pyc,, +transformers/models/bros/configuration_bros.py,sha256=bsY0G9Wq7fUrRNbPMcq6JDyTfv2VIYkJXUsTZfXnJ6k,6488 +transformers/models/bros/convert_bros_to_pytorch.py,sha256=kxZDGzvIYxz9hbIzzJOfOj5tixji5efb2884rqwoY6A,4871 +transformers/models/bros/modeling_bros.py,sha256=ffxJcV4jX4dmo0dl053rRtB9ujwPi3j0ij5NQd4fcrQ,57936 +transformers/models/bros/processing_bros.py,sha256=FQUu5czHHvQzZ1P5N9GhfjZu4cmZw_mYKuX0VNjrB54,4193 +transformers/models/byt5/__init__.py,sha256=06YhQd8TFNbc9lU5qzERZUdcSWIFxOeBOaqQh6S4WC4,942 +transformers/models/byt5/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/byt5/__pycache__/convert_byt5_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/byt5/__pycache__/tokenization_byt5.cpython-310.pyc,, +transformers/models/byt5/convert_byt5_original_tf_checkpoint_to_pytorch.py,sha256=83tKCwYRSRW7zXtm9cmszqtPhpw44cH8Cj0SWUSBgN0,2120 +transformers/models/byt5/tokenization_byt5.py,sha256=DF8GtvaS6EpR1UqaQEh6IRaT0lRQD3CKineT6ngRy_4,10031 +transformers/models/camembert/__init__.py,sha256=UBlxBknmDgdOkelwnQSGkAejq1meoGd2CgmQtGayhII,4443 +transformers/models/camembert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/camembert/__pycache__/configuration_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/modeling_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/modeling_tf_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/tokenization_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/tokenization_camembert_fast.cpython-310.pyc,, +transformers/models/camembert/configuration_camembert.py,sha256=PUXoVUL_wD0GzQZF1tPUvevuMpg1KZMYw8z3MemsGJw,7451 +transformers/models/camembert/modeling_camembert.py,sha256=YOSDDRPyeOqyCIi3iW6j6hsGuzKzzkVAmhzKZRrbi-k,72509 +transformers/models/camembert/modeling_tf_camembert.py,sha256=dZ22NfFnEGrgnmudjH6PQ04cp_yD4FnHpPT596Ljpgg,81636 +transformers/models/camembert/tokenization_camembert.py,sha256=RbjLk4_erty0RUfNn__WEA6q5xmOECFmyJ3eEHLuWxk,13978 +transformers/models/camembert/tokenization_camembert_fast.py,sha256=SxE-LECgWeoJoHax_4ivwPTM2Cmbd7_5IqrzSF-T8rc,8274 +transformers/models/canine/__init__.py,sha256=7AYQEAa5qVyCZ73fkPg0yXl5-YpLg55i3RpY1J3KulM,2272 +transformers/models/canine/__pycache__/__init__.cpython-310.pyc,, +transformers/models/canine/__pycache__/configuration_canine.cpython-310.pyc,, +transformers/models/canine/__pycache__/convert_canine_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/canine/__pycache__/modeling_canine.cpython-310.pyc,, +transformers/models/canine/__pycache__/tokenization_canine.cpython-310.pyc,, +transformers/models/canine/configuration_canine.py,sha256=VtShXIcJvDsUTXigsxBaok_t2UPv7fB49uVWslR_c28,6654 +transformers/models/canine/convert_canine_original_tf_checkpoint_to_pytorch.py,sha256=vGfFFo49PfyXtZdgIQHRcqMPcbmF8aMEC9DiHMyEsn0,2117 +transformers/models/canine/modeling_canine.py,sha256=2x2L3uE9QUKekoDb5NQFnl0Ysv2Da3cH75nnoo7EVWU,73494 +transformers/models/canine/tokenization_canine.py,sha256=bLQPsvTpk8GFuH3rsUR6o0l6f9Ldvb4S3Os0H-oFQQc,9287 +transformers/models/chinese_clip/__init__.py,sha256=SNfgqh2dGAcoNXXZx-8XFNO3UDriK_yV7vf-M23Qnfk,2919 +transformers/models/chinese_clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/configuration_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/convert_chinese_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/feature_extraction_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/image_processing_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/modeling_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/processing_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/configuration_chinese_clip.py,sha256=Hh_PJMLRsFI22qEPskV-vsW4iIzrv5nWlkmfPW0rsec,22431 +transformers/models/chinese_clip/convert_chinese_clip_original_pytorch_to_hf.py,sha256=-0bnVcdXxStmygkyj6S1hIGCVbpEbe3cM7AoshHH5ZE,5069 
+transformers/models/chinese_clip/feature_extraction_chinese_clip.py,sha256=znduyOyJ-Qdx4MC5CPb6MFZ-Wrb5PLgHWRh0xfoULR0,1247 +transformers/models/chinese_clip/image_processing_chinese_clip.py,sha256=eIjF9ejRpZBkmGpzNSopH8FicTbd_5GuzvnA1vY0ia4,15946 +transformers/models/chinese_clip/modeling_chinese_clip.py,sha256=ix32T8Zqg4ccIRxU5A6dBsCAJ_sPklKsI8vVvtRWaBE,73081 +transformers/models/chinese_clip/processing_chinese_clip.py,sha256=xeAbYW_LoAVP3wwtnOdjoJ3kvCPUVoE8OFWMldm_QXY,6706 +transformers/models/clap/__init__.py,sha256=MOoheQt_0P8KCRlN4QiWyzrskH9dUUfSSF_pZpJEchw,2322 +transformers/models/clap/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clap/__pycache__/configuration_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/convert_clap_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clap/__pycache__/feature_extraction_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/modeling_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/processing_clap.cpython-310.pyc,, +transformers/models/clap/configuration_clap.py,sha256=Y1vAbl7FYI2UehNFgLNwETR0CrD-72ymSkPT-igvhkU,20382 +transformers/models/clap/convert_clap_original_pytorch_to_hf.py,sha256=FqHoVAYXIzfUY9342azwlm9zfSP7QdS8p-u9Q6RE_K4,5149 +transformers/models/clap/feature_extraction_clap.py,sha256=rN5ZDLkqtfddEsT6kcFW2OVe7nehoPUE4HM7T3ua5us,18692 +transformers/models/clap/modeling_clap.py,sha256=pSoYaFvFpFL877F0Yz8Rp8WStKred_14115B2vMWL6A,104794 +transformers/models/clap/processing_clap.py,sha256=QpXK1vA69fFLzQesu-qetj22YiV_BiO-0cpatq8ViKo,5705 +transformers/models/clip/__init__.py,sha256=4_WowO4qRlP_COGzdscG6QH0pZU-Q5a38GsrtBTlSHs,5193 +transformers/models/clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clip/__pycache__/configuration_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/convert_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clip/__pycache__/feature_extraction_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/image_processing_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_flax_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_tf_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/processing_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/tokenization_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/tokenization_clip_fast.cpython-310.pyc,, +transformers/models/clip/configuration_clip.py,sha256=vMFHNjjfLiqeJTtQ6jN-mB8tOQU2QlixyX5VGXZo52Y,20990 +transformers/models/clip/convert_clip_original_pytorch_to_hf.py,sha256=3_eKm-gpqB5DNvL8b3OKSUrjG7YFxqrQl1DBdL_IboA,5306 +transformers/models/clip/feature_extraction_clip.py,sha256=hgRfD-s9DoI7tzDLAJ0EW3rSbkY9dOiGqoGClOiRiBM,1172 +transformers/models/clip/image_processing_clip.py,sha256=zmKMxx_qthrWGJBOO9aeVvVaB7syFsdNLHyZYxo0LQA,16512 +transformers/models/clip/modeling_clip.py,sha256=wNUb3ENle3SltgMs3ntlNf0baktqeonmoz9zVrM35jM,61190 +transformers/models/clip/modeling_flax_clip.py,sha256=4uabm9t6i4bnqRR3DZrGk7X1NcaV78L6b6E6i0Gkl2U,50517 +transformers/models/clip/modeling_tf_clip.py,sha256=4DBx81Dc8OmozGPQKeUOZqpvXUIVSrCHooJVQollyHw,60461 +transformers/models/clip/processing_clip.py,sha256=xXp4RfloqWH1K1dFCL81jGvaOowCNQ2s0CU1vz2ClP8,7148 +transformers/models/clip/tokenization_clip.py,sha256=lDCHtIoqfquNg1n69Eok2jnV1mv5ebZStvt32cnowRU,20584 
+transformers/models/clip/tokenization_clip_fast.py,sha256=vrfnIp8ZOxNHwzEkCJKSAMHaY7Jc8puUHIuudJ7Iuj0,6555 +transformers/models/clipseg/__init__.py,sha256=XmEjQiZo2l7fQvPX8Tm_rsd3wItyBrBg3gtvDAkOTZM,2179 +transformers/models/clipseg/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/configuration_clipseg.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/convert_clipseg_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/modeling_clipseg.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/processing_clipseg.cpython-310.pyc,, +transformers/models/clipseg/configuration_clipseg.py,sha256=ra61UArge-I-7ojfmu8bDzIOcYeePhhi5RyPVFB-zJo,21029 +transformers/models/clipseg/convert_clipseg_original_pytorch_to_hf.py,sha256=kYyPxdpdtt6nSxD65tXUTMbN0xPyyzjfTOOMbQ8OL0Y,11114 +transformers/models/clipseg/modeling_clipseg.py,sha256=74i474c_eJ-9FK1q4siNMd-5q-h-Xg_5eupowsmwutg,64514 +transformers/models/clipseg/processing_clipseg.py,sha256=dm7u-6S5Hg1ITAc0lYzXRJssiR92LOMkWbnR7p4eHzE,7790 +transformers/models/clvp/__init__.py,sha256=VUtmHMpw33TwZIXIYxV_ImQSKobm9ItMAZnw87Ke4Dg,2396 +transformers/models/clvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clvp/__pycache__/configuration_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/convert_clvp_to_hf.cpython-310.pyc,, +transformers/models/clvp/__pycache__/feature_extraction_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/modeling_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/number_normalizer.cpython-310.pyc,, +transformers/models/clvp/__pycache__/processing_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/tokenization_clvp.cpython-310.pyc,, +transformers/models/clvp/configuration_clvp.py,sha256=1uV0hAVyFwCcZYgDmnHMaog7sh_CiRhdWM_nA-GnCec,21029 +transformers/models/clvp/convert_clvp_to_hf.py,sha256=1WYf_vwj1CeQ_VU9iMqu7Grr_MmlAsaKEK1Lojk6yM4,9326 +transformers/models/clvp/feature_extraction_clvp.py,sha256=rq0Ygr1pCT1DK4mMzv6f4b06zgXeAwT29GYSzu1Fprw,10935 +transformers/models/clvp/modeling_clvp.py,sha256=XtbPX-UX87uEnxLrAVopZPT_cRwAUTzqwQ5e7PRvf7A,91213 +transformers/models/clvp/number_normalizer.py,sha256=gJb8KFEdsDWgzubs6cTn1i2q2R1fHCYs9C3k2hBoCyU,8857 +transformers/models/clvp/processing_clvp.py,sha256=zn13cG8abp5_ZFhoL_QQxcoTRS57rLKXBh9H5KAUBxk,3605 +transformers/models/clvp/tokenization_clvp.py,sha256=dNbrXIhYcqum_vonAZ7xsxvKimu1to6CdDDu5T5-0XA,14800 +transformers/models/code_llama/__init__.py,sha256=S1xpVZ6cLZxN1ADmRNp7dCsoKQKnb3-Tw-HkHjHcnBY,1882 +transformers/models/code_llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/code_llama/__pycache__/tokenization_code_llama.cpython-310.pyc,, +transformers/models/code_llama/__pycache__/tokenization_code_llama_fast.cpython-310.pyc,, +transformers/models/code_llama/tokenization_code_llama.py,sha256=_cpVOfwPY1I39Jyea7FLoXDam-W7l3-nJp6zoCJ5nUc,22959 +transformers/models/code_llama/tokenization_code_llama_fast.py,sha256=REf6FgNg7WbBovoDFKJey0VekXWMFVJGMVfOwHUCZaU,19758 +transformers/models/codegen/__init__.py,sha256=Zb96Hyd6W5WaIc7l-psLnEhYjANmwxzZlAR-g37xKkI,2443 +transformers/models/codegen/__pycache__/__init__.cpython-310.pyc,, +transformers/models/codegen/__pycache__/configuration_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/modeling_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/tokenization_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/tokenization_codegen_fast.cpython-310.pyc,, 
+transformers/models/codegen/configuration_codegen.py,sha256=yDXZ9gsIW7pl5Tjah0XxgsLAfRr3zPoe2SlLEiltFTk,9591
+transformers/models/codegen/modeling_codegen.py,sha256=6Y_fXx2eRskk3_lmsWf_iUJ7bn0iwHppfCzVdyhUj3Y,31320
+transformers/models/codegen/tokenization_codegen.py,sha256=_-xlrNZM80xsLxqdH16T-TCDr_2Uq_dWfwT-vDvnhbo,16531
+transformers/models/codegen/tokenization_codegen_fast.py,sha256=PS7r7kD-hdmuBOPgAlWJG9IFFAWC0e6IYkNMfqBQ6sw,11433
+transformers/models/cohere/__init__.py,sha256=JRbmLNV1IKapV0NxDyyYL9-ZNPuHIWkYpBPbyUCwKAI,2214
+transformers/models/cohere/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/cohere/__pycache__/configuration_cohere.cpython-310.pyc,,
+transformers/models/cohere/__pycache__/modeling_cohere.cpython-310.pyc,,
+transformers/models/cohere/__pycache__/tokenization_cohere_fast.cpython-310.pyc,,
+transformers/models/cohere/configuration_cohere.py,sha256=rCmrKme3Qh74CFYClgeGUgqbDLWYs9DtzEIlrDxaIrA,7361
+transformers/models/cohere/modeling_cohere.py,sha256=M_TDYvfnEfWV8qwnSNMhQLRQesU_GJ1rL7hPgXAvysU,58569
+transformers/models/cohere/tokenization_cohere_fast.py,sha256=QNO7uiaMfmvHeyIfFPoUewJ0sz3LzltSpkCP8NeffJA,41992
+transformers/models/conditional_detr/__init__.py,sha256=aFyaZb6RKCOPPf_kPK83WhyaDO5NFiox70ZbMe5gxvw,2828
+transformers/models/conditional_detr/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/conditional_detr/__pycache__/configuration_conditional_detr.cpython-310.pyc,,
+transformers/models/conditional_detr/__pycache__/convert_conditional_detr_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/conditional_detr/__pycache__/feature_extraction_conditional_detr.cpython-310.pyc,,
+transformers/models/conditional_detr/__pycache__/image_processing_conditional_detr.cpython-310.pyc,,
+transformers/models/conditional_detr/__pycache__/modeling_conditional_detr.cpython-310.pyc,,
+transformers/models/conditional_detr/configuration_conditional_detr.py,sha256=x1o4bO0eaiPvhZcA0ZtIwdsKf3IZOzZ3iNEtd4_hb4I,13306
+transformers/models/conditional_detr/convert_conditional_detr_original_pytorch_checkpoint_to_pytorch.py,sha256=O0da9fOwcPhpQSaa0Ci34txn-9YF9fAMGvRHK0dCk3Q,15930
+transformers/models/conditional_detr/feature_extraction_conditional_detr.py,sha256=opHXZebd-6cMJnO6RbrAdmVYmnkNzK1up_fPlHTSLrk,1553
+transformers/models/conditional_detr/image_processing_conditional_detr.py,sha256=4ixTeCxDOS9bz4gyUTVE1GjAJeDCK8srbaKRbt0t-LM,81280
+transformers/models/conditional_detr/modeling_conditional_detr.py,sha256=BA-OJXHlNglkGfMq15IyO4_nqv3LQbe8ENhjAYhmBHc,132187
+transformers/models/convbert/__init__.py,sha256=wkLfe2pjkQmfQ0sd28ixnL1__YYimYDtT5FP1bRD0YE,4069
+transformers/models/convbert/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/convbert/__pycache__/configuration_convbert.cpython-310.pyc,,
+transformers/models/convbert/__pycache__/convert_convbert_original_tf1_checkpoint_to_pytorch_and_tf2.cpython-310.pyc,,
+transformers/models/convbert/__pycache__/modeling_convbert.cpython-310.pyc,,
+transformers/models/convbert/__pycache__/modeling_tf_convbert.cpython-310.pyc,,
+transformers/models/convbert/__pycache__/tokenization_convbert.cpython-310.pyc,,
+transformers/models/convbert/__pycache__/tokenization_convbert_fast.cpython-310.pyc,,
+transformers/models/convbert/configuration_convbert.py,sha256=_Ryl9j6dcPjzID_tAPrzgJgwf3NU7xNcJU0haut1lo8,6934
+transformers/models/convbert/convert_convbert_original_tf1_checkpoint_to_pytorch_and_tf2.py,sha256=vTZyGhG9v7o4rDuP9-xM26gX1EzlCda7Sn_ELT9n3Gk,2108
+transformers/models/convbert/modeling_convbert.py,sha256=8DZ6IVR32VDfq8ALr_nZcJ0wLh5ml9i8s3_CT_-UQgM,58380
+transformers/models/convbert/modeling_tf_convbert.py,sha256=5E36T-ANwFpieUU0uGWImtpTsyauum9N2ldRE9TFBdw,61468
+transformers/models/convbert/tokenization_convbert.py,sha256=_FUqWP5wYDZ5Z0A8Y84Ptc0c5iVAUlq0dCU9Z7prvAs,20970
+transformers/models/convbert/tokenization_convbert_fast.py,sha256=XyV1ssOlvc2Ln3rgXZ9_eb7cDzYr_x0nETPei-AtclE,7780
+transformers/models/convnext/__init__.py,sha256=K8TKvIQuVogfZPifZjZeCwGJKA_vnASMr7LWx4CggqA,3150
+transformers/models/convnext/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/convnext/__pycache__/configuration_convnext.cpython-310.pyc,,
+transformers/models/convnext/__pycache__/convert_convnext_to_pytorch.cpython-310.pyc,,
+transformers/models/convnext/__pycache__/feature_extraction_convnext.cpython-310.pyc,,
+transformers/models/convnext/__pycache__/image_processing_convnext.cpython-310.pyc,,
+transformers/models/convnext/__pycache__/modeling_convnext.cpython-310.pyc,,
+transformers/models/convnext/__pycache__/modeling_tf_convnext.cpython-310.pyc,,
+transformers/models/convnext/configuration_convnext.py,sha256=DKHCH1QXO8vjBJOWa0a1JRBqlbNWqmZrQ1BSisMlp5M,6227
+transformers/models/convnext/convert_convnext_to_pytorch.py,sha256=6QenssUB5Op--7nvPTPjRUEozX-4kljweJvc-blSpnQ,10220
+transformers/models/convnext/feature_extraction_convnext.py,sha256=TyFMochXYlN3vKH7Ud0nXagzxGhio2Bfma4ofceR_zA,1200
+transformers/models/convnext/image_processing_convnext.py,sha256=JIXegI7ZMZUxJApMMoY4JTmA5iNxJm9FN3UQnQwRpNc,16288
+transformers/models/convnext/modeling_convnext.py,sha256=2Bz-3mive7eeRtCSDPWV4E2MvXkIJ5-YwN6xMTiWuTU,21883
+transformers/models/convnext/modeling_tf_convnext.py,sha256=E21qdpGpPYVH4xJcMyjw5tdTCpoVobcjEcqhhtSID90,27195
+transformers/models/convnextv2/__init__.py,sha256=JmOrlR6-q7yFZqSG7obPonJSuSpLVhTOIax7X-3FDwY,2825
+transformers/models/convnextv2/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/convnextv2/__pycache__/configuration_convnextv2.cpython-310.pyc,,
+transformers/models/convnextv2/__pycache__/convert_convnextv2_to_pytorch.cpython-310.pyc,,
+transformers/models/convnextv2/__pycache__/modeling_convnextv2.cpython-310.pyc,,
+transformers/models/convnextv2/__pycache__/modeling_tf_convnextv2.cpython-310.pyc,,
+transformers/models/convnextv2/configuration_convnextv2.py,sha256=8zueCHJbSR0BqWzIqn7G8p49usf0QVUHQJVjGC9IFQE,5525
+transformers/models/convnextv2/convert_convnextv2_to_pytorch.py,sha256=Yswl5UwLP0t0tC8O2b8wix2beNaMtPy7areKFCuEccg,12473
+transformers/models/convnextv2/modeling_convnextv2.py,sha256=VT0O_uhD08q8fJKv9wKMW5zWXIcWDzt8IHcLgcToIrM,23655
+transformers/models/convnextv2/modeling_tf_convnextv2.py,sha256=RAbH2qhb2IPA9QYlqofUQ6ACGoqaKAAr6s1LeUeGgs4,27595
+transformers/models/cpm/__init__.py,sha256=9SmT0nL5DgGjXxmPaQFi9GGPXWuhFic2DX2GsF-BynQ,1816
+transformers/models/cpm/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/cpm/__pycache__/tokenization_cpm.cpython-310.pyc,,
+transformers/models/cpm/__pycache__/tokenization_cpm_fast.cpython-310.pyc,,
+transformers/models/cpm/tokenization_cpm.py,sha256=CS26Yw8B-Jo_HIwVz6C-VR12d4MVxNh-M0mM3qTlYDE,15026
+transformers/models/cpm/tokenization_cpm_fast.py,sha256=Rt2x767ZSd-A_Khz5itY9iw4AQXtLhKCxP8w-sUz1wU,10425
+transformers/models/cpmant/__init__.py,sha256=5hTyJtQwoONrf9-BMvt_nT_bovkj9avoSk9UdLCvW4w,2117
+transformers/models/cpmant/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/cpmant/__pycache__/configuration_cpmant.cpython-310.pyc,,
+transformers/models/cpmant/__pycache__/modeling_cpmant.cpython-310.pyc,,
+transformers/models/cpmant/__pycache__/tokenization_cpmant.cpython-310.pyc,,
+transformers/models/cpmant/configuration_cpmant.py,sha256=F69mQQvTS_eskh5_8-4mmtcgHMnuNqumodJHABqyRkk,5215
+transformers/models/cpmant/modeling_cpmant.py,sha256=AwKfTJbnqiNX4IssjQtdy1yA1tZHfaByKYIVhoja7uw,37512
+transformers/models/cpmant/tokenization_cpmant.py,sha256=cfySlpG9_Ula60VT3BxPJlcFxmrQ--CQxGswVZNgWGk,9711
+transformers/models/ctrl/__init__.py,sha256=-Sa7nUQv3Cxj4KLXFaBtnkG_r3uIdpbU_Q_TmMl1lKM,2688
+transformers/models/ctrl/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/ctrl/__pycache__/configuration_ctrl.cpython-310.pyc,,
+transformers/models/ctrl/__pycache__/modeling_ctrl.cpython-310.pyc,,
+transformers/models/ctrl/__pycache__/modeling_tf_ctrl.cpython-310.pyc,,
+transformers/models/ctrl/__pycache__/tokenization_ctrl.cpython-310.pyc,,
+transformers/models/ctrl/configuration_ctrl.py,sha256=0r-lMZF0FCvkxDSSut0UoFqlJkWIap3cWIChSdq0-ao,4754
+transformers/models/ctrl/modeling_ctrl.py,sha256=zWef4WjFz6iro6fxfYkPP6y90BEO5QmYsOyuQTv1U5k,35697
+transformers/models/ctrl/modeling_tf_ctrl.py,sha256=54SBEtzFfUjGTtluw7RnFH1GOYXTZaPyazTqghxU2aI,39730
+transformers/models/ctrl/tokenization_ctrl.py,sha256=EGNrlwwhfT39EWpoLFk8VvclzCnpfgttJHwM7VUSoqw,8058
+transformers/models/cvt/__init__.py,sha256=dk1C0zaBDT0dl7BYLe1mRb85Dp_a_IHomekjOjYPHJ8,2434
+transformers/models/cvt/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/cvt/__pycache__/configuration_cvt.cpython-310.pyc,,
+transformers/models/cvt/__pycache__/convert_cvt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/cvt/__pycache__/modeling_cvt.cpython-310.pyc,,
+transformers/models/cvt/__pycache__/modeling_tf_cvt.cpython-310.pyc,,
+transformers/models/cvt/configuration_cvt.py,sha256=VZLhB8Ol81xijVNMgrTY2ves1pzVQ5hAyYWuZsyQAJw,6754
+transformers/models/cvt/convert_cvt_original_pytorch_checkpoint_to_pytorch.py,sha256=miqNzPWIAjwl5rtkWOmRUJl-18X-9cRXXWb9M3ScHI4,13570
+transformers/models/cvt/modeling_cvt.py,sha256=ErKQINT0sm045FNLxrrI_mu6jc2AEeAavKU130VPdtE,28764
+transformers/models/cvt/modeling_tf_cvt.py,sha256=aOCKA0ON6OY3acBGGRwpM_zZcOY72YeBCINi-8R1IT4,43563
+transformers/models/data2vec/__init__.py,sha256=1Pq8n8wNccLQ76e8oNDwOemqh-E0eMKpr6tdt2ata8w,4933
+transformers/models/data2vec/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/configuration_data2vec_audio.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/configuration_data2vec_text.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/configuration_data2vec_vision.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/convert_data2vec_audio_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/convert_data2vec_text_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/modeling_data2vec_audio.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/modeling_data2vec_text.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/modeling_data2vec_vision.cpython-310.pyc,,
+transformers/models/data2vec/__pycache__/modeling_tf_data2vec_vision.cpython-310.pyc,,
+transformers/models/data2vec/configuration_data2vec_audio.py,sha256=XsmBloKL5Bop4r7O4ag5B2R_h94EIIwdJRUzT82rKKE,16322
+transformers/models/data2vec/configuration_data2vec_text.py,sha256=B0L25th5BhuUQ4g1Jz0Ac-XqptaMMZjxs9hiJo_AaFs,7380
+transformers/models/data2vec/configuration_data2vec_vision.py,sha256=8WDKE8oQ9InFj3SMpKXdAoFCXRrBe-AfRLtGhkVvZtI,9347
+transformers/models/data2vec/convert_data2vec_audio_original_pytorch_checkpoint_to_pytorch.py,sha256=czYaA_tlF-uCDMFV1RFaL5g8QJRozBiVUCu9nuhLcZU,10858
+transformers/models/data2vec/convert_data2vec_text_original_pytorch_checkpoint_to_pytorch.py,sha256=4scSS9J1m1xG6sy_BLvjbCeEL8Ke2RhNtNqsVt2zUCI,9580
+transformers/models/data2vec/convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py,sha256=qKjV-jqIgL-6i17m4yQLW_93SbPpGxQnvHjuy1xVxQU,15340
+transformers/models/data2vec/modeling_data2vec_audio.py,sha256=q7ZgT0uIF90cblhwXUkcXJUHfr2EV4u37Z5Lmvet7ks,65405
+transformers/models/data2vec/modeling_data2vec_text.py,sha256=zGEJKXfSx2dzYaTz7yW8BnkEo4wgbzTNFQWuheSuzVE,71278
+transformers/models/data2vec/modeling_data2vec_vision.py,sha256=lpCgPsKjll09wAmVI8C-CrrRRUgp2X21b4Fj8tmdCYs,53758
+transformers/models/data2vec/modeling_tf_data2vec_vision.py,sha256=NptTURm89gID4Xn7M5whOHxMX2RWHVBWDWcca2G0srA,73356
+transformers/models/dbrx/__init__.py,sha256=n36C-BWFjJ9wkgBAv764sGksJFOL_fkME1fe1cTm-sg,1513
+transformers/models/dbrx/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/dbrx/__pycache__/configuration_dbrx.cpython-310.pyc,,
+transformers/models/dbrx/__pycache__/modeling_dbrx.cpython-310.pyc,,
+transformers/models/dbrx/configuration_dbrx.py,sha256=5A7PHWzLZ7VQ7rPVXbfV0Ix3SAQ72DWdXNVZHvMmmtk,11054
+transformers/models/dbrx/modeling_dbrx.py,sha256=4YYQHqZLZcn1v8ETXIlBfl8GLe3QWOlHWHye5heek3A,70159
+transformers/models/deberta/__init__.py,sha256=azYcZaZso6o7T3SDyUrczkAZ4ZzgDh4hcPoT0bgPRSE,3677
+transformers/models/deberta/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deberta/__pycache__/configuration_deberta.cpython-310.pyc,,
+transformers/models/deberta/__pycache__/modeling_deberta.cpython-310.pyc,,
+transformers/models/deberta/__pycache__/modeling_tf_deberta.cpython-310.pyc,,
+transformers/models/deberta/__pycache__/tokenization_deberta.cpython-310.pyc,,
+transformers/models/deberta/__pycache__/tokenization_deberta_fast.cpython-310.pyc,,
+transformers/models/deberta/configuration_deberta.py,sha256=vKByhl26MNUkc2PbGuAjJ7jIa_wS3YRd0iozdrjFxmU,8782
+transformers/models/deberta/modeling_deberta.py,sha256=AiY5atrWFBRFdyZxYLRgCcENxvBiAGTqC2carfQ09vs,57918
+transformers/models/deberta/modeling_tf_deberta.py,sha256=Z0GnuxXSoGQtgvjkd8MFvP7pGZ_4GaXHtsnwv-iwf9s,68935
+transformers/models/deberta/tokenization_deberta.py,sha256=7ikisAJGKrVD1bhhsYlcN4MHhP0R_UqUEHCu5MzpDrc,17052
+transformers/models/deberta/tokenization_deberta_fast.py,sha256=MfbMLRaqECwKDRvpJ6dAr317gKN0q6TbjjTSe5JTL0M,10722
+transformers/models/deberta_v2/__init__.py,sha256=afG1pzu0TIczwpL6vPJXnwkO5Sn9R5qrMvjaTzysH1U,3981
+transformers/models/deberta_v2/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deberta_v2/__pycache__/configuration_deberta_v2.cpython-310.pyc,,
+transformers/models/deberta_v2/__pycache__/modeling_deberta_v2.cpython-310.pyc,,
+transformers/models/deberta_v2/__pycache__/modeling_tf_deberta_v2.cpython-310.pyc,,
+transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2.cpython-310.pyc,,
+transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2_fast.cpython-310.pyc,,
+transformers/models/deberta_v2/configuration_deberta_v2.py,sha256=Hp9o7TDKpc3kQ7c_rsSsUvXJIqKyR3M9M0sJ87UGPN0,8721
+transformers/models/deberta_v2/modeling_deberta_v2.py,sha256=E-bduoiC6I0Wozr0m9VkyggI9yvZjUaELHBm0-1CjpI,67493
+transformers/models/deberta_v2/modeling_tf_deberta_v2.py,sha256=B6ETqQpXYwLr1-zBHWis1GsuLvue9OMdQ2cjj9MTXus,81234
+transformers/models/deberta_v2/tokenization_deberta_v2.py,sha256=j0JyIHf3JHcrO_mpGz_W_igf6GJS2XWADOp3CwsEkUw,20704
+transformers/models/deberta_v2/tokenization_deberta_v2_fast.py,sha256=qwgQDjV0k_3d40RgzlUiIDBqwRO1v_dbqwtcf2WiZ-M,9758
+transformers/models/decision_transformer/__init__.py,sha256=geVmBybTFepK0keGuRrLYl6hwZhT5I2BK4dfeYFDqWw,2124
+transformers/models/decision_transformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/decision_transformer/__pycache__/configuration_decision_transformer.cpython-310.pyc,,
+transformers/models/decision_transformer/__pycache__/modeling_decision_transformer.cpython-310.pyc,,
+transformers/models/decision_transformer/configuration_decision_transformer.py,sha256=7Z0GZiYd90utw6yBr5xUqIT-iyH4eiq5LGDq3ALkCzw,7099
+transformers/models/decision_transformer/modeling_decision_transformer.py,sha256=QcIDiihZ3Ct8Ku08wSkHkfLj6YsCiDd8QyMW6D7ILHA,43049
+transformers/models/deformable_detr/__init__.py,sha256=jwNDOMAnuD5Efvu3FYvA1H9JJB9QBb6NpoaoCCJU1Ns,2599
+transformers/models/deformable_detr/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deformable_detr/__pycache__/configuration_deformable_detr.cpython-310.pyc,,
+transformers/models/deformable_detr/__pycache__/convert_deformable_detr_to_pytorch.cpython-310.pyc,,
+transformers/models/deformable_detr/__pycache__/feature_extraction_deformable_detr.cpython-310.pyc,,
+transformers/models/deformable_detr/__pycache__/image_processing_deformable_detr.cpython-310.pyc,,
+transformers/models/deformable_detr/__pycache__/load_custom.cpython-310.pyc,,
+transformers/models/deformable_detr/__pycache__/modeling_deformable_detr.cpython-310.pyc,,
+transformers/models/deformable_detr/configuration_deformable_detr.py,sha256=5n4pHUoiDWEUeEGObAyY3loaKNLUfPBZDqEayBWMHsM,14522
+transformers/models/deformable_detr/convert_deformable_detr_to_pytorch.py,sha256=264dW2XMu4QcgO6IaMa4eOjrIHErz-RLw_9FLD6C46Q,9477
+transformers/models/deformable_detr/feature_extraction_deformable_detr.py,sha256=GwYaT6B6-Fu2Jbl8CALodb7Lz4gr9jSRfq01QfLQc7Y,1546
+transformers/models/deformable_detr/image_processing_deformable_detr.py,sha256=JizyRpHgEX8ZNruNiFth0a_Co-D0M3cFEbi8At8jCrU,68712
+transformers/models/deformable_detr/load_custom.py,sha256=0jENX1Mkz0bYlyUYYgp1YYEpQ8r32degzoL4CmVGe3w,1559
+transformers/models/deformable_detr/modeling_deformable_detr.py,sha256=pcwxvcPrryk9Dl0tDsXXU8CAOZTHQEodnai-u7QB1Lw,121344
+transformers/models/deit/__init__.py,sha256=ZVWuhflGzxt-AZ2wcCTX0JfXBY3puVD_O9WkNqfOH1A,3486
+transformers/models/deit/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deit/__pycache__/configuration_deit.cpython-310.pyc,,
+transformers/models/deit/__pycache__/convert_deit_timm_to_pytorch.cpython-310.pyc,,
+transformers/models/deit/__pycache__/feature_extraction_deit.cpython-310.pyc,,
+transformers/models/deit/__pycache__/image_processing_deit.cpython-310.pyc,,
+transformers/models/deit/__pycache__/modeling_deit.cpython-310.pyc,,
+transformers/models/deit/__pycache__/modeling_tf_deit.cpython-310.pyc,,
+transformers/models/deit/configuration_deit.py,sha256=5DiDN1-_FHZX59MM9n5gITzkt8mDtN5Td2497ZQUBaU,5792
+transformers/models/deit/convert_deit_timm_to_pytorch.py,sha256=JMCXzccvcbz1euXpqx-pb86V2PVDLKl-OYbFDLvvSZU,9217
+transformers/models/deit/feature_extraction_deit.py,sha256=1j_aV0oAZUofSYJGCEFRo0WNd_zVEXjj3SFlTQSuV1E,1172
+transformers/models/deit/image_processing_deit.py,sha256=VgMa1Wp87jIbbkcfqNUj_61sapJtXzOFZ0vFIbKpcdA,15720
+transformers/models/deit/modeling_deit.py,sha256=wV7Ag5ODDECuJz8rJWuRby3RFZqEhQRmuXc1T6DzBjw,38183
+transformers/models/deit/modeling_tf_deit.py,sha256=k0x2P8L_4SZ9CEGbDZ7_ArofzLRNdfsL7-E9GwwCCrw,49513
+transformers/models/deprecated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+transformers/models/deprecated/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/__pycache__/_archive_maps.cpython-310.pyc,,
+transformers/models/deprecated/_archive_maps.py,sha256=OsadX80pmOWHO2xgu8WBe8vgHxwko-MDcglgM_jG_30,124695
+transformers/models/deprecated/bort/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+transformers/models/deprecated/bort/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/bort/__pycache__/convert_bort_original_gluonnlp_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/deprecated/bort/convert_bort_original_gluonnlp_checkpoint_to_pytorch.py,sha256=y0wlQneBswkzekq70fW2-mqsn9RuITThO1AKV_8Cn5I,14068
+transformers/models/deprecated/mctct/__init__.py,sha256=Rbzjcs6HiXhpUeaKRE6Qtj9XsIRLkUrFAiQnbOerMrM,1892
+transformers/models/deprecated/mctct/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/mctct/__pycache__/configuration_mctct.cpython-310.pyc,,
+transformers/models/deprecated/mctct/__pycache__/feature_extraction_mctct.cpython-310.pyc,,
+transformers/models/deprecated/mctct/__pycache__/modeling_mctct.cpython-310.pyc,,
+transformers/models/deprecated/mctct/__pycache__/processing_mctct.cpython-310.pyc,,
+transformers/models/deprecated/mctct/configuration_mctct.py,sha256=sRUJF1RcoVRib5IAxg4u1sBLj_XxH-11hry18Tfl8ok,9159
+transformers/models/deprecated/mctct/feature_extraction_mctct.py,sha256=JsaSE20NeqBX8Uw-07Y5HdUcQtbYZqCrTN18Wu2B4rI,13460
+transformers/models/deprecated/mctct/modeling_mctct.py,sha256=Xmh-PfvY2jJ0cAgcuuaWPsW0FvmZJQ-FFglPr_XgiPI,32881
+transformers/models/deprecated/mctct/processing_mctct.py,sha256=0ejBpQWA6YVuU0A7hrFg797hFZnOO7GexVU5Da7xLP0,5930
+transformers/models/deprecated/mmbt/__init__.py,sha256=0CCmesCwGIMNFlf2oDsL0gYaCSpsfAC1_bMOXRcAgF4,1480
+transformers/models/deprecated/mmbt/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/mmbt/__pycache__/configuration_mmbt.cpython-310.pyc,,
+transformers/models/deprecated/mmbt/__pycache__/modeling_mmbt.cpython-310.pyc,,
+transformers/models/deprecated/mmbt/configuration_mmbt.py,sha256=agMAOVRnUrMlA8C6adBRLTuLmt8qG4lm4ykjGwS-qs4,1606
+transformers/models/deprecated/mmbt/modeling_mmbt.py,sha256=daov1Smf2qd_BhebAOQiyN53C-8oZZary9m7iZV-nuU,18914
+transformers/models/deprecated/open_llama/__init__.py,sha256=Mlmat1Ln8JLYZcldnGrMfBdgOwM01CmsoQEFedbJ24g,2788
+transformers/models/deprecated/open_llama/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/open_llama/__pycache__/configuration_open_llama.cpython-310.pyc,,
+transformers/models/deprecated/open_llama/__pycache__/modeling_open_llama.cpython-310.pyc,,
+transformers/models/deprecated/open_llama/configuration_open_llama.py,sha256=xnOSXytwBznDjv6AQGEyOMQXGExxoq48MbY0lWRzyHk,7964
+transformers/models/deprecated/open_llama/modeling_open_llama.py,sha256=wawujfN1QFBKngsUWOZq6dFc3DHycnn4m_Za3jJBXgc,43896
+transformers/models/deprecated/retribert/__init__.py,sha256=yMGneTgD7_VaMhXG00Liyvt4digAfyQ_j6Ou55p8iEU,2351
+transformers/models/deprecated/retribert/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/retribert/__pycache__/configuration_retribert.cpython-310.pyc,,
+transformers/models/deprecated/retribert/__pycache__/modeling_retribert.cpython-310.pyc,,
+transformers/models/deprecated/retribert/__pycache__/tokenization_retribert.cpython-310.pyc,,
+transformers/models/deprecated/retribert/__pycache__/tokenization_retribert_fast.cpython-310.pyc,,
+transformers/models/deprecated/retribert/configuration_retribert.py,sha256=F9252tGauNQByeRMSgeJ1qPr4iJ5OL4U06WZ2rzCs34,5290
+transformers/models/deprecated/retribert/modeling_retribert.py,sha256=MIXbAXvuAGiOEMzpYEsUGoLKpPmh6xgRRrIodcg78iA,9388
+transformers/models/deprecated/retribert/tokenization_retribert.py,sha256=Va7tovgQiEME_QhTpbZNz9htj6M4dZbBWmwJv8MouUY,22090
+transformers/models/deprecated/retribert/tokenization_retribert_fast.py,sha256=Xd0oASIwAqMU-OzoGBzUFV1Lx5gx8AtKgvDpmpMNjXk,8251
+transformers/models/deprecated/tapex/__init__.py,sha256=lQutKYtwbU8ztPva0tyRnnV-zOWw6rxkGyoOUSuvnUo,926
+transformers/models/deprecated/tapex/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/tapex/__pycache__/tokenization_tapex.cpython-310.pyc,,
+transformers/models/deprecated/tapex/tokenization_tapex.py,sha256=MPuB1JknrO9WY_j-Hgy8JWGNKvcowBDrjhFi-bCGALw,64347
+transformers/models/deprecated/trajectory_transformer/__init__.py,sha256=NZl7qNHOSc-VlOFIvhh4iSpn_fyGHZ8k7a9WXXG5HGg,2077
+transformers/models/deprecated/trajectory_transformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/trajectory_transformer/__pycache__/configuration_trajectory_transformer.cpython-310.pyc,,
+transformers/models/deprecated/trajectory_transformer/__pycache__/convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/deprecated/trajectory_transformer/__pycache__/modeling_trajectory_transformer.cpython-310.pyc,,
+transformers/models/deprecated/trajectory_transformer/configuration_trajectory_transformer.py,sha256=HO7LnfesbLDrz16K1zfNH2yboJJHsSpPzAIR7DUksDY,7165
+transformers/models/deprecated/trajectory_transformer/convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.py,sha256=9jmCO1yueIbzUUvOHCl62XDCG4ExTkvsgRVCe-aBG7U,3139
+transformers/models/deprecated/trajectory_transformer/modeling_trajectory_transformer.py,sha256=Ez9W4hVDl3eKn3wG3Mw6rtxcVxQwpyN1Cbbm5ootOdY,25697
+transformers/models/deprecated/transfo_xl/__init__.py,sha256=bO5xiMeUsfu9k2nqJ4N2qTGvSniyD9oA8rHEn46ne-0,3183
+transformers/models/deprecated/transfo_xl/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/__pycache__/configuration_transfo_xl.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/__pycache__/convert_transfo_xl_original_tf_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl_utilities.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl_utilities.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/__pycache__/tokenization_transfo_xl.cpython-310.pyc,,
+transformers/models/deprecated/transfo_xl/configuration_transfo_xl.py,sha256=cAICvGEF8Q8UKNrTekcZ2Sb08j6ZvCsdFdK5LYJQh5Q,7966
+transformers/models/deprecated/transfo_xl/convert_transfo_xl_original_tf_checkpoint_to_pytorch.py,sha256=cUL10fYCG-kWYI3BHuKto2AIxb0V2pgPQ3Z8JU9G-Sg,4938
+transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl.py,sha256=gxO81tAVwqcgFIZkEtX1WILtMng7pR9wo19ARAj4o8U,45995
+transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl_utilities.py,sha256=Kd2QFblDU3C5U0uqrkCIg1U3vytu9a8VLccyomBUu2o,7635
+transformers/models/deprecated/transfo_xl/modeling_transfo_xl.py,sha256=k9_cCEAI9yZjCDmOAbph5ipRDIwRlEXL7aT825wC06s,55979
+transformers/models/deprecated/transfo_xl/modeling_transfo_xl_utilities.py,sha256=oZAsrKz41ek-kSV2rvFHyCHfkAM6e5NyqbGCZSxIML4,10861
+transformers/models/deprecated/transfo_xl/tokenization_transfo_xl.py,sha256=GlTLQX9uKzmx2I63lbcBSNQdnbISt2Rw5EiZ6-eJGDY,31973
+transformers/models/deprecated/van/__init__.py,sha256=LfVeE-QGxQJS0QZhWPmPD9s2yX5Pk9iA5NK90CkoyQQ,1728
+transformers/models/deprecated/van/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deprecated/van/__pycache__/configuration_van.cpython-310.pyc,,
+transformers/models/deprecated/van/__pycache__/convert_van_to_pytorch.cpython-310.pyc,,
+transformers/models/deprecated/van/__pycache__/modeling_van.cpython-310.pyc,,
+transformers/models/deprecated/van/configuration_van.py,sha256=MMRnXRxKSfu67Ep6Mhoz7-ru0FRiTk7vS8XmnzmmyMM,4742
+transformers/models/deprecated/van/convert_van_to_pytorch.py,sha256=KW-0r4GVcmH_EzxC-qsdUn5TJw4TEl0wmUKPnJPYZaw,10374
+transformers/models/deprecated/van/modeling_van.py,sha256=DT_vMwDxQPVJBf2YaPCVhlL_ImiRHqnS18ypDAxK5zw,21383
+transformers/models/depth_anything/__init__.py,sha256=nSTo0y3RhnvBAua09yiGxbsVy8YKNb6x7Hl-jaM3Sro,1858
+transformers/models/depth_anything/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/depth_anything/__pycache__/configuration_depth_anything.cpython-310.pyc,,
+transformers/models/depth_anything/__pycache__/convert_depth_anything_to_hf.cpython-310.pyc,,
+transformers/models/depth_anything/__pycache__/modeling_depth_anything.cpython-310.pyc,,
+transformers/models/depth_anything/configuration_depth_anything.py,sha256=PpAcQ3jdwqBaGnkvelCP0FnNHII_RR9UYhOFZ43_x0c,6609
+transformers/models/depth_anything/convert_depth_anything_to_hf.py,sha256=N2RCeVAiH6pzGmUZnHq0FPoCHD-EkrMviOqof1Qd7Ww,13710
+transformers/models/depth_anything/modeling_depth_anything.py,sha256=QVPwdQDTE1bsycXM-OrxUi5XznZj6iy6MQISwdrB2sw,18119
+transformers/models/deta/__init__.py,sha256=eHgP2aY7a0Of2OkxgCPavzEYvqk2etS3aqXD23Zd3Rc,2205
+transformers/models/deta/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/deta/__pycache__/configuration_deta.cpython-310.pyc,,
+transformers/models/deta/__pycache__/convert_deta_resnet_to_pytorch.cpython-310.pyc,,
+transformers/models/deta/__pycache__/convert_deta_swin_to_pytorch.cpython-310.pyc,,
+transformers/models/deta/__pycache__/image_processing_deta.cpython-310.pyc,,
+transformers/models/deta/__pycache__/modeling_deta.cpython-310.pyc,,
+transformers/models/deta/configuration_deta.py,sha256=UIY42yFQMrXj9SotRVh41ZcLOGN7MDWr1pW1LPGSN_4,14043
+transformers/models/deta/convert_deta_resnet_to_pytorch.py,sha256=r-beTAdmCNONvgIPQmIf890KgDQmdi8mRoDkSWoumJg,16833
+transformers/models/deta/convert_deta_swin_to_pytorch.py,sha256=WL18erfLKYr7-pmcHC5i5t6it7EnSagPsuHs5VEgLEA,19031
+transformers/models/deta/image_processing_deta.py,sha256=32jbJymBXq0aWrMx2bUV22d6GssnIufh_emIddCWBIw,52396
+transformers/models/deta/modeling_deta.py,sha256=P0zogPkirwOJ_CtbiBos9u0SXJ6PliBkNidy_2f6j_w,139637
+transformers/models/detr/__init__.py,sha256=dWemW6cL_QLOXK3i2uoP6ywKNrjVkpw8IXeQYbs0HfA,2438
+transformers/models/detr/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/detr/__pycache__/configuration_detr.cpython-310.pyc,,
+transformers/models/detr/__pycache__/convert_detr_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/detr/__pycache__/convert_detr_to_pytorch.cpython-310.pyc,,
+transformers/models/detr/__pycache__/feature_extraction_detr.cpython-310.pyc,,
+transformers/models/detr/__pycache__/image_processing_detr.cpython-310.pyc,,
+transformers/models/detr/__pycache__/modeling_detr.cpython-310.pyc,,
+transformers/models/detr/configuration_detr.py,sha256=I8DS6B1dlh4gGU6vYeiyh3ZiRvwpecxul6JpwTRXF1E,13515
+transformers/models/detr/convert_detr_original_pytorch_checkpoint_to_pytorch.py,sha256=_4fQ1N3Zat1x1r-Gr3FosWuV3pW3yFKQQgM9MKujmbY,13561
+transformers/models/detr/convert_detr_to_pytorch.py,sha256=_E63l9rWZUfwSHCfJbz-HoIDT4hxAwoHRKXj1Ni03AA,18993
+transformers/models/detr/feature_extraction_detr.py,sha256=gMyG16pNJKoimImXOyqi589hGj37OYGWb7ZoTx84d5I,1474
+transformers/models/detr/image_processing_detr.py,sha256=vESOCWU_TLQwivn38arGL_SMCZD8LUKFfbT6U84DEvs,89208
+transformers/models/detr/modeling_detr.py,sha256=aQ9ck_gmiIbAKfyFu0MnCKMsnADZcosneDliUnKGYs0,116496
+transformers/models/dialogpt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+transformers/models/dialogpt/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/dialogpt/__pycache__/convert_dialogpt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/dialogpt/convert_dialogpt_original_pytorch_checkpoint_to_pytorch.py,sha256=Zp59TmLBKEs-x1-quZZeqARhpS3cTnnmgT4nCI0zsHY,1537
+transformers/models/dinat/__init__.py,sha256=Jt3EAbCCZcBjJD_sEane9NU0btqsFkOTqz6JkUtmY_4,1812
+transformers/models/dinat/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/dinat/__pycache__/configuration_dinat.cpython-310.pyc,,
+transformers/models/dinat/__pycache__/modeling_dinat.cpython-310.pyc,,
+transformers/models/dinat/configuration_dinat.py,sha256=scl3Gvwml-AnQ8CvX3TRz_4r-wGLU3jbx_Rbxdi2-RY,7426
+transformers/models/dinat/modeling_dinat.py,sha256=MbU2PyIs2OFxHaK2MtZuger03MXCiF2i5Q3GJQ5DRYg,41719
+transformers/models/dinov2/__init__.py,sha256=vQdLyp1VnVfmx0Vdvwvgvk9bsWCUArt-hPzzoDsA20I,1890
+transformers/models/dinov2/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/dinov2/__pycache__/configuration_dinov2.cpython-310.pyc,,
+transformers/models/dinov2/__pycache__/convert_dinov2_to_hf.cpython-310.pyc,,
+transformers/models/dinov2/__pycache__/modeling_dinov2.cpython-310.pyc,,
+transformers/models/dinov2/configuration_dinov2.py,sha256=XQYd81rCTmpBnJxcbpPkJmX8Gnafr8NUrrcYZ0WIvjs,8140
+transformers/models/dinov2/convert_dinov2_to_hf.py,sha256=g4wmiqVdUlNbRoy_GbEws3DQaXfUA1I9Qh6bHhL6yZk,11964
+transformers/models/dinov2/modeling_dinov2.py,sha256=6aG391Nec6OaDkXXoOkHZol_frdtM2TqyVVvmRVmA-M,36270
+transformers/models/distilbert/__init__.py,sha256=64w_AOUP-vupRT6bGlQF7Ak24rJB5AX58n1V8V_aHM0,5167
+transformers/models/distilbert/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/distilbert/__pycache__/configuration_distilbert.cpython-310.pyc,,
+transformers/models/distilbert/__pycache__/modeling_distilbert.cpython-310.pyc,,
+transformers/models/distilbert/__pycache__/modeling_flax_distilbert.cpython-310.pyc,,
+transformers/models/distilbert/__pycache__/modeling_tf_distilbert.cpython-310.pyc,,
+transformers/models/distilbert/__pycache__/tokenization_distilbert.cpython-310.pyc,,
+transformers/models/distilbert/__pycache__/tokenization_distilbert_fast.cpython-310.pyc,,
+transformers/models/distilbert/configuration_distilbert.py,sha256=tIVlhKDhh0YOcubJcMJ9fk4M_LspEC1KvoeVPJdFXos,6091
+transformers/models/distilbert/modeling_distilbert.py,sha256=qkIYcvR8n8qG7m9_zfmYuiWoqPDwRXBsHs1gFyrJTm4,61606
+transformers/models/distilbert/modeling_flax_distilbert.py,sha256=cBRX7sUX2G9aSX6_I15sZ_H1yTXOMvwM7Gw3xbgOL6Q,32629
+transformers/models/distilbert/modeling_tf_distilbert.py,sha256=DhZ2Ap7aE3P9Gg7yGoxkBD5o3pZFM1xb6qqwriMgCqU,48952
+transformers/models/distilbert/tokenization_distilbert.py,sha256=Y9TPnqWaB-k8PbebKeZRoTyRFk2tgwySxcyZ_zybNOY,21906
+transformers/models/distilbert/tokenization_distilbert_fast.py,sha256=oO1CanamHXMuG6eJH8LOwRBEgK2yCSe80lF2fDGz5zo,8037
+transformers/models/dit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+transformers/models/dit/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/dit/__pycache__/convert_dit_unilm_to_pytorch.cpython-310.pyc,,
+transformers/models/dit/convert_dit_unilm_to_pytorch.py,sha256=qoCC3Hm-enjzLj5LoxjbpP8EaIsyhi3U3PERYYeSt7c,9420
+transformers/models/donut/__init__.py,sha256=VraCMZ5ZG0WtYvLmZv-B-gIH5joEM_QdAkiH2iDjLls,2455
+transformers/models/donut/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/donut/__pycache__/configuration_donut_swin.cpython-310.pyc,,
+transformers/models/donut/__pycache__/convert_donut_to_pytorch.cpython-310.pyc,,
+transformers/models/donut/__pycache__/feature_extraction_donut.cpython-310.pyc,,
+transformers/models/donut/__pycache__/image_processing_donut.cpython-310.pyc,,
+transformers/models/donut/__pycache__/modeling_donut_swin.cpython-310.pyc,,
+transformers/models/donut/__pycache__/processing_donut.cpython-310.pyc,,
+transformers/models/donut/configuration_donut_swin.py,sha256=VW7HBmDWZwdrop5HHQ92YsRwVjtW_XHqwecMWBTqaVY,5856
+transformers/models/donut/convert_donut_to_pytorch.py,sha256=0IgQ3V9hNWPOJ6KtOfowhVMfTh1m4WEVLOAQSMEGjJE,9316
+transformers/models/donut/feature_extraction_donut.py,sha256=jBSpDfoiCg_IWr4gcphIcxs7DA760JnH6V6hAfaoYPM,1179
+transformers/models/donut/image_processing_donut.py,sha256=vL7BsBj43uQsQEHJXXw3oMHmjOGFVH_IXRxQXzbzfK4,22310
+transformers/models/donut/modeling_donut_swin.py,sha256=jU9ovPUk-S7HI0rTUKSVIkc7R0tyb57LrIdiSicdAyk,43458
+transformers/models/donut/processing_donut.py,sha256=FxN4s7YYI0Yv_RyaBeYCnwx2Ljji37j3kJHvp75W0iI,8220
+transformers/models/dpr/__init__.py,sha256=qc_Fe-hF94ZxS9cfEXCp9h7-tkmi9Tj4KV9h_wg6yhs,4535
+transformers/models/dpr/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/dpr/__pycache__/configuration_dpr.cpython-310.pyc,,
+transformers/models/dpr/__pycache__/convert_dpr_original_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/dpr/__pycache__/modeling_dpr.cpython-310.pyc,,
+transformers/models/dpr/__pycache__/modeling_tf_dpr.cpython-310.pyc,,
+transformers/models/dpr/__pycache__/tokenization_dpr.cpython-310.pyc,,
+transformers/models/dpr/__pycache__/tokenization_dpr_fast.cpython-310.pyc,,
+transformers/models/dpr/configuration_dpr.py,sha256=hAE_Dth18kXhXn6qavKhMv2p7LdtwXeK-VhJIwNvAFk,6486
+transformers/models/dpr/convert_dpr_original_checkpoint_to_pytorch.py,sha256=XsxG5FBg46-EHlDsMq4w21C9W4wl8RZ6GZvx5coBmfk,6132
+transformers/models/dpr/modeling_dpr.py,sha256=oIoBMXJXKVb0lzDe5Aa5whyyKwfHJWU8cc6Hqo-RIeE,28591
+transformers/models/dpr/modeling_tf_dpr.py,sha256=fEwY6IjRaFbGN2JyxKmQERALN1VilZtm9LnRPIUtJww,33927
+transformers/models/dpr/tokenization_dpr.py,sha256=S4RPwKqgXSxgUy1CvdMrF4kWrC6_xiq0xeejbQNeXpo,15726
+transformers/models/dpr/tokenization_dpr_fast.py,sha256=OcoTaeO_t5AQ-SX9MKIUhLEU2gJNHkxdJbkmUCP81vQ,16112
+transformers/models/dpt/__init__.py,sha256=WoC0ADjpTTkspHtgIX_TtHXXG-4t8S-NGgJaAUiG-q4,2444
+transformers/models/dpt/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/configuration_dpt.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/convert_dinov2_depth_to_hf.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/convert_dpt_beit_to_hf.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/convert_dpt_hybrid_to_pytorch.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/convert_dpt_swinv2_to_hf.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/convert_dpt_to_pytorch.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/feature_extraction_dpt.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/image_processing_dpt.cpython-310.pyc,,
+transformers/models/dpt/__pycache__/modeling_dpt.cpython-310.pyc,,
+transformers/models/dpt/configuration_dpt.py,sha256=B4KgYJ8CFBN-PY4iTUD9hO6r3iWF7-8C22WEWPs0yeE,14504
+transformers/models/dpt/convert_dinov2_depth_to_hf.py,sha256=azN2ivIGa-g5fe6kdkQ0kJbgKitt10k8C2R3x3ff6FI,16935
+transformers/models/dpt/convert_dpt_beit_to_hf.py,sha256=VeC3Jpf_BVCkTdFJQHhrJPTgyRIibPzC32Isrd5iBPg,14347
+transformers/models/dpt/convert_dpt_hybrid_to_pytorch.py,sha256=czo2aHnDSZZqv2qwpx48s1dRTg25v-R5giSg4seNebE,12994
+transformers/models/dpt/convert_dpt_swinv2_to_hf.py,sha256=rFZSF_WFfMcVxXz815SX0THuTfg0juJBy6qCy8yT6QY,15176
+transformers/models/dpt/convert_dpt_to_pytorch.py,sha256=-SpPQGZ5tD6g0g5fQpSbMmUDK9xc1OFIInk9yyjkahE,11894
+transformers/models/dpt/feature_extraction_dpt.py,sha256=ZgBcSKNDX0_Fstv94sp1r9jpr9zvXCLPwvIek76Fkso,1165
+transformers/models/dpt/image_processing_dpt.py,sha256=--rXjjVmmW9b18QuLvkYd2dwSTCotaO7BDwMzyAfp2Q,23020
+transformers/models/dpt/modeling_dpt.py,sha256=y_FBw3uZvuTcIvpE84idIua7eZmXm_3y-8-gXiw9Hr4,57342
+transformers/models/efficientformer/__init__.py,sha256=hFVX-KUt3FRIjqb_MzHVif_h8r9FFezpRtRwFKLBKuY,3550
+transformers/models/efficientformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/efficientformer/__pycache__/configuration_efficientformer.cpython-310.pyc,,
+transformers/models/efficientformer/__pycache__/convert_efficientformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/efficientformer/__pycache__/image_processing_efficientformer.cpython-310.pyc,,
+transformers/models/efficientformer/__pycache__/modeling_efficientformer.cpython-310.pyc,,
+transformers/models/efficientformer/__pycache__/modeling_tf_efficientformer.cpython-310.pyc,,
+transformers/models/efficientformer/configuration_efficientformer.py,sha256=7zYu__djyFxNr5IOTXzcvNO5ZcLS8uWVBd0-XfKzGJk,7825
+transformers/models/efficientformer/convert_efficientformer_original_pytorch_checkpoint_to_pytorch.py,sha256=1ni0wyhRjTbF8U4BZ_FXU-_9Jzy43HMLKI3vGlyPjFc,9381
+transformers/models/efficientformer/image_processing_efficientformer.py,sha256=3D0DAnOMi7M3cb7UtjlOp7XDp96SL5XPdFumWLaZTQc,15694
+transformers/models/efficientformer/modeling_efficientformer.py,sha256=KwyEoMNevv3Jkz0U-E4kX9FNhFeC03TDHd9ryHu2h0w,33794
+transformers/models/efficientformer/modeling_tf_efficientformer.py,sha256=3ZEu1wKr37UyFMvH3fPJOeKnYlIrexx-GW7uHag1wvQ,49300
+transformers/models/efficientnet/__init__.py,sha256=mS43eilPqqiySKV0CZ34jg1SPUJa2zc6qyCwwRoJQFM,2670
+transformers/models/efficientnet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/efficientnet/__pycache__/configuration_efficientnet.cpython-310.pyc,,
+transformers/models/efficientnet/__pycache__/convert_efficientnet_to_pytorch.cpython-310.pyc,,
+transformers/models/efficientnet/__pycache__/image_processing_efficientnet.cpython-310.pyc,,
+transformers/models/efficientnet/__pycache__/modeling_efficientnet.cpython-310.pyc,,
+transformers/models/efficientnet/configuration_efficientnet.py,sha256=UoppUIDRRJh9GpqU6zSCuLWRrDTIf5HxyR0fx2QhhoU,7701
+transformers/models/efficientnet/convert_efficientnet_to_pytorch.py,sha256=e2Na1xvNc7z9XvvI7v6v1V2uFWr88MSTN3JPKR5GstM,12756
+transformers/models/efficientnet/image_processing_efficientnet.py,sha256=t2SCJE3ChbM8bkQzd2QvxAsQ2SGB-sv8ywxw-MyGVc8,18848
+transformers/models/efficientnet/modeling_efficientnet.py,sha256=cqtMmoJwTG6xts8VOw5aZ1fmhFagro3h3aHCKAOCX5I,24052
+transformers/models/electra/__init__.py,sha256=UVRK4T71rPHmZYRbrQ_-5eu98Gfrkp6I9SA3KVVCcYQ,5257
+transformers/models/electra/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/electra/__pycache__/configuration_electra.cpython-310.pyc,,
+transformers/models/electra/__pycache__/convert_electra_original_tf_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/electra/__pycache__/modeling_electra.cpython-310.pyc,,
+transformers/models/electra/__pycache__/modeling_flax_electra.cpython-310.pyc,,
+transformers/models/electra/__pycache__/modeling_tf_electra.cpython-310.pyc,,
+transformers/models/electra/__pycache__/tokenization_electra.cpython-310.pyc,,
+transformers/models/electra/__pycache__/tokenization_electra_fast.cpython-310.pyc,,
+transformers/models/electra/configuration_electra.py,sha256=RYzIRYomSpcKP2RVySm3QPlDnSaGhtrWkThCE89mfk8,9194
+transformers/models/electra/convert_electra_original_tf_checkpoint_to_pytorch.py,sha256=iwbjp9v26TfI9iIRdR4KWv-zsrxVNbfgkUwn9N1WHaM,2862
+transformers/models/electra/modeling_electra.py,sha256=z1Yc3xI0QQuTasP4KEEiuT5V4gqFS6HhhH93Ok1-K2w,75835
+transformers/models/electra/modeling_flax_electra.py,sha256=S5TkUbjF-9GNOxeiGfXTjc3tnINV18R8CLLFf30A9zU,62268
+transformers/models/electra/modeling_tf_electra.py,sha256=UMST0sC4TTX-9NTjtH8U8c4FpT46MSbt44Nq5cBPdPA,78437
+transformers/models/electra/tokenization_electra.py,sha256=m1-PY3o6pZVr2xcqP4tfv8oMZHKzrHNF0x6HqAOsGUo,20909
+transformers/models/electra/tokenization_electra_fast.py,sha256=zPqzst_6dX5eiFgR2iVsZuzYIS-KTe8BKDkh3fsPTQo,7685
+transformers/models/encodec/__init__.py,sha256=LVz0exnSENNu1jnGsAoPoS7LfXgC-H7s3_lbwNEX_Dw,1910
+transformers/models/encodec/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/encodec/__pycache__/configuration_encodec.cpython-310.pyc,,
+transformers/models/encodec/__pycache__/convert_encodec_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/encodec/__pycache__/feature_extraction_encodec.cpython-310.pyc,,
+transformers/models/encodec/__pycache__/modeling_encodec.cpython-310.pyc,,
+transformers/models/encodec/configuration_encodec.py,sha256=aKzD7Pnk3pdmRo2IBjonAJyXCGvMcQg6ICe_6SScJSQ,8596
+transformers/models/encodec/convert_encodec_checkpoint_to_pytorch.py,sha256=zF2ZSOCFsiMNvtIvRhjoucoF2G3m0nW-cHXimF_2uwQ,15253
+transformers/models/encodec/feature_extraction_encodec.py,sha256=luYd1uGvvQC_mDYlUsnMtSBn_S0dhbazYJ9zYGuQ1Kc,9873
+transformers/models/encodec/modeling_encodec.py,sha256=gI0-5Ebv6DyR_sS-48EBR4AHrKEPSxHJfISKcYsKOww,33493
+transformers/models/encoder_decoder/__init__.py,sha256=bR1yPbuqKHUYXaxI_QuDz6ccBSWpCr0THhPBM3lnttA,2451
+transformers/models/encoder_decoder/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/encoder_decoder/__pycache__/configuration_encoder_decoder.cpython-310.pyc,,
+transformers/models/encoder_decoder/__pycache__/modeling_encoder_decoder.cpython-310.pyc,,
+transformers/models/encoder_decoder/__pycache__/modeling_flax_encoder_decoder.cpython-310.pyc,,
+transformers/models/encoder_decoder/__pycache__/modeling_tf_encoder_decoder.cpython-310.pyc,,
+transformers/models/encoder_decoder/configuration_encoder_decoder.py,sha256=HaF1rtwzf_tDXJYrfycr4ktA8-LlBia_RdAWD60RTu8,4362
+transformers/models/encoder_decoder/modeling_encoder_decoder.py,sha256=bV7b45U8L6TR6JNtKWfa_s01S47TGu06XpyiJv40QXk,35783
+transformers/models/encoder_decoder/modeling_flax_encoder_decoder.py,sha256=geeWvUTNF1OprImdmwdPclf2qUpHGQ_Z0TZzMMbqSsc,43529
+transformers/models/encoder_decoder/modeling_tf_encoder_decoder.py,sha256=pVGR6W436j6W2QhrlcyRLJji_wP8nJi3vyrqW0Lv3xQ,34308
+transformers/models/ernie/__init__.py,sha256=s0oBhpPU0MdftoAKWUbo3VR2D9VPTvjPde4NBylw5qI,2331
+transformers/models/ernie/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/ernie/__pycache__/configuration_ernie.cpython-310.pyc,,
+transformers/models/ernie/__pycache__/modeling_ernie.cpython-310.pyc,,
+transformers/models/ernie/configuration_ernie.py,sha256=vrAn42VLrtzQkHf2blk2XkqouHtltRV_liEyyfYpR1c,7744
+transformers/models/ernie/modeling_ernie.py,sha256=FUwuyEzmohwLeYzgMhsgf_ZuHSlxKd18WCSjnApp13k,83926
+transformers/models/ernie_m/__init__.py,sha256=0neb_RuFu2HBnM3QZ5XRTBI9j8jzppR90ssXHH9LpGA,2637
+transformers/models/ernie_m/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/ernie_m/__pycache__/configuration_ernie_m.cpython-310.pyc,,
+transformers/models/ernie_m/__pycache__/modeling_ernie_m.cpython-310.pyc,,
+transformers/models/ernie_m/__pycache__/tokenization_ernie_m.cpython-310.pyc,,
+transformers/models/ernie_m/configuration_ernie_m.py,sha256=O8S19tEevivnWWwXwlLLi6hw6dRYO3gil9oOgmGDd-M,5982
+transformers/models/ernie_m/modeling_ernie_m.py,sha256=gcNj64kS1S1pxYXAuOupUHWanVQEJVjHe0i-GOY2SCU,47920
+transformers/models/ernie_m/tokenization_ernie_m.py,sha256=H2w93i-UQZpvEK8r-bl74x8ZxJ5mT3_wjUj2XKajDF4,16167
+transformers/models/esm/__init__.py,sha256=IfHOSRyzJHTD8eVSelVu_ijHcYnRp0Umm6hZGsoFYHQ,2978
+transformers/models/esm/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/esm/__pycache__/configuration_esm.cpython-310.pyc,,
+transformers/models/esm/__pycache__/convert_esm.cpython-310.pyc,,
+transformers/models/esm/__pycache__/modeling_esm.cpython-310.pyc,,
+transformers/models/esm/__pycache__/modeling_esmfold.cpython-310.pyc,,
+transformers/models/esm/__pycache__/modeling_tf_esm.cpython-310.pyc,,
+transformers/models/esm/__pycache__/tokenization_esm.cpython-310.pyc,,
+transformers/models/esm/configuration_esm.py,sha256=3KH-ti_vdKcNC235XlA0k_thhnkMUjDCXQkB6hSZJXA,14454
+transformers/models/esm/convert_esm.py,sha256=x0dfu2oexN80cndU3Zn81oVynsRuzfEtJZF20TK1y3k,18470
+transformers/models/esm/modeling_esm.py,sha256=PeAg54V-K3j95Ff77QfrNioNiw8YEBmRVK91_gW6hfo,55663
+transformers/models/esm/modeling_esmfold.py,sha256=GgMkBeEhTvZBj61fNGqDkZsWTGeRwrhkHSGYa0otbJ4,86908
+transformers/models/esm/modeling_tf_esm.py,sha256=q8Nh_eDqeI9xBSWmutfaeB3rgxkUDdhF_mzafFdAFc8,68965
+transformers/models/esm/openfold_utils/__init__.py,sha256=Xy2uqvFsLC8Ax-OOce5PgoBDiZgEJgJPqs__p5SBWUY,446
+transformers/models/esm/openfold_utils/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/chunk_utils.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/data_transforms.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/feats.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/loss.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/protein.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/residue_constants.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/rigid_utils.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/__pycache__/tensor_utils.cpython-310.pyc,,
+transformers/models/esm/openfold_utils/chunk_utils.py,sha256=eyd0NSdGIVBr9gLuI-3VI5cjJr46wYa9hlYBq1L1gCU,14392
+transformers/models/esm/openfold_utils/data_transforms.py,sha256=F4wGANRhKLd6MLHrwg2IxpqCxCJEx8aFSxqAdsXsBMo,3764
+transformers/models/esm/openfold_utils/feats.py,sha256=dgLcLJriW-eDIBdc0MyKPDT5w0POab9QLuN56qE8wsk,8376
+transformers/models/esm/openfold_utils/loss.py,sha256=wY2ONqbuRvWMomjkpfPwfoa7dqCO2vFkM-kmNfhjivo,3705
+transformers/models/esm/openfold_utils/protein.py,sha256=x9NK6bryLs9vNi3j8OfOlw0Jb1cFrwMhCi6JdxkDdQw,11490
+transformers/models/esm/openfold_utils/residue_constants.py,sha256=KDcdOt5wkJ7cO7p-LtmS8sLIzfQ2ej7p40Re8EsTkv0,37993
+transformers/models/esm/openfold_utils/rigid_utils.py,sha256=EF79POBO-abRsdXrfdKLaqJUVIPp4EOMFVt5oOjx504,41122
+transformers/models/esm/openfold_utils/tensor_utils.py,sha256=A07D5psNs5lGgWJp_kzJgrY8cmWmaL3odDgKXN1NVAE,4798
+transformers/models/esm/tokenization_esm.py,sha256=kthSlBphrNeHRinLA0-FdVQ0_olxhQhViQ2_3K_DBo8,5355
+transformers/models/falcon/__init__.py,sha256=Sf4eyG7aJ4pQoqLJXStTSTxP7iEHks73GWe9QjAnU3w,2067
+transformers/models/falcon/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/falcon/__pycache__/configuration_falcon.cpython-310.pyc,,
+transformers/models/falcon/__pycache__/convert_custom_code_checkpoint.cpython-310.pyc,,
+transformers/models/falcon/__pycache__/modeling_falcon.cpython-310.pyc,,
+transformers/models/falcon/configuration_falcon.py,sha256=2epVIM91N3mJKEvwjAMO698OA30XyxfLkHy89BfnY5w,9614
+transformers/models/falcon/convert_custom_code_checkpoint.py,sha256=XPJ1owRjRno_Y1AD5UeoPE4oo6a-SeQR9w9u-EIUktE,3061
+transformers/models/falcon/modeling_falcon.py,sha256=CnaGsYFw3XXSXx0RVf6bVuvu4eL7u0fDaP3XG_6s5wo,75692
+transformers/models/fastspeech2_conformer/__init__.py,sha256=eAZrmrz-mhay_crQQcN59ra1YBH341kxCGvR2h__YBE,2770
+transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/fastspeech2_conformer/__pycache__/configuration_fastspeech2_conformer.cpython-310.pyc,,
+transformers/models/fastspeech2_conformer/__pycache__/convert_fastspeech2_conformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/fastspeech2_conformer/__pycache__/convert_hifigan.cpython-310.pyc,,
+transformers/models/fastspeech2_conformer/__pycache__/convert_model_with_hifigan.cpython-310.pyc,,
+transformers/models/fastspeech2_conformer/__pycache__/modeling_fastspeech2_conformer.cpython-310.pyc,,
+transformers/models/fastspeech2_conformer/__pycache__/tokenization_fastspeech2_conformer.cpython-310.pyc,,
+transformers/models/fastspeech2_conformer/configuration_fastspeech2_conformer.py,sha256=DoA6gWj-XXM_UuCBIwiDdOwCaSm6q-RSwLpPhoydaYA,24662
+transformers/models/fastspeech2_conformer/convert_fastspeech2_conformer_original_pytorch_checkpoint_to_pytorch.py,sha256=-ToJHpwI-xoLLMzLYdqFrBL6j6nsSPlNbkQ3pfTgJ6Y,8939
+transformers/models/fastspeech2_conformer/convert_hifigan.py,sha256=RC1PaVnl1cLx8c2LdYycNti7iYRhUM7_KrX2mF5WyCM,5431
+transformers/models/fastspeech2_conformer/convert_model_with_hifigan.py,sha256=wT4pQGgEHVFoWI1Lb71L7_i6ujfNrSMDGYuDGb4oeh8,3471
+transformers/models/fastspeech2_conformer/modeling_fastspeech2_conformer.py,sha256=Btj_h3RCbp0lGpc06W3kzlC3M459OyfuB3eeKoAhKtc,77676
+transformers/models/fastspeech2_conformer/tokenization_fastspeech2_conformer.py,sha256=aM39xyh9UuqEsplBCWrbBtRYNj3BEw6V1QmGkTqCyAU,6218
+transformers/models/flaubert/__init__.py,sha256=neN63qn5CVIfPSr50g0WhbrcKDT7w0qIljyqSCxbqLI,3488
+transformers/models/flaubert/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/flaubert/__pycache__/configuration_flaubert.cpython-310.pyc,,
+transformers/models/flaubert/__pycache__/modeling_flaubert.cpython-310.pyc,,
+transformers/models/flaubert/__pycache__/modeling_tf_flaubert.cpython-310.pyc,,
+transformers/models/flaubert/__pycache__/tokenization_flaubert.cpython-310.pyc,,
+transformers/models/flaubert/configuration_flaubert.py,sha256=hCD-A6OTCJmxfMMIPDogR2Der27ch4hy9F-lKjlV6GE,11288
+transformers/models/flaubert/modeling_flaubert.py,sha256=5_dfp-KEyaBaDpT1pJY5PDhx5i-_-AgWF6wVBt_jJ-Q,57599
+transformers/models/flaubert/modeling_tf_flaubert.py,sha256=RH236R8k0gr1m5k3pXhj5AiILVFNgoxgrgGtpGpRb2k,57184
+transformers/models/flaubert/tokenization_flaubert.py,sha256=SmfPpRAS5kQXm9WvpRjS6QL7lA2JQZr4Vs3GbhXP-Xo,22136
+transformers/models/flava/__init__.py,sha256=TtPrEOob3V4Lk_NK3rgacXw0jJ2ABWKPnLP8x4uSs4I,3030
+transformers/models/flava/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/flava/__pycache__/configuration_flava.cpython-310.pyc,,
+transformers/models/flava/__pycache__/convert_dalle_to_flava_codebook.cpython-310.pyc,,
+transformers/models/flava/__pycache__/convert_flava_original_pytorch_to_hf.cpython-310.pyc,,
+transformers/models/flava/__pycache__/feature_extraction_flava.cpython-310.pyc,,
+transformers/models/flava/__pycache__/image_processing_flava.cpython-310.pyc,,
+transformers/models/flava/__pycache__/modeling_flava.cpython-310.pyc,,
+transformers/models/flava/__pycache__/processing_flava.cpython-310.pyc,,
+transformers/models/flava/configuration_flava.py,sha256=3t2E9ekLNZMSAgo6djFRubfLOfbqDMyw2uSx3yjjyhw,37184
+transformers/models/flava/convert_dalle_to_flava_codebook.py,sha256=iEJM9W_cKk3HK0gKS6i2ygEMeyymWCMl18LDaQXRAhY,3428
+transformers/models/flava/convert_flava_original_pytorch_to_hf.py,sha256=LilQpbe6qeN2P_uXljae6zEPx_KoepoRv4uvCEAo0QA,4372
+transformers/models/flava/feature_extraction_flava.py,sha256=mA1uAn29yv9PV7gYXauz0VTAJDgcpl9DPHvH99Ed__s,1201
+transformers/models/flava/image_processing_flava.py,sha256=88KY6CipM_6HkY3SKu9dfdv_KhSAoYrFZRkKqrpal7A,38581
+transformers/models/flava/modeling_flava.py,sha256=obSU7TGFe-_XjLMyGSkDrTiiP1dGFXDcfq1HkQ4sjtE,96774
+transformers/models/flava/processing_flava.py,sha256=fj9uFlMerVGFnB9hV1XJ61c3q82qstjPwmWUdMiL46U,6832
+transformers/models/fnet/__init__.py,sha256=spzYrdM_-MVYRr6Axeh_adtgX1pCDAsUJEpR-cPdxgE,3179
+transformers/models/fnet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/fnet/__pycache__/configuration_fnet.cpython-310.pyc,,
+transformers/models/fnet/__pycache__/convert_fnet_original_flax_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/fnet/__pycache__/modeling_fnet.cpython-310.pyc,,
+transformers/models/fnet/__pycache__/tokenization_fnet.cpython-310.pyc,,
+transformers/models/fnet/__pycache__/tokenization_fnet_fast.cpython-310.pyc,,
+transformers/models/fnet/configuration_fnet.py,sha256=Bo0KeJXuaW22kjeS0SFRpDtAnqSVoIjTZLzEY5094Jo,5637
+transformers/models/fnet/convert_fnet_original_flax_checkpoint_to_pytorch.py,sha256=bxrdtJbyINwJtiIpagL3Ttkq0D5ujBK1Wi72fIR2vss,6912
+transformers/models/fnet/modeling_fnet.py,sha256=7xVl8KRse3ugfF0URJ7Xv9oURNVEEKp3JOFx90_lC_k,49043
+transformers/models/fnet/tokenization_fnet.py,sha256=DBOFFYsJ2W3z2tMKVFC3r1Hw48OdNKSAOqt--aRk15M,14549
+transformers/models/fnet/tokenization_fnet_fast.py,sha256=comrMbQXVPEhm8EGUiqgrvItcDiFLPwTm_YupT3S4fI,8064
+transformers/models/focalnet/__init__.py,sha256=RPvCimVzndLWR8r1MfUbrAiQTJEvJ6VGTM1OFmAS9-A,1989
+transformers/models/focalnet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/focalnet/__pycache__/configuration_focalnet.cpython-310.pyc,,
+transformers/models/focalnet/__pycache__/convert_focalnet_to_hf_format.cpython-310.pyc,,
+transformers/models/focalnet/__pycache__/modeling_focalnet.cpython-310.pyc,,
+transformers/models/focalnet/configuration_focalnet.py,sha256=cEGW_Mlrb6aCQyORrNzXSDzy6pRpwwnGr8XfSv-x3yk,8127
+transformers/models/focalnet/convert_focalnet_to_hf_format.py,sha256=xBoop7K4unfPawCbmlv7BTQHpbJkaUWasrwsw8dW_KI,9450
+transformers/models/focalnet/modeling_focalnet.py,sha256=4LWagJUt4Zu0XJccaaaKpPcfiorL1XUudUG-yADj608,43186
+transformers/models/fsmt/__init__.py,sha256=e0xh51cBRMFkSYEcmZzyINHoXBKwgonWv3zEPqZuMYE,1675
+transformers/models/fsmt/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/fsmt/__pycache__/configuration_fsmt.cpython-310.pyc,,
+transformers/models/fsmt/__pycache__/convert_fsmt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/fsmt/__pycache__/modeling_fsmt.cpython-310.pyc,,
+transformers/models/fsmt/__pycache__/tokenization_fsmt.cpython-310.pyc,,
+transformers/models/fsmt/configuration_fsmt.py,sha256=GZrow13QeYa3jZ-olYXGlVdMN3nqRbnU4_i-fdeJd_g,10161
+transformers/models/fsmt/convert_fsmt_original_pytorch_checkpoint_to_pytorch.py,sha256=BWtn90XQAuWGp8k9zns5St9On_os395ESNgkaXy6y2g,11264
+transformers/models/fsmt/modeling_fsmt.py,sha256=Qo_LDfeYujqdZs99eeb6LElHYMFtZd4e2Q7Fd96M8I0,58402
+transformers/models/fsmt/tokenization_fsmt.py,sha256=uqmF6EjUAT7OQ3ZA0Ioj_m0eINcJZOGiMEEmHC67_Vo,19261
+transformers/models/funnel/__init__.py,sha256=QQgGGD4BfFL3j1qtC1oNuuagXUPYWw0KJ4XVKTzMvW0,4126
+transformers/models/funnel/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/funnel/__pycache__/configuration_funnel.cpython-310.pyc,,
+transformers/models/funnel/__pycache__/convert_funnel_original_tf_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/funnel/__pycache__/modeling_funnel.cpython-310.pyc,,
+transformers/models/funnel/__pycache__/modeling_tf_funnel.cpython-310.pyc,,
+transformers/models/funnel/__pycache__/tokenization_funnel.cpython-310.pyc,,
+transformers/models/funnel/__pycache__/tokenization_funnel_fast.cpython-310.pyc,,
+transformers/models/funnel/configuration_funnel.py,sha256=k7ff4rr8IweCknqduGwjl2aNmrPMFK2oMLk_GntnTEE,7750
+transformers/models/funnel/convert_funnel_original_tf_checkpoint_to_pytorch.py,sha256=fdaL7-j0ZWjCKvvpS_gFYHBthQ8TFbGmkOmfd53enaI,2335
+transformers/models/funnel/modeling_funnel.py,sha256=wWxW9cODrXeCguOvdu-1U0gsJ4ID8k4M-Nbcw6MsWpM,69551
+transformers/models/funnel/modeling_tf_funnel.py,sha256=zCzTxVUVSxbxwJGhS7KkdLxFoFsN-I8_o8kyHukK2UE,80267
+transformers/models/funnel/tokenization_funnel.py,sha256=A7Xmjp3TgF-jjk6Lr2W_xfaqFWcbKAe8DlEGwhN8rTA,22369
+transformers/models/funnel/tokenization_funnel_fast.py,sha256=KpC1leC5Wi7e22-orBKLLIFeMt7tb3Y8N98-jpOV7TU,8644
+transformers/models/fuyu/__init__.py,sha256=SLRcFqITZh127We258kiNPRKoegottQTbpuCZ72dTBU,2184
+transformers/models/fuyu/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/fuyu/__pycache__/configuration_fuyu.cpython-310.pyc,,
+transformers/models/fuyu/__pycache__/convert_fuyu_model_weights_to_hf.cpython-310.pyc,,
+transformers/models/fuyu/__pycache__/image_processing_fuyu.cpython-310.pyc,,
+transformers/models/fuyu/__pycache__/modeling_fuyu.cpython-310.pyc,,
+transformers/models/fuyu/__pycache__/processing_fuyu.cpython-310.pyc,,
+transformers/models/fuyu/configuration_fuyu.py,sha256=SaL8tMrWsMdBW9chJe7-RIxyUtagYM2ChS8ssRLiUxE,10155
+transformers/models/fuyu/convert_fuyu_model_weights_to_hf.py,sha256=c8A4qiUY47MfPeEG518qofxFdzut0me3EtFNizEHv6Q,4847
+transformers/models/fuyu/image_processing_fuyu.py,sha256=jYB8EWiRio_c5g4EkReAxLFFrv7fdoONlKGVGZnadxM,33810
+transformers/models/fuyu/modeling_fuyu.py,sha256=hto2ZclUlvFlg-OPGnjRyJwbtZB5VtyNjR0qfUll39o,17758
+transformers/models/fuyu/processing_fuyu.py,sha256=AQW_0vMDaOfOlOVjmmr4utHA-pT9Gvnc6ITlBpLrSa8,31896
+transformers/models/gemma/__init__.py,sha256=boIWLnLMFp69VbfjGEcoCMTSObbY_0OevWvwBOa29Xg,3339
+transformers/models/gemma/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/gemma/__pycache__/configuration_gemma.cpython-310.pyc,,
+transformers/models/gemma/__pycache__/convert_gemma_weights_to_hf.cpython-310.pyc,,
+transformers/models/gemma/__pycache__/modeling_flax_gemma.cpython-310.pyc,,
+transformers/models/gemma/__pycache__/modeling_gemma.cpython-310.pyc,,
+transformers/models/gemma/__pycache__/tokenization_gemma.cpython-310.pyc,,
+transformers/models/gemma/__pycache__/tokenization_gemma_fast.cpython-310.pyc,,
+transformers/models/gemma/configuration_gemma.py,sha256=zcwX5vkni6iKEHcuMsS_LIOhy-sukba7u9y8_DS0k3g,7141
+transformers/models/gemma/convert_gemma_weights_to_hf.py,sha256=UCoyJd4wVYKlikKMK0-9GRFAa-Cm3OtLt7oSJjXOuPA,7366
+transformers/models/gemma/modeling_flax_gemma.py,sha256=rDG3jua0r9HMiwbdirTDB2h2xEvuxtlTWoW59dXL1Dw,32332
+transformers/models/gemma/modeling_gemma.py,sha256=2OIgMCHmoBOyScSk9AzNyRNTYQ9M3csOVZ4oIDPXCjU,64133
+transformers/models/gemma/tokenization_gemma.py,sha256=CXHdN19ZMBvqfFeIEyF-p92iJO1umUakJ1sfPLOOaiY,13981
+transformers/models/gemma/tokenization_gemma_fast.py,sha256=bTIi46E_PXDmRwD8hQG0AI6HRlj-03Y7itWFA6tclQE,8279
+transformers/models/git/__init__.py,sha256=KG0HrIdVgj64GVVUk32IdidJRaC5BcjQZt62oVRL5Eo,1888
+transformers/models/git/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/git/__pycache__/configuration_git.cpython-310.pyc,,
+transformers/models/git/__pycache__/convert_git_to_pytorch.cpython-310.pyc,,
+transformers/models/git/__pycache__/modeling_git.cpython-310.pyc,,
+transformers/models/git/__pycache__/processing_git.cpython-310.pyc,,
+transformers/models/git/configuration_git.py,sha256=LKzTZXFIsdNyxaXEY6JKw2J0hQ-NNSOPYg04WkbaN3s,11310
+transformers/models/git/convert_git_to_pytorch.py,sha256=HzsGAVKq7fhWCgI89QsSEDUO1IaQn0LNPkprFq3-vYk,22390
+transformers/models/git/modeling_git.py,sha256=mNGhSVAdXmSVX_Hy95-romnN_RSBBExVqzzqH0-HG28,69114
+transformers/models/git/processing_git.py,sha256=z-nGl5S4cxDFam85wJWrONVASpM4auyjjUn_lhq4cZM,5381
+transformers/models/glpn/__init__.py,sha256=-5zqCuk1phx-Bjw3Mq-NJmPvusXfEYcNGIrFO27vr3s,2384
+transformers/models/glpn/__pycache__/__init__.cpython-310.pyc,, +transformers/models/glpn/__pycache__/configuration_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/convert_glpn_to_pytorch.cpython-310.pyc,, +transformers/models/glpn/__pycache__/feature_extraction_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/image_processing_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/modeling_glpn.cpython-310.pyc,, +transformers/models/glpn/configuration_glpn.py,sha256=dAqIy5-a8cbTOyBN781JAiYA0u1rFwVfUz89T5WRjI0,6068 +transformers/models/glpn/convert_glpn_to_pytorch.py,sha256=dT5q2vCISTu1DjoTkLSyHmlcR75n_CGhXxxknL5KjJQ,8558 +transformers/models/glpn/feature_extraction_glpn.py,sha256=S263LFeHVRym_jKt8KkTOjjtA1_BqARnUgbSFExgPN4,1172 +transformers/models/glpn/image_processing_glpn.py,sha256=-vAlAJdllzBjNJdB_OJn9NOx5gkDaB_sUYZN23Y7xGY,11003 +transformers/models/glpn/modeling_glpn.py,sha256=Zp2otpnM7nTzdd-DAUhOXANYtXyqIyqLvnFfCai2Fmk,31502 +transformers/models/gpt2/__init__.py,sha256=d_QyBAIVXohGlkOMWC9r03kE9uS2IHwXwPCsxnMGGkg,4674 +transformers/models/gpt2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/convert_gpt2_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_flax_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_tf_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_fast.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_tf.cpython-310.pyc,, +transformers/models/gpt2/configuration_gpt2.py,sha256=m-3GXLz7ryiNvroT6JVh7Ag7MfyYGs25jzNMFiPKZvY,12079 +transformers/models/gpt2/convert_gpt2_original_tf_checkpoint_to_pytorch.py,sha256=nRAxbikMz9v88rDqfrX8OwPvBKe7fiYC2fg-6BB8Mzk,2532 +transformers/models/gpt2/modeling_flax_gpt2.py,sha256=6vAeL1SwHlYUxTwHmfHXEYLuvTJoLRq5zl_GwUm5PiE,32014 +transformers/models/gpt2/modeling_gpt2.py,sha256=rJQj-4HoKSakDuXx8ha79J1-jlSCVm6kN3DT4KnDLs8,88050 +transformers/models/gpt2/modeling_tf_gpt2.py,sha256=HjAhqrJgp2YJ4IBfobuzND9jkVcBWBzAJuRXd9TYqj8,56691 +transformers/models/gpt2/tokenization_gpt2.py,sha256=R0pYfuNDBx2Rdp9zDGOGcWprMGEcDBzIFsUOXw1KBPI,13849 +transformers/models/gpt2/tokenization_gpt2_fast.py,sha256=lRMLo_ML0hP0K6IJtceoISZXiBaw2Z1h2MJCs693IvM,6535 +transformers/models/gpt2/tokenization_gpt2_tf.py,sha256=Ptg01f1bV0fAvI1JK6v-FE4lVKUPIiXrxxPrf8M7kgU,3833 +transformers/models/gpt_bigcode/__init__.py,sha256=waW0WeT6jgb8gWpaGmMZBJCYoqKzCbaQbyjHZkuEARE,2037 +transformers/models/gpt_bigcode/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_bigcode/__pycache__/configuration_gpt_bigcode.cpython-310.pyc,, +transformers/models/gpt_bigcode/__pycache__/modeling_gpt_bigcode.cpython-310.pyc,, +transformers/models/gpt_bigcode/configuration_gpt_bigcode.py,sha256=DucM152GIQPCLHztnilZZ3WR10WySLc1P6cIy8TnGvU,6382 +transformers/models/gpt_bigcode/modeling_gpt_bigcode.py,sha256=aywzb7EFlA0xzKY01aFp0KYtP60_8-F2TdmKa52GLXY,69693 +transformers/models/gpt_neo/__init__.py,sha256=tCBf4wXQijfaRh959WfU7_npuc1na00rwCZCgcxuTOo,2718 +transformers/models/gpt_neo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/configuration_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/convert_gpt_neo_mesh_tf_to_pytorch.cpython-310.pyc,, 
+transformers/models/gpt_neo/__pycache__/modeling_flax_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/modeling_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/configuration_gpt_neo.py,sha256=gAqXIqy6_YiogYm6rwRPE2t_dqIoEiwLm7oePX2__gg,11931 +transformers/models/gpt_neo/convert_gpt_neo_mesh_tf_to_pytorch.py,sha256=SSlCsIZmkN010Cu64F4lxwHcQRsqEGbb7a6PqCSWJY0,2589 +transformers/models/gpt_neo/modeling_flax_gpt_neo.py,sha256=xgwE5UixFan9wDb9ScOd8DcEH-o1Iu-AX1bNkMWQFEA,28074 +transformers/models/gpt_neo/modeling_gpt_neo.py,sha256=VRC5JDwwaumJDna7PM0pZgv-DeL1uMbuwB_1KiAyIgE,58257 +transformers/models/gpt_neox/__init__.py,sha256=NETOJyNfZJ1SXJ4jc1heeVs2TMqXjlbminmJQKSnLnA,2595 +transformers/models/gpt_neox/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/configuration_gpt_neox.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/modeling_gpt_neox.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/tokenization_gpt_neox_fast.cpython-310.pyc,, +transformers/models/gpt_neox/configuration_gpt_neox.py,sha256=4-pmNwzHWZMJNYHsZgYJmWfXVYOQLfcsO77Up9BsWXY,8974 +transformers/models/gpt_neox/modeling_gpt_neox.py,sha256=_GHHApaLCnK7E9CR29GFz8g0kEvoAtoS2qC9vuN6N0I,64897 +transformers/models/gpt_neox/tokenization_gpt_neox_fast.py,sha256=muSciDUjACX1nufh4Nq5tX6QhM5QOz1YeEQdUiszB1Y,10211 +transformers/models/gpt_neox_japanese/__init__.py,sha256=7S5Q5Y8aQPbcoaPjIVo7s9ebHh0GLv3cA1TeAhzvFFA,2154 +transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/configuration_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/modeling_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/tokenization_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/configuration_gpt_neox_japanese.py,sha256=tnDNY86uFFFQ_SMrQcqYTGPxeYNKPlae2QsXfKgAQ-I,5668 +transformers/models/gpt_neox_japanese/modeling_gpt_neox_japanese.py,sha256=1jC2InCehrPxA2bKUOAn1Jlxt_mEdgARfSSXVlRFJ-w,32386 +transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py,sha256=7CvQVyzCJa5t-3McYp7z2qJRec3-ZthRoWguTp3EiVk,17082 +transformers/models/gpt_sw3/__init__.py,sha256=qJj7vF8ES37BwsKbJE1zV2rPUdmM3vx8mckIFuWrJSU,1361 +transformers/models/gpt_sw3/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_sw3/__pycache__/convert_megatron_to_pytorch.cpython-310.pyc,, +transformers/models/gpt_sw3/__pycache__/tokenization_gpt_sw3.cpython-310.pyc,, +transformers/models/gpt_sw3/convert_megatron_to_pytorch.py,sha256=11EGXgi73zwRchm4aMlHE7tCom4_oGLQSWF1YMpBBQA,8156 +transformers/models/gpt_sw3/tokenization_gpt_sw3.py,sha256=cGST5TNnGR90L6CnCHIfnpobDARfCO4BinKaKQ6YaqY,13517 +transformers/models/gptj/__init__.py,sha256=wBErGYabUQpzDULOVQSE9vEvefKWJvJFoU9p0t54qDU,3280 +transformers/models/gptj/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gptj/__pycache__/configuration_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_flax_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_tf_gptj.cpython-310.pyc,, +transformers/models/gptj/configuration_gptj.py,sha256=zy0vdt6BICJlTG6olKuIHfzu2QMEWPPAk0Z_LyadJrU,8880 +transformers/models/gptj/modeling_flax_gptj.py,sha256=VaYTrxQosqkIqHcbKcDFinT_z3aofwdJLasWAqxjRlM,28525 +transformers/models/gptj/modeling_gptj.py,sha256=sWfh0DAi3PlxWkSZFZWEVFLdiQVdlAkwpJimXpbld3M,63142 
+transformers/models/gptj/modeling_tf_gptj.py,sha256=Rbgvnc3X-wtnw9BKun5G3UcW--i-H6BFOldygQ5GYvU,48066 +transformers/models/gptsan_japanese/__init__.py,sha256=gkfCyeWUjR_u2kxoe0nD-gLdcFoS4SwjhQBNufTY86w,2294 +transformers/models/gptsan_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/configuration_gptsan_japanese.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/convert_gptsan_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/modeling_gptsan_japanese.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/tokenization_gptsan_japanese.cpython-310.pyc,, +transformers/models/gptsan_japanese/configuration_gptsan_japanese.py,sha256=TL57v1NF7w5TFfRRqYCpPM4kenFk_-bJ1C5z2OEqSTs,7230 +transformers/models/gptsan_japanese/convert_gptsan_tf_checkpoint_to_pytorch.py,sha256=syF4TCbLQByZhm5VqIFgXfzQ4zImmCua8UNjCYJP5t8,9793 +transformers/models/gptsan_japanese/modeling_gptsan_japanese.py,sha256=ttTVYnf2hox6oLiXpRV_AzjmFOLE8UVW1I1AZzkCTXY,66612 +transformers/models/gptsan_japanese/tokenization_gptsan_japanese.py,sha256=Aoncz2BgbOaYNJyqbj6ByCG3cAZoos-WK67m5QKHQeM,24310 +transformers/models/graphormer/__init__.py,sha256=SCL3NOPe62lQVk-qWrJD1enP6JNBWyPreg5EGaifjbE,1873 +transformers/models/graphormer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/graphormer/__pycache__/collating_graphormer.cpython-310.pyc,, +transformers/models/graphormer/__pycache__/configuration_graphormer.cpython-310.pyc,, +transformers/models/graphormer/__pycache__/modeling_graphormer.cpython-310.pyc,, +transformers/models/graphormer/algos_graphormer.pyx,sha256=b_Qlm1hKCHnAqx6oOLGC9LkivAV0K_AZRGgXT9MmBas,3635 +transformers/models/graphormer/collating_graphormer.py,sha256=1r_YqrFzC6uWCaPCsGMqNkvHNKs6SCV1bSw2qLyAYJA,6086 +transformers/models/graphormer/configuration_graphormer.py,sha256=ppv8lh6wroRM_q8iBIWISI4X5COJaZNwS8K2T-ygqbY,10481 +transformers/models/graphormer/modeling_graphormer.py,sha256=a1MmXQ2a71TcXEu109Wqa23B3QdNRvrzoUNXIaqZ8jw,37105 +transformers/models/grounding_dino/__init__.py,sha256=Erk4Xw6A3yKtLAFJ_XaAsp2JKUAYSZXA_NEhC-llHTo,2570 +transformers/models/grounding_dino/__pycache__/__init__.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/configuration_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/convert_grounding_dino_to_hf.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/image_processing_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/modeling_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/processing_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/configuration_grounding_dino.py,sha256=Nm24p_CCv7U57LjdzcuAQIPCfG-_7I65PaGIthB1YXU,15243 +transformers/models/grounding_dino/convert_grounding_dino_to_hf.py,sha256=U3T2-FjtYv9unK36_iA17Ifww7kWWYWpPbVC6u6-unQ,25445 +transformers/models/grounding_dino/image_processing_grounding_dino.py,sha256=Iphjv5yuvntKKtW-_VB9JQ-f0ZhCZu8NIW6QX885HcY,66200 +transformers/models/grounding_dino/modeling_grounding_dino.py,sha256=p2tAqOP3KdvKDtqHk9Peo4IcuEHwvA7ucaRZNuVF0vY,154095 +transformers/models/grounding_dino/processing_grounding_dino.py,sha256=kRjQfahwF3QAdW4_lgVmE1M2S-TX2PgLX-4XjKGL53c,9628 +transformers/models/groupvit/__init__.py,sha256=rO2THuhEVPYRh__0tgdPS9egtqSugEkoXU4lDMAg3q0,2875 +transformers/models/groupvit/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/groupvit/__pycache__/configuration_groupvit.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/convert_groupvit_nvlab_to_hf.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/modeling_groupvit.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/modeling_tf_groupvit.cpython-310.pyc,, +transformers/models/groupvit/configuration_groupvit.py,sha256=VIjM_1UnJOGvxhSqltaiDxYUOcoU4TQp8bO3ATqORBc,20796 +transformers/models/groupvit/convert_groupvit_nvlab_to_hf.py,sha256=9gQxkcjVNCP5lvV54SbbSsOjkKCHORcoiwq2gcczYCM,9775 +transformers/models/groupvit/modeling_groupvit.py,sha256=hqh41N5MbxNyAy3g61xCHqYBbJnYkj-0tHXTmT6lmb8,67884 +transformers/models/groupvit/modeling_tf_groupvit.py,sha256=w0Lc4OFGeUixsV2f0CZ0JL1laey4tnkAS6Fx7Zx3Wio,89848 +transformers/models/herbert/__init__.py,sha256=Sp9gQIqlUhZHausuaL2MFYDqJW4vvsVGLbVryR-kNl0,1472 +transformers/models/herbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert.cpython-310.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert_fast.cpython-310.pyc,, +transformers/models/herbert/tokenization_herbert.py,sha256=5xsZeIOJU5UQ1-u75m8c_pOtS5IoFdaMOiL43-dYR5Q,25042 +transformers/models/herbert/tokenization_herbert_fast.py,sha256=QITcJycMNQuUIumL_an1mNz55fJADkW9S6jdBmO99KM,5926 +transformers/models/hubert/__init__.py,sha256=rfeBnkDY2iMz8xs_cZY4wSMSxoXQeVQov-C42xhA0eE,2536 +transformers/models/hubert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/hubert/__pycache__/configuration_hubert.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_distilhubert_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_hubert_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_hubert_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/modeling_hubert.cpython-310.pyc,, +transformers/models/hubert/__pycache__/modeling_tf_hubert.cpython-310.pyc,, +transformers/models/hubert/configuration_hubert.py,sha256=DuxtNSM-RtwpGqGXMMtujQBv8015T51jruii7Hi4nyc,14774 +transformers/models/hubert/convert_distilhubert_original_s3prl_checkpoint_to_pytorch.py,sha256=ENEJNVBI7j5N6ajvUnNEAfSIM6VfEmpI8dF86R4EDog,8942 +transformers/models/hubert/convert_hubert_original_pytorch_checkpoint_to_pytorch.py,sha256=tVrpW4Mqkymh6pcLdYdTtkl0ykhSkHNvfTefbBIpR7w,10380 +transformers/models/hubert/convert_hubert_original_s3prl_checkpoint_to_pytorch.py,sha256=BtUOQ6Jf7kppeKreWA76AvQNdy_a63t2iuq0yHvEs4E,2895 +transformers/models/hubert/modeling_hubert.py,sha256=X6D6U51pn8DRbPt9EyoHm6sbFPhm_hYBj0H2dNm4fxA,60130 +transformers/models/hubert/modeling_tf_hubert.py,sha256=211G8mTNnXGEwRgYZSNGxHgGt0SseG26kzvwLw3BvZ4,70788 +transformers/models/ibert/__init__.py,sha256=uw-Mi7HIih0Or_1DeCK7Ooc20kBdmqokZ6GEDwOD9LU,2086 +transformers/models/ibert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ibert/__pycache__/configuration_ibert.cpython-310.pyc,, +transformers/models/ibert/__pycache__/modeling_ibert.cpython-310.pyc,, +transformers/models/ibert/__pycache__/quant_modules.cpython-310.pyc,, +transformers/models/ibert/configuration_ibert.py,sha256=eZRFwXoP-EqeqKzLRAUUcTjEGdAM0QaZ8TWm6flautE,7144 +transformers/models/ibert/modeling_ibert.py,sha256=fweAtT2llKdCyFB0oFYNSYYkrrQeTpTVbpRuH11Vyco,56727 +transformers/models/ibert/quant_modules.py,sha256=ItU76CIx0XcZCPOR21dz99J9k5rK2fzffQz0jJCuNmM,30072 
+transformers/models/idefics/__init__.py,sha256=XnXH7RPak98A3W6H9eW1o8eiVgxgAMKoi6xAkKBOL8o,2360 +transformers/models/idefics/__pycache__/__init__.cpython-310.pyc,, +transformers/models/idefics/__pycache__/configuration_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/image_processing_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/modeling_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/perceiver.cpython-310.pyc,, +transformers/models/idefics/__pycache__/processing_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/vision.cpython-310.pyc,, +transformers/models/idefics/configuration_idefics.py,sha256=bhqaO3X_dIyVmYX02y276VdEDzvQvgQncPycpPWdgHo,15467 +transformers/models/idefics/image_processing_idefics.py,sha256=xcHYUAzAgIaXk92aU0YY83scvQdpQekN37UJll9utdg,7801 +transformers/models/idefics/modeling_idefics.py,sha256=jsC5pHWvo6mVySwPGU-zEdJkczYI2rfy9th1-GBkFlY,72847 +transformers/models/idefics/perceiver.py,sha256=RtKLRu3IIjUHCYcLAgZyirDbxK-ZlKKts_to0fv1x6o,9432 +transformers/models/idefics/processing_idefics.py,sha256=QxKO8rSMZCWOsHaDwzZMWQ374rh6IXRpQUNbcOFg7JI,17932 +transformers/models/idefics/vision.py,sha256=B27HyrQNrY9l9o--jMQmL9NdkJRVqYt2u36TXiyNQSs,22502 +transformers/models/idefics2/__init__.py,sha256=ZUUTVILcQ-2gepqN7ZSS2qjrHr1JjBF8p8JtQv_fVR8,2315 +transformers/models/idefics2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/configuration_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/convert_idefics2_weights_to_hf.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/image_processing_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/modeling_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/processing_idefics2.cpython-310.pyc,, +transformers/models/idefics2/configuration_idefics2.py,sha256=XwZgRmu127ZWwyc71DQPSlJCGHhV5HN2dnfC3hriReg,11810 +transformers/models/idefics2/convert_idefics2_weights_to_hf.py,sha256=3nd_V1qNTv7DehZZQLLAeKh0086xvjECNuWBvJmFbNM,6669 +transformers/models/idefics2/image_processing_idefics2.py,sha256=RAJpeMN3iNaL4YJtV_6ATVYlva5h_pVFR_bRgHL3MWY,27422 +transformers/models/idefics2/modeling_idefics2.py,sha256=Hg5PpteEtNALJNeSGxSjCDNEz48rVFwJ93-xnmop01s,93563 +transformers/models/idefics2/processing_idefics2.py,sha256=jfud4Imt_3UMyCNpOR7n-9Yrq3HlYuW6ocxhIyXA3wg,16229 +transformers/models/imagegpt/__init__.py,sha256=aPsv_YVn82O_HHaFDIsYqe8bR8hs3sk1RUlcCtaUWcc,2658 +transformers/models/imagegpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/configuration_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/convert_imagegpt_original_tf2_to_pytorch.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/feature_extraction_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/image_processing_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/modeling_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/configuration_imagegpt.py,sha256=MtR2BysoI2OoG9dPOeabhdnCz7l5M47eHXHzc5DEbJM,8820 +transformers/models/imagegpt/convert_imagegpt_original_tf2_to_pytorch.py,sha256=yneGtcrTR4Ui38NG8ogK7N_4dAyTiVBkmc8JQERb2bs,2691 +transformers/models/imagegpt/feature_extraction_imagegpt.py,sha256=iCpQ4tU3Vml44KgO43kYJvv-RcZVxe8tc794gxUktuU,1200 +transformers/models/imagegpt/image_processing_imagegpt.py,sha256=UH8YSyNGl4jI4rrPb0HrjbPnKp3PSlykBCY4vdGhjA0,14692 
+transformers/models/imagegpt/modeling_imagegpt.py,sha256=jmC1ZC5BzttNxnapndU9a607NSeUBCgvfjJ0Jl-aq2U,53680 +transformers/models/informer/__init__.py,sha256=VylZIY0U5EuIfEuvphPh-gCCgBtwRAByccv11nsTA5Q,1857 +transformers/models/informer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/informer/__pycache__/configuration_informer.cpython-310.pyc,, +transformers/models/informer/__pycache__/modeling_informer.cpython-310.pyc,, +transformers/models/informer/configuration_informer.py,sha256=G3nrYsT6Z8jdjJ3CL2OWjrY6bsIKDSs8fEiRMjXQaxU,12512 +transformers/models/informer/modeling_informer.py,sha256=o0pZ_I-KgRBgmm6lhQSSWIuoyqmMOvP7E1fDNPcPlSo,101597 +transformers/models/instructblip/__init__.py,sha256=GpbqWHExuUvlsDeouDhVv-f_etjU9Dwm006DwFiAMEg,2279 +transformers/models/instructblip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/configuration_instructblip.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/convert_instructblip_original_to_pytorch.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/modeling_instructblip.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/processing_instructblip.cpython-310.pyc,, +transformers/models/instructblip/configuration_instructblip.py,sha256=SFPht36C6hPZjZKYuOoXnN4kynYO0K7S6WKlh3msUyo,17169 +transformers/models/instructblip/convert_instructblip_original_to_pytorch.py,sha256=iustpBsjHHzjQzbAhPJvhI7ZBSXCDoa9njtK9m_gm_I,13399 +transformers/models/instructblip/modeling_instructblip.py,sha256=Gg1QQNpxU9x5r8BBaalV5oZnIQrX3MQvR3-UeV09w2o,71246 +transformers/models/instructblip/processing_instructblip.py,sha256=zJT2QvAzlJAFlADmSSr36VWNB6xLpazrqFmp3og5AE8,7856 +transformers/models/jamba/__init__.py,sha256=aD1sOCM0Rjk2I3Zh_fEh3xeot9EVYE3X3nKi8rP-KyI,1661 +transformers/models/jamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/jamba/__pycache__/configuration_jamba.cpython-310.pyc,, +transformers/models/jamba/__pycache__/modeling_jamba.cpython-310.pyc,, +transformers/models/jamba/configuration_jamba.py,sha256=c45AlRvpZ4NGGd81o1m_PhwMiyO7DFefibXk8tIz9A8,11249 +transformers/models/jamba/modeling_jamba.py,sha256=9NZmskgbYrr9nlluDO121loHPt6rTYKFbaIxKpyHFTc,88547 +transformers/models/jukebox/__init__.py,sha256=kZx3ZvfTUb90bEGC0UVrqOfoJvIWSBrUOR701WATaHI,2084 +transformers/models/jukebox/__pycache__/__init__.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/configuration_jukebox.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/convert_jukebox.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/modeling_jukebox.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/tokenization_jukebox.cpython-310.pyc,, +transformers/models/jukebox/configuration_jukebox.py,sha256=cN5kO0pVIOpa2Kwxfcwo8chuZlnybfyxOb2QnnUHM0M,26847 +transformers/models/jukebox/convert_jukebox.py,sha256=RBgOPbwIMv_42mUFJYxRv4IAGZn4cAzjTqjrMI7HtVg,11789 +transformers/models/jukebox/modeling_jukebox.py,sha256=ji9tQcWiTz7qVfmkKV7WzKA669GgNAStgBAUMwy_oqI,119566 +transformers/models/jukebox/tokenization_jukebox.py,sha256=7v083dQ8kcWvzJg4NbZUqm7y6BZVaR_tp3vqJFSOlA0,17349 +transformers/models/kosmos2/__init__.py,sha256=jUzMFMa0nRBdsr0AdK08cnugtfuAWiZTFgOow25AY5o,1967 +transformers/models/kosmos2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/configuration_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/convert_kosmos2_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, 
+transformers/models/kosmos2/__pycache__/modeling_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/processing_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/configuration_kosmos2.py,sha256=368fp2Q3pEL14rasKErL34mbpUbs1bIskR6FLH1two8,13320 +transformers/models/kosmos2/convert_kosmos2_original_pytorch_checkpoint_to_pytorch.py,sha256=3ejv6hUd6irzFnmSuFVI6Eu1NVWmtJf3_ql2h9P4AHk,2724 +transformers/models/kosmos2/modeling_kosmos2.py,sha256=orwMCflw9GwQamBJ8QgpLKcoDFB58fYnFhjJDLQMtwA,94993 +transformers/models/kosmos2/processing_kosmos2.py,sha256=wwLhLGgBBgpFeRWC3os8SXLI18od-NJagHFJMe9QROo,29760 +transformers/models/layoutlm/__init__.py,sha256=x-7_rGXFn-NroxQIFjQru0Rz5VfmQmINEhahNPm7R8w,3787 +transformers/models/layoutlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/configuration_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/modeling_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/modeling_tf_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm_fast.cpython-310.pyc,, +transformers/models/layoutlm/configuration_layoutlm.py,sha256=LxYSGeoSHa9fqrGvMCsz3-8M617Ct8PFrWUNSyGUq4g,9181 +transformers/models/layoutlm/modeling_layoutlm.py,sha256=jFNDDKZg8nv7qxDKxLSeLNU-3QNSC_Yw8rzsLCFgCDE,60816 +transformers/models/layoutlm/modeling_tf_layoutlm.py,sha256=QW6Q_RzzrUCqLKAzGDcbaMO5faqzDw2CkjSYCEZ22lc,73197 +transformers/models/layoutlm/tokenization_layoutlm.py,sha256=YlVOSz-XoiLcIEN2VikUce957pvnYl1CIyImdqIU230,20943 +transformers/models/layoutlm/tokenization_layoutlm_fast.py,sha256=rqnk6TYEbN7tCP3ElDCVk6ukNL2tm4xKmxAlkZP27X4,7787 +transformers/models/layoutlmv2/__init__.py,sha256=Ue5kj1_LyJNklq6UPXvNuaAXj_gadMT8lXxwQwIPsvY,3439 +transformers/models/layoutlmv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/configuration_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/feature_extraction_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/image_processing_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/modeling_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/processing_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2_fast.cpython-310.pyc,, +transformers/models/layoutlmv2/configuration_layoutlmv2.py,sha256=30FlO29KiaXlzvrwYUV5IniBgmx5rOH3x8dRN4D1XiI,10985 +transformers/models/layoutlmv2/feature_extraction_layoutlmv2.py,sha256=M9bDCpKBLI5paxor4ioa2JjEDhSH9Np-PTbgHh2V9KI,1195 +transformers/models/layoutlmv2/image_processing_layoutlmv2.py,sha256=yV8J93JD6AR2chy87LDh2zvl5N60MvDEreNEzaI211Y,13809 +transformers/models/layoutlmv2/modeling_layoutlmv2.py,sha256=MjLO_uxU-NHmSxoqKEGBoCa6Lk31cQCZ46YM26yW-lQ,60530 +transformers/models/layoutlmv2/processing_layoutlmv2.py,sha256=xyhBq9pYYmNYOfK2c13gA-f1cWzu1fp0kO6FC7J9DfI,9292 +transformers/models/layoutlmv2/tokenization_layoutlmv2.py,sha256=ZpOPGao8BUSVPrIjNW16HRM_GkgJCXgS-VrSuW9zjmg,72063 +transformers/models/layoutlmv2/tokenization_layoutlmv2_fast.py,sha256=jEDcU7MIeieUnorFQQ9S-DYPbyqpaooQt5KRexDzxB8,37284 +transformers/models/layoutlmv3/__init__.py,sha256=A4PpxK2Rhqx_ybVzlT5h9W6SyRSwndLqD5-eVKBz4ok,4512 +transformers/models/layoutlmv3/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/layoutlmv3/__pycache__/configuration_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/feature_extraction_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/image_processing_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_tf_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/processing_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3_fast.cpython-310.pyc,, +transformers/models/layoutlmv3/configuration_layoutlmv3.py,sha256=FoAmr5ghMAnT6P3XS6MkQ5vNJf5Pr0mUtUZPB3HexSI,13307 +transformers/models/layoutlmv3/feature_extraction_layoutlmv3.py,sha256=jWsmsi2mym0meek1lHWqfqxlJgMJdY3cgfQ_4ASEbto,1195 +transformers/models/layoutlmv3/image_processing_layoutlmv3.py,sha256=3zmcx39HvcXzHJeI70U0Jo2e6fkpUUorArlXlDHX-ow,18813 +transformers/models/layoutlmv3/modeling_layoutlmv3.py,sha256=JgAXoTWAzjaTuqd2tT9pD_0VVO6dPbcdfakOjgE3dCM,59813 +transformers/models/layoutlmv3/modeling_tf_layoutlmv3.py,sha256=hLD25aX1LQWwXoKO3DqLtzjfBpSx9wzWyKJS6CxyMfI,76785 +transformers/models/layoutlmv3/processing_layoutlmv3.py,sha256=ShtvBmZjGHbprdB14v2QsIgVir-74gEnTGHzvL31vCI,9143 +transformers/models/layoutlmv3/tokenization_layoutlmv3.py,sha256=UrCNLze0Z_6WX5KJ-qYxaAfEzOZfEofjx2Ky5hCiC_8,72055 +transformers/models/layoutlmv3/tokenization_layoutlmv3_fast.py,sha256=36lBDAt8h1qmAI2ACWUODe2Umcn4RWxSzl2MBmPtlnQ,39532 +transformers/models/layoutxlm/__init__.py,sha256=AIvjzuqRPFXFuWXxnOlp9pBXaIT5Zzx7fwtg2KKVETA,2037 +transformers/models/layoutxlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/processing_layoutxlm.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm_fast.cpython-310.pyc,, +transformers/models/layoutxlm/processing_layoutxlm.py,sha256=2xtffeErPXtu2tW_ya4YaHDoqWCljDPfoL2V1Jlo6JI,9242 +transformers/models/layoutxlm/tokenization_layoutxlm.py,sha256=FxqSo2IeMoeE9xwdVDWWClbFd4hvfwmSg6TrgH_kvDI,57299 +transformers/models/layoutxlm/tokenization_layoutxlm_fast.py,sha256=VPYbrW0yGx5dh9lMG1_6prxANqXcyfFHgK7MSuaSYGI,39769 +transformers/models/led/__init__.py,sha256=9CdjSo8a3H8LyFlzOxCmUUZG2icbvPJ_Q_hFcaKBf4E,3008 +transformers/models/led/__pycache__/__init__.cpython-310.pyc,, +transformers/models/led/__pycache__/configuration_led.cpython-310.pyc,, +transformers/models/led/__pycache__/modeling_led.cpython-310.pyc,, +transformers/models/led/__pycache__/modeling_tf_led.cpython-310.pyc,, +transformers/models/led/__pycache__/tokenization_led.cpython-310.pyc,, +transformers/models/led/__pycache__/tokenization_led_fast.cpython-310.pyc,, +transformers/models/led/configuration_led.py,sha256=5fYJMjSlrvlFor5N0cjDtoLRUNDt0U7orwavdu3NLaM,7515 +transformers/models/led/modeling_led.py,sha256=6ImPuU-K8flv0DX-jL5CuNc10WCOr78ie78cpbMUgUg,139153 +transformers/models/led/modeling_tf_led.py,sha256=drHWpT50oyMc1gLh2bNwE75K-IzP6-NYW5dj5QS5LAs,123072 +transformers/models/led/tokenization_led.py,sha256=H2B8JdOoxg6O0a_ul477ToPDDBmoUfiPGsg7zwFfe7U,19752 +transformers/models/led/tokenization_led_fast.py,sha256=oaeDl37DL_c_RzX1XbplbdQLT5F85m_cprAQ_8QQRHc,14542 +transformers/models/levit/__init__.py,sha256=bn2rphZqhhv59V7XPWBSS3nntAk8n8qi8o9uhqmi2do,2508 
+transformers/models/levit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/levit/__pycache__/configuration_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/convert_levit_timm_to_pytorch.cpython-310.pyc,, +transformers/models/levit/__pycache__/feature_extraction_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/image_processing_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/modeling_levit.cpython-310.pyc,, +transformers/models/levit/configuration_levit.py,sha256=xSTUIJE8OmQ0vGSTKVT6sGmvi5AfzUww45Au5ZdsDf8,5814 +transformers/models/levit/convert_levit_timm_to_pytorch.py,sha256=HKjk4WPa6DO_2CM0Qy9R3mAEOdbf71DtS-T4uqoQJ9I,6258 +transformers/models/levit/feature_extraction_levit.py,sha256=l2RHbrbg9MzRqKr_ErOo_AuiSv93Gj-Oq6w0v2p-Izw,1204 +transformers/models/levit/image_processing_levit.py,sha256=CD7HBX2SVeEV9eF6E3hvh6-Y051LSjvpkjy4Y8QUO3Q,17058 +transformers/models/levit/modeling_levit.py,sha256=NHsFjmJdDmT6x9_-6EeAOwsgXRl4PeADDPNjOkwpOCk,29416 +transformers/models/lilt/__init__.py,sha256=bIm8VAW84HA1oTl3ZITLrjMZ9VIyJ4s6_x9R9N767nM,1909 +transformers/models/lilt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/lilt/__pycache__/configuration_lilt.cpython-310.pyc,, +transformers/models/lilt/__pycache__/modeling_lilt.cpython-310.pyc,, +transformers/models/lilt/configuration_lilt.py,sha256=SNi4Rr4_hrCM4VUJZXbEtdPWG-qQg9L-dCeaaMqctDw,6791 +transformers/models/lilt/modeling_lilt.py,sha256=ZrwWsqOMK7PydbkrMIpH1-IK78U_VjkIKu-Y3eEjepE,52704 +transformers/models/llama/__init__.py,sha256=Jur2SZ5J29BTDTaoQfXv69e-ZpcX5NiKbzAP1DGV9-A,3349 +transformers/models/llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llama/__pycache__/configuration_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/convert_llama_weights_to_hf.cpython-310.pyc,, +transformers/models/llama/__pycache__/modeling_flax_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/modeling_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/tokenization_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/tokenization_llama_fast.cpython-310.pyc,, +transformers/models/llama/configuration_llama.py,sha256=_z3ZXQG4r1I8vFXjgLoG976t9jUxIleE39bRlY-PZKQ,9452 +transformers/models/llama/convert_llama_weights_to_hf.py,sha256=CC5jifkiq1F9LZZspnmW_G2TvGTaJ8o_KpjdmWq3uw8,14165 +transformers/models/llama/modeling_flax_llama.py,sha256=shWgZWAGG0NpsFldqkTNBpsylJHD4FBD9NGU_EmK3YE,30831 +transformers/models/llama/modeling_llama.py,sha256=tfs8mePhGNXub9Z4dK0pj7gyguwMqOjGaMLNY6D1tgk,73442 +transformers/models/llama/tokenization_llama.py,sha256=_V_1OSbNR-58lX4R3LT8iORA4wVPV1tH9hX-rBIHQ0s,22019 +transformers/models/llama/tokenization_llama_fast.py,sha256=ic94pHJ2-tx_YghLWM4iFrybBp-9lRlT7-TcTJPJy5w,13056 +transformers/models/llava/__init__.py,sha256=GJ1vhnHiwzzN27stoZkhMdatFwb0aAhIzxSi1KLckz0,1797 +transformers/models/llava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava/__pycache__/configuration_llava.cpython-310.pyc,, +transformers/models/llava/__pycache__/convert_llava_weights_to_hf.cpython-310.pyc,, +transformers/models/llava/__pycache__/modeling_llava.cpython-310.pyc,, +transformers/models/llava/__pycache__/processing_llava.cpython-310.pyc,, +transformers/models/llava/configuration_llava.py,sha256=Ai0mct3mlKEnri3jDDJU0-GjZFCIfnDQBhURpuCUdvM,6188 +transformers/models/llava/convert_llava_weights_to_hf.py,sha256=jqOHXrbRbkwXkpWF_elzKblom0oJgOKqA6r4C9ouCaA,5420 
+transformers/models/llava/modeling_llava.py,sha256=q7fT2z9Bw4iLExJhUV5rkghwpV_2JxQGu2oXjBYeACs,29751 +transformers/models/llava/processing_llava.py,sha256=uWsMuJKQCs9cQTzC97CeMccF1vAdEi_5VfJOnqu0X3Q,7176 +transformers/models/llava_next/__init__.py,sha256=U1uTqs5hULnuuZQB6x8OBWUgZ4MmYwQ-BtaY9ph57ow,2363 +transformers/models/llava_next/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/configuration_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/convert_llava_next_weights_to_hf.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/image_processing_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/modeling_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/processing_llava_next.cpython-310.pyc,, +transformers/models/llava_next/configuration_llava_next.py,sha256=PJ4aqBe6XVOE3wZRQIC61noncYPER9FTfrVbMgymeX0,6220 +transformers/models/llava_next/convert_llava_next_weights_to_hf.py,sha256=wwsI9xSFffJ5xRRUJtZVD-omnhKfDjLVXSJPYuJwFYU,15760 +transformers/models/llava_next/image_processing_llava_next.py,sha256=gVsXdxFmPyRJJjXTwMLOV_KkeAGCoII-fpFLJEXUyu4,28939 +transformers/models/llava_next/modeling_llava_next.py,sha256=X9yLO1lCmm4Ra-2tI1HIAjIFtOA2EwURRUv1He0S-OI,36505 +transformers/models/llava_next/processing_llava_next.py,sha256=S518k2ob-SGkKUuFoBQkaKO9OIdj2VVDKJUb7sg1bpQ,7193 +transformers/models/longformer/__init__.py,sha256=mbx6LG2-PW5i_Ntq3kFn1MhnegTVAs0_ZOKAGeMi5ps,4196 +transformers/models/longformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/longformer/__pycache__/configuration_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/convert_longformer_original_pytorch_lightning_to_pytorch.cpython-310.pyc,, +transformers/models/longformer/__pycache__/modeling_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/modeling_tf_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer_fast.cpython-310.pyc,, +transformers/models/longformer/configuration_longformer.py,sha256=6z-sC3XxqwMgQU3xW1E-KGkdqvMc8QSWRSkWthHm3r4,8867 +transformers/models/longformer/convert_longformer_original_pytorch_lightning_to_pytorch.py,sha256=gKyYNmo8Of0j_h6x8JSHaYc6hTyzJFwWETi5KectvFM,3026 +transformers/models/longformer/modeling_longformer.py,sha256=EaavJ7dDbttkQp9sAzqMqvYKjhdQsVy6IBPChWDy5PU,113970 +transformers/models/longformer/modeling_tf_longformer.py,sha256=5nwjvFoxbJVBHjfCUyHQ59LRamKmTMTQR2Te7-gtgws,129450 +transformers/models/longformer/tokenization_longformer.py,sha256=t64oV7_8WJMD8b2uRr1SH9OaBPddZXBxAKsWbHpGCrk,16797 +transformers/models/longformer/tokenization_longformer_fast.py,sha256=NgQuhW2aHQRkRP59XONS8rfzqQ7rk2Pw3GTh611bX40,11671 +transformers/models/longt5/__init__.py,sha256=nN2BIwcwmdcMffrxzPKx9oeVWsHu9wt1BUJYIPWfm3Y,2546 +transformers/models/longt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/longt5/__pycache__/configuration_longt5.cpython-310.pyc,, +transformers/models/longt5/__pycache__/convert_longt5x_checkpoint_to_flax.cpython-310.pyc,, +transformers/models/longt5/__pycache__/modeling_flax_longt5.cpython-310.pyc,, +transformers/models/longt5/__pycache__/modeling_longt5.cpython-310.pyc,, +transformers/models/longt5/configuration_longt5.py,sha256=dh8qcgIEBuAzNxJ3OkxtdjEXd_DMBCMzHwZSGgX9zG8,8097 
+transformers/models/longt5/convert_longt5x_checkpoint_to_flax.py,sha256=5LQpQWNG_8Fc0tU62eYf66RmJzUcb-RynDdrvziZEqw,11089 +transformers/models/longt5/modeling_flax_longt5.py,sha256=TBgoH7wMBAGNMilDvmg1U-394Z7ImK55Tm4saS-0CVs,105672 +transformers/models/longt5/modeling_longt5.py,sha256=jiAvt1CJlQYFikiNJgtglTs5DthqT4emKox8OPedTLM,106035 +transformers/models/luke/__init__.py,sha256=xuqWDYOtcrf1vEC71vfltl8ICWfW7GyU9sP8RWD-iU4,2383 +transformers/models/luke/__pycache__/__init__.cpython-310.pyc,, +transformers/models/luke/__pycache__/configuration_luke.cpython-310.pyc,, +transformers/models/luke/__pycache__/convert_luke_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/luke/__pycache__/modeling_luke.cpython-310.pyc,, +transformers/models/luke/__pycache__/tokenization_luke.cpython-310.pyc,, +transformers/models/luke/configuration_luke.py,sha256=tRGFoUmMOp8GoCAcweTSWbgVaAPr5aSlRfJexo6G3TI,6690 +transformers/models/luke/convert_luke_original_pytorch_checkpoint_to_pytorch.py,sha256=pfnDfBvJDRyCLBLdcsalZaKV01aEz0W1og2Z364hTDs,7467 +transformers/models/luke/modeling_luke.py,sha256=Dpwj3EnVnaH5B4ZCinHGxOsFmKacRi0uPjrEWnDtP34,103858 +transformers/models/luke/tokenization_luke.py,sha256=8r_ZbX0F719Ob5MTWgQZvT9TetdRDN0aAdY4LUcGhQg,84406 +transformers/models/lxmert/__init__.py,sha256=3rn46z5WOBmOrbr6e7zoIWh4F8Bf3hFBASDY0vxlxbI,3396 +transformers/models/lxmert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/configuration_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/convert_lxmert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/modeling_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/modeling_tf_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert_fast.cpython-310.pyc,, +transformers/models/lxmert/configuration_lxmert.py,sha256=ySHIfLgH1ZfM3wZ6yO8fOTm8naotfRarY5d-R1ZVWMs,9005 +transformers/models/lxmert/convert_lxmert_original_tf_checkpoint_to_pytorch.py,sha256=T3vqC76pis49OXeHODsBSBBDGDe6qnUBckwGOWySmpc,2109 +transformers/models/lxmert/modeling_lxmert.py,sha256=WE_v6Z1In05mTqEpRHxQzXh5lrNVvgXeYCfcgghOn1c,64958 +transformers/models/lxmert/modeling_tf_lxmert.py,sha256=F7SS0dxVRlR991pfUzwhoxpFABdRsB4tqp2CeTjhUW8,72721 +transformers/models/lxmert/tokenization_lxmert.py,sha256=0Se6v8_PKzz7B5twxG-as9qvq_VFNgd-PFU7WhJmDeM,20966 +transformers/models/lxmert/tokenization_lxmert_fast.py,sha256=-mHMI4WCLJoXt8nYvJRibyOYfJKByPW_a6nQV_L11PM,7720 +transformers/models/m2m_100/__init__.py,sha256=fT84ZTHmw2vMrme8MqfSoPZWSECY-SLXDG0AR8Z1qRc,1992 +transformers/models/m2m_100/__pycache__/__init__.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/configuration_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/convert_m2m100_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/modeling_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/tokenization_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/configuration_m2m_100.py,sha256=BoZVjTfxXk0SOALC-ijDnoDUwWGFtEHxLxLIll3kNLw,13461 +transformers/models/m2m_100/convert_m2m100_original_checkpoint_to_pytorch.py,sha256=xNG8NE20odOve8Z1zKPDHJr5Ev8jM30N-mJsJqfsXtM,3159 +transformers/models/m2m_100/modeling_m2m_100.py,sha256=5YQfx9TwuRJG1RPNb_nlFFFvj2J-MN9RNt7UGrIxlLI,75015 
+transformers/models/m2m_100/tokenization_m2m_100.py,sha256=2XlY9d4-Nv92SaVyM3VC1Linw0jTLwGcpsL7jyG7R9Y,16320 +transformers/models/mamba/__init__.py,sha256=xLSIqiYCZgZDg4J4rpsc-olAcskXsCL0Ckh6CA_Prvw,1798 +transformers/models/mamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mamba/__pycache__/configuration_mamba.cpython-310.pyc,, +transformers/models/mamba/__pycache__/convert_mamba_ssm_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mamba/__pycache__/modeling_mamba.cpython-310.pyc,, +transformers/models/mamba/configuration_mamba.py,sha256=kzPRNKGFg0mLB6d7L2LX9mjuj8vHA5nVY-eoC5W7Nx4,7076 +transformers/models/mamba/convert_mamba_ssm_checkpoint_to_pytorch.py,sha256=BK6M1tYEwLwoM7NE3fdm0BRpZH8LvCTGvdvsee9taZA,6454 +transformers/models/mamba/modeling_mamba.py,sha256=QwPCMLFiG9h47aCII6SeAZrGML8z-cxNEVikk0DcacU,32605 +transformers/models/marian/__init__.py,sha256=_aQPsVh7jA_BTVbCkRprc2NmnLlkhfEtfJW_1WIwUqI,3444 +transformers/models/marian/__pycache__/__init__.cpython-310.pyc,, +transformers/models/marian/__pycache__/configuration_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/convert_marian_tatoeba_to_pytorch.cpython-310.pyc,, +transformers/models/marian/__pycache__/convert_marian_to_pytorch.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_flax_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_tf_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/tokenization_marian.cpython-310.pyc,, +transformers/models/marian/configuration_marian.py,sha256=dYTlIdcilG1GNacIz1acKC6bRv4jlZYN9--gNR6wsO4,18328 +transformers/models/marian/convert_marian_tatoeba_to_pytorch.py,sha256=N_YEEFgsGy2W-4QxeGD3bIIGNl_oYv64GkTw0WDpiaU,36254 +transformers/models/marian/convert_marian_to_pytorch.py,sha256=lggn1nlv2EBgLarnYE_SKkUnDPKDgngL_xOtBJxQIgY,26775 +transformers/models/marian/modeling_flax_marian.py,sha256=vt7iI4WBYOAhz36UqJcXPIUu5q8U6xY-wwAphjOQsco,64262 +transformers/models/marian/modeling_marian.py,sha256=ci45qB1A1IYKTfLOmGjnZjLCtkYg-Vlr_VXEkYbCmmw,82080 +transformers/models/marian/modeling_tf_marian.py,sha256=IEwr-j8xPUbuYNBN6mKzYmLyK0FrmbMVGoXRo4C944w,72682 +transformers/models/marian/tokenization_marian.py,sha256=wo7Hy2uRzFP_hX84DhoZTvWUeXtWhqG5-BxmcFAsg_0,16812 +transformers/models/markuplm/__init__.py,sha256=RjQ4xza9uhSlHJ11ZIHA19o-cWoC88fJvts8zYDOznY,2806 +transformers/models/markuplm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/configuration_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/feature_extraction_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/modeling_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/processing_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/tokenization_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/tokenization_markuplm_fast.cpython-310.pyc,, +transformers/models/markuplm/configuration_markuplm.py,sha256=dGjiJGaLZ2k3eqLqp93LWrfCFjB57uR_a47UkmtYf5A,7412 +transformers/models/markuplm/feature_extraction_markuplm.py,sha256=3V8MR36mQskKYQeaGrWuqWo9w5JG67nhRvxzWu7fR9s,6404 +transformers/models/markuplm/modeling_markuplm.py,sha256=qxIDO2S2FxWvrMvraf_YT7owmxQykvjZEdTEm93DS1I,58222 +transformers/models/markuplm/processing_markuplm.py,sha256=dCxh-u2OQvsoAeK0GWGDwMgZuLIgF7tu5Q7uERx5NwY,6348 
+transformers/models/markuplm/tokenization_markuplm.py,sha256=qVpE_pPBwLioGbNHM2-uqoNiwFCoDACSCaExhZ0MvDc,68972 +transformers/models/markuplm/tokenization_markuplm_fast.py,sha256=v75Kirn2W7tCxDT-AULtW9G3Ew_53mg0HiHFhYYUFys,42939 +transformers/models/mask2former/__init__.py,sha256=_damTN4svyRG1tenZi3AEmsILg7QVyYbuWR_iXzrbXw,2357 +transformers/models/mask2former/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/configuration_mask2former.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/convert_mask2former_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/image_processing_mask2former.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/modeling_mask2former.cpython-310.pyc,, +transformers/models/mask2former/configuration_mask2former.py,sha256=kIO8nw1sWYX8CR0slZaoIlz458ffD9BCAzule31RMcM,12545 +transformers/models/mask2former/convert_mask2former_original_pytorch_checkpoint_to_pytorch.py,sha256=v4a-VTdnEHxZLAykOn5AgqLXZ9yFZzhY4CUu4c3XHUE,45688 +transformers/models/mask2former/image_processing_mask2former.py,sha256=Vj7p448RldI_FjAUSpj0UrptYIkBco7zTE7aErwwhkM,56953 +transformers/models/mask2former/modeling_mask2former.py,sha256=fPaM7RF73nS62864624WGE64DyQmcHdBKJv2Zn9ybVM,121194 +transformers/models/maskformer/__init__.py,sha256=Sy9sX8-Vb9Gnn9gjU34M4pDh3jJZd7vmr5aorB9N5lw,2945 +transformers/models/maskformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer_swin.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_swin_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/feature_extraction_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/image_processing_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer_swin.cpython-310.pyc,, +transformers/models/maskformer/configuration_maskformer.py,sha256=O5fbg-DYPkvs2HZ0M2OvU21o4RyyTxmX7vzA5SWwIwg,10463 +transformers/models/maskformer/configuration_maskformer_swin.py,sha256=kdf9AuIG8DYqJPIsvz_2rlIqSEZJ3UBI0IXlFdao3YM,7217 +transformers/models/maskformer/convert_maskformer_original_pytorch_checkpoint_to_pytorch.py,sha256=CEKaBhurc8x3mvE7YMqfULIoybxq0Guj0hGHJouG5s8,32237 +transformers/models/maskformer/convert_maskformer_resnet_to_pytorch.py,sha256=iUMC5om4caBO1eSeivN3sZYsbEtYZAeJZE7I1NIygR4,20732 +transformers/models/maskformer/convert_maskformer_swin_to_pytorch.py,sha256=-GWvua0iYDbJYZ7VUcywp0rf-jR7iKXz8az9N4r5k_0,20321 +transformers/models/maskformer/feature_extraction_maskformer.py,sha256=MMPQuQY2EnK4vixDve-I-PIFqCDWQNYYeVdAYvIY8HY,1214 +transformers/models/maskformer/image_processing_maskformer.py,sha256=CjaNU-cO2SU0DWDgn6GVBBiSPYuz1nBazRsHVDxND_Y,58796 +transformers/models/maskformer/modeling_maskformer.py,sha256=AmyHQu9CJRAWtSWoiOOV_dovrCVXpL_jvlOBjP-Hh5U,94251 +transformers/models/maskformer/modeling_maskformer_swin.py,sha256=2DyRWtHLA077-GWY0Z2mngv62I0RpGVHKr3NhIJm3c8,40758 +transformers/models/mbart/__init__.py,sha256=N1NqaZU1QPNt3r2VI3y4sv-XwdBkAtV-41REYSah7w4,4403 
+transformers/models/mbart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mbart/__pycache__/configuration_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/convert_mbart_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_flax_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_tf_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart_fast.cpython-310.pyc,, +transformers/models/mbart/configuration_mbart.py,sha256=lpHgLTu647Hk7bxJfCxIwXiAY4YUnzBkYXHEU_-f7GM,18162 +transformers/models/mbart/convert_mbart_original_checkpoint_to_pytorch.py,sha256=xVW9Mj-jd7X_MImJCgS52Aok1CGPf-E6u8ptvG1hK8o,3035 +transformers/models/mbart/modeling_flax_mbart.py,sha256=uUgTTL5zTGbJZX45q4YoPKiSbizfXNsx8jr-T7P2C_c,75090 +transformers/models/mbart/modeling_mbart.py,sha256=MrVWKcOvoqX1Oy4gGbdeELtsWNUJU3PtWCsHwVmtnas,100931 +transformers/models/mbart/modeling_tf_mbart.py,sha256=JsKe79VRjtf9p1SgbH8dnbQGUd5fe5CnYgGNijT-Mys,74195 +transformers/models/mbart/tokenization_mbart.py,sha256=cyxJpDRR-_GxBmUqaxwXzWC5SOmgvlSSIsDdtF8N8xo,14106 +transformers/models/mbart/tokenization_mbart_fast.py,sha256=1ieIvKkfDtKZe_hHOaZNbSt6fzVPylKoYOtNI3T6rpw,10997 +transformers/models/mbart50/__init__.py,sha256=5ekQCS9OkL3_5UJXnu7Z5cVeCi76pVgAxHkC8qQ8XKk,1847 +transformers/models/mbart50/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50.cpython-310.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50_fast.cpython-310.pyc,, +transformers/models/mbart50/tokenization_mbart50.py,sha256=INTdGnO_YBeB7mWdpBgkz8PH-prQOKd1dP92qbBsKDE,16307 +transformers/models/mbart50/tokenization_mbart50_fast.py,sha256=4XQPT5nXMLElCwfHfy4uTolWe2VmD1HcXdVJH0jQ3oA,11594 +transformers/models/mega/__init__.py,sha256=sJJLSLHF1HMGGOkDRFol40JHptUCxSDiB0yUUbvDVL4,2140 +transformers/models/mega/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mega/__pycache__/configuration_mega.cpython-310.pyc,, +transformers/models/mega/__pycache__/convert_mega_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mega/__pycache__/modeling_mega.cpython-310.pyc,, +transformers/models/mega/configuration_mega.py,sha256=R3GBbjk7bx8U3WwmQCEKvonPwq4sjZz09npQTkRaWKQ,12681 +transformers/models/mega/convert_mega_original_pytorch_checkpoint_to_pytorch.py,sha256=FK9gAgMB5VEO2Fji39w100ywUJ8wA8utdmWRZFanb2c,13154 +transformers/models/mega/modeling_mega.py,sha256=Og3W7ctai3HEB4nn_XKmKOOxyfwysFl8qx9Vbxz3mMI,109507 +transformers/models/megatron_bert/__init__.py,sha256=TUAneYZq0bKIQqKDcED_EuJhgnzOnWNrNrye_x8KX90,2506 +transformers/models/megatron_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/configuration_megatron_bert.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/convert_megatron_bert_checkpoint.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/modeling_megatron_bert.cpython-310.pyc,, +transformers/models/megatron_bert/configuration_megatron_bert.py,sha256=8g4wxCt4-bc5xnm0aoESAknTnH7XWOzDt2RqS088-mk,6572 +transformers/models/megatron_bert/convert_megatron_bert_checkpoint.py,sha256=VAMD1MFdVG8w9cQkRfmlZCEvaMgoo-lyFI9deunD5OA,13686 +transformers/models/megatron_bert/modeling_megatron_bert.py,sha256=LM8kNMgqBDEAqKpVpCFVomjUx8P-6sqBdi17tMHccsE,83326 
+transformers/models/megatron_gpt2/__init__.py,sha256=WycFl9cUevoXIBhB76qKtnNRIPMk2LoTDkmkfAfOy9M,630 +transformers/models/megatron_gpt2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/megatron_gpt2/__pycache__/checkpoint_reshaping_and_interoperability.cpython-310.pyc,, +transformers/models/megatron_gpt2/__pycache__/convert_megatron_gpt2_checkpoint.cpython-310.pyc,, +transformers/models/megatron_gpt2/checkpoint_reshaping_and_interoperability.py,sha256=NPoWPPSaT29iHoGRoyc1B_hdc67QNoytsVj_glQF430,36692 +transformers/models/megatron_gpt2/convert_megatron_gpt2_checkpoint.py,sha256=UPLXCjF4Fixnw_gy6kzxTK64ioxo_EIxwSVO6oKCqqQ,13661 +transformers/models/mgp_str/__init__.py,sha256=YMCtFGSXL18Kh4Pm3KTBEgtxlaDDYwb3WnMFsEsaJ-4,2164 +transformers/models/mgp_str/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/configuration_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/modeling_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/processing_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/tokenization_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/configuration_mgp_str.py,sha256=mtWn6cdfdjRwPuVHK5mPjUhDV3EDk7gwDdAEeD4x6dk,5881 +transformers/models/mgp_str/modeling_mgp_str.py,sha256=VG8u9M4zIvKZxb2nUtU0Uycfmcxl9dN2STTMQId813Q,21997 +transformers/models/mgp_str/processing_mgp_str.py,sha256=dh1MJ17yNZdoorG_Mi31Q7waqTnyRock-s4c2k_g0DQ,9298 +transformers/models/mgp_str/tokenization_mgp_str.py,sha256=CIz9yrKh2VPsckVtYJ0pynFgPhwYY9XuyJasmKD9mKo,3776 +transformers/models/mistral/__init__.py,sha256=b9KtZaVe1auCaeEzoRC_zvykp9KwyW8vqNpww-3jgls,2428 +transformers/models/mistral/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mistral/__pycache__/configuration_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/convert_mistral_weights_to_hf.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modeling_flax_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modeling_mistral.cpython-310.pyc,, +transformers/models/mistral/configuration_mistral.py,sha256=KUIqpzgcLT_7MJphnDYjLYtECaxPH7A_vTRLczcZeYU,6986 +transformers/models/mistral/convert_mistral_weights_to_hf.py,sha256=bG8KXwc1rd3kSd5IothmZGiDiOfhERfh3VrS6_wOaoM,10725 +transformers/models/mistral/modeling_flax_mistral.py,sha256=1xBy97GmBslNjfZZ580ZAfqrRGviVILi0QGf1qbxDPE,31682 +transformers/models/mistral/modeling_mistral.py,sha256=qQ-grOKA7tSrydpeAK3a12Hg6xNeDraWCXX-iNt74Hg,63538 +transformers/models/mixtral/__init__.py,sha256=gUOb9IB2p_2uISpGaLaKXTWW0-nWVa4INgiTZmO8guE,1806 +transformers/models/mixtral/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/configuration_mixtral.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/convert_mixtral_weights_to_hf.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/modeling_mixtral.cpython-310.pyc,, +transformers/models/mixtral/configuration_mixtral.py,sha256=fDcKtS7nez_luNZqh2TUaWttVIfaeCg01JMKmS8KAwI,8204 +transformers/models/mixtral/convert_mixtral_weights_to_hf.py,sha256=WExicalIwkZccqWyRjUU2LBvbL6cM6yiOG_Oby6t3Ok,9156 +transformers/models/mixtral/modeling_mixtral.py,sha256=kMSCKxNqxYQ-5u0io2hBbIoLjuTAIbLU7OMqlVVKoBM,73646 +transformers/models/mluke/__init__.py,sha256=Pj0GBjIU6vYdhEzO7M8O35c5Jj4ivIIGAiLABhN4K7U,1356 +transformers/models/mluke/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mluke/__pycache__/convert_mluke_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, 
+transformers/models/mluke/__pycache__/tokenization_mluke.cpython-310.pyc,, +transformers/models/mluke/convert_mluke_original_pytorch_checkpoint_to_pytorch.py,sha256=G6Z94-1_AiilSTU96PSjX_pdgFIx-b_bk8xlMKX5TuE,10185 +transformers/models/mluke/tokenization_mluke.py,sha256=vhnToeQkpEz4-UrFY7bNvQTtY768HqYJIymkMr1LxnE,80833 +transformers/models/mobilebert/__init__.py,sha256=Gpd8kL6D0UrD5ufVg0MjcknSeHhtlLnD3Bkrzqao4Ok,4604 +transformers/models/mobilebert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/configuration_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/convert_mobilebert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/modeling_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/modeling_tf_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert_fast.cpython-310.pyc,, +transformers/models/mobilebert/configuration_mobilebert.py,sha256=8Qr1xG8QJdOOAU25gronQyBATzHUBWTjHqi4lQ28VMs,8319 +transformers/models/mobilebert/convert_mobilebert_original_tf_checkpoint_to_pytorch.py,sha256=MRW9sorswIo4RiWq7PVVmaZsYm4wJEc1-DhcLzssDRU,2200 +transformers/models/mobilebert/modeling_mobilebert.py,sha256=1hkXfYNB1gqnRPhN6_cyD3FC7JkYIpvXsGAFzQXQohE,70492 +transformers/models/mobilebert/modeling_tf_mobilebert.py,sha256=rSLG0FE5IsFJ_Oft8e7B34qgIY2LNs6rfO8zB-Nd97U,83824 +transformers/models/mobilebert/tokenization_mobilebert.py,sha256=yeRHMwhu-UZVVWhOS307ThYnDE_JvXXASObFN-hn1AM,20951 +transformers/models/mobilebert/tokenization_mobilebert_fast.py,sha256=Yp5FcPJNWWdqk41D6xDu44gN8OWd0jj104BdDbGaqdg,7798 +transformers/models/mobilenet_v1/__init__.py,sha256=rbZvH8u5nov7gMxVexJZTVa8yJSIwI4ZHilp8sTEw64,2735 +transformers/models/mobilenet_v1/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/configuration_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/convert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/feature_extraction_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/image_processing_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/modeling_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/configuration_mobilenet_v1.py,sha256=eDDU1UZCqr7jqsSLaZo4AEDs-Bp-k9OSFvhTUJszIaM,4976 +transformers/models/mobilenet_v1/convert_original_tf_checkpoint_to_pytorch.py,sha256=XjGgfnPQBWp-0pNakJ1CNU1YnoYfeXCZ9WSIrTf02n8,4932 +transformers/models/mobilenet_v1/feature_extraction_mobilenet_v1.py,sha256=goR0AC-IhWMrQlvzSK_0Zej42JYN-oswSGNQWnIOENU,1222 +transformers/models/mobilenet_v1/image_processing_mobilenet_v1.py,sha256=7cu5EhkSZEaw2acPGiFQ9Dthq775OjiDA1THH3O_Rec,15814 +transformers/models/mobilenet_v1/modeling_mobilenet_v1.py,sha256=5PS_5KEL5by6zSiwKiOuFvyd_OsjmRi0Aq6DKD945Dw,18673 +transformers/models/mobilenet_v2/__init__.py,sha256=p4OHu9O6JD4N2TcjOgLu7S2u151xEvGwvdHizbzevc0,2830 +transformers/models/mobilenet_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/configuration_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/convert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/feature_extraction_mobilenet_v2.cpython-310.pyc,, 
+transformers/models/mobilenet_v2/__pycache__/image_processing_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/modeling_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/configuration_mobilenet_v2.py,sha256=cVT4iIhjLWpvLkdcL7PFsw4TPNnBR8gAVUAIgOYfp9o,6872 +transformers/models/mobilenet_v2/convert_original_tf_checkpoint_to_pytorch.py,sha256=acsdT3rMMqCPV9whw2xyiVK1UOs8tr8ySvYRFNRmVWM,6402 +transformers/models/mobilenet_v2/feature_extraction_mobilenet_v2.py,sha256=_IUVvyoMBsqymCoh-CVmoswZ4nOBpqFJlaoUfD8WQ3E,1222 +transformers/models/mobilenet_v2/image_processing_mobilenet_v2.py,sha256=MebPYCgZFQzhQO6-ImjmUte7VEyVdE-NoOP9-16mnds,18168 +transformers/models/mobilenet_v2/modeling_mobilenet_v2.py,sha256=VfeECqbXCid4JFWHLrCJBXESCrSyAP_ohoITFv7MSS8,34578 +transformers/models/mobilevit/__init__.py,sha256=AN8UeJz0pDko_ezgS5J4cYAZT3P6Hv2EZKlqZGnkgSI,3492 +transformers/models/mobilevit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/configuration_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/convert_mlcvnets_to_pytorch.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/feature_extraction_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/image_processing_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/modeling_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/modeling_tf_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/configuration_mobilevit.py,sha256=Wuobe30bTBHctDCJfEScHIH5NRZRwc4dKigCO1LfwSs,7634 +transformers/models/mobilevit/convert_mlcvnets_to_pytorch.py,sha256=Ng8zzr_CxIO9IFcf0ijXqR_EWJeAhhQ3HAkethSpCn4,12402 +transformers/models/mobilevit/feature_extraction_mobilevit.py,sha256=na2H01bKIhQsyCHayPaVase5HRGRmmO7zVDDuY76Uj0,1207 +transformers/models/mobilevit/image_processing_mobilevit.py,sha256=4R2jNDd2WCJLkHoY7Tcw2vTxNK-DHtZXo5EBcVz4_CE,21926 +transformers/models/mobilevit/modeling_mobilevit.py,sha256=TLDir_hBJQmzX0umT8gKY1oDWCjHpW79p2bwNQUDvrA,39916 +transformers/models/mobilevit/modeling_tf_mobilevit.py,sha256=G2JHw9KRbgsU-WTJZMnITp07v82UMA3brVxdaVUKI4E,54787 +transformers/models/mobilevitv2/__init__.py,sha256=kSj85QHMKZk8_MdSUYKIsFL6V8SCAJWQlzo1hlvlYw8,2111 +transformers/models/mobilevitv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/configuration_mobilevitv2.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/convert_mlcvnets_to_pytorch.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/modeling_mobilevitv2.cpython-310.pyc,, +transformers/models/mobilevitv2/configuration_mobilevitv2.py,sha256=JbYqOFvezBa32_Vj7XAGOkpykCr5lrEDr8yn5aIjkWs,7195 +transformers/models/mobilevitv2/convert_mlcvnets_to_pytorch.py,sha256=ZzEtog7BRgGK8W0zwC_peXQOOaBkuduPO3Tbq9_xtjo,12557 +transformers/models/mobilevitv2/modeling_mobilevitv2.py,sha256=Zh4ZyIiqnNI8gbiHlpqMk2pmjHenwTYBl2A4Ye3cp5c,38291 +transformers/models/mpnet/__init__.py,sha256=hyB4jNWDdoHWggavnqLZEF85f9a11vXSTKaLWTdPh-k,3875 +transformers/models/mpnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/configuration_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/modeling_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/modeling_tf_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet_fast.cpython-310.pyc,, 
+transformers/models/mpnet/configuration_mpnet.py,sha256=Nte9C0DNs8c5e5MNNSzHSrDHtlTgXSC8vNxG-Av1MOI,5397 +transformers/models/mpnet/modeling_mpnet.py,sha256=l9HpxRanbLMN-d1CwcY1QuQmybrsGyTglFuczRc3z7A,42584 +transformers/models/mpnet/modeling_tf_mpnet.py,sha256=SqFduEwKKdXhBc7Xuq5vkGS5pncpXtG6-wkJd95gH30,55565 +transformers/models/mpnet/tokenization_mpnet.py,sha256=rcUWsm6ts7ICY0V3R4edO3SEtrStuERIkOz1-T8C8JI,22126 +transformers/models/mpnet/tokenization_mpnet_fast.py,sha256=DUx1wGlRfhxppFPHfvT68EFY9MBLdcyzhBebBdcT5lQ,9158 +transformers/models/mpt/__init__.py,sha256=ZH7_XPJ100kSo0osi0XxzbkyFHj6HnS9ghjxpsqVXac,1977 +transformers/models/mpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mpt/__pycache__/configuration_mpt.cpython-310.pyc,, +transformers/models/mpt/__pycache__/modeling_mpt.cpython-310.pyc,, +transformers/models/mpt/configuration_mpt.py,sha256=ya7QViklVzl49I67jUR1CsJjITpJWlf2YxERvDr0oCA,11328 +transformers/models/mpt/modeling_mpt.py,sha256=cA-7cCbE_8pM6O6mlnAmD31QUdIlJONXmOzFEIgZrc0,40784 +transformers/models/mra/__init__.py,sha256=CotdFTXkFtz90MDv55my886vc-0VBxs8h3mnGs-z7WQ,2254 +transformers/models/mra/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mra/__pycache__/configuration_mra.cpython-310.pyc,, +transformers/models/mra/__pycache__/convert_mra_pytorch_to_pytorch.cpython-310.pyc,, +transformers/models/mra/__pycache__/modeling_mra.cpython-310.pyc,, +transformers/models/mra/configuration_mra.py,sha256=OKxqjOaXJHU1_Ly2Dv0CvOUtk93HnKYim6U2v6-Qkrw,6606 +transformers/models/mra/convert_mra_pytorch_to_pytorch.py,sha256=LhaVlQ4q88gtewg-geRYZ748xQ3brLLhyDIo-OGWSdI,4247 +transformers/models/mra/modeling_mra.py,sha256=5EBeNs9HhkLV6bt28XZmmDB1DHetneADa7ipxVFZa8E,61949 +transformers/models/mt5/__init__.py,sha256=q5f0AWvlyU1eQjk0OXCpMZ4OM3qNDq35Pv6RuxrWQeI,3597 +transformers/models/mt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mt5/__pycache__/configuration_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_flax_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_tf_mt5.cpython-310.pyc,, +transformers/models/mt5/configuration_mt5.py,sha256=3G5sz5XV_HSRRV4pCDbPhbKlIdJgKxq7Yd6fcisPvXQ,7900 +transformers/models/mt5/modeling_flax_mt5.py,sha256=1p8D9st-unpG0rcRGDrUQG__3GIFa77Wst8cYgOGVng,4243 +transformers/models/mt5/modeling_mt5.py,sha256=CfKL55AKbeodV1K78F1gyJT4RdNfWmwY1DxNcj9WsbI,113084 +transformers/models/mt5/modeling_tf_mt5.py,sha256=9Stq04drvy7iyZaptOzmDAWsUzXsKoTFTNsvCjceq_E,3326 +transformers/models/musicgen/__init__.py,sha256=EY9dwTvFbwcUcdSclI-kp8xvRO24giI4UJMAmiOWIr0,2099 +transformers/models/musicgen/__pycache__/__init__.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/configuration_musicgen.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/convert_musicgen_transformers.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/modeling_musicgen.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/processing_musicgen.cpython-310.pyc,, +transformers/models/musicgen/configuration_musicgen.py,sha256=HGXJukbgs6Kv_-Tic1aPV9PdCFuRB53x5pI_UdQigbI,11485 +transformers/models/musicgen/convert_musicgen_transformers.py,sha256=F-F2BnXZYxNcRjxFDs6OjL1Zy1VxKXVtbHY2dZKXuPY,9397 +transformers/models/musicgen/modeling_musicgen.py,sha256=fpRO52LgAwSIcTu7VOiDEpuWHNu3goz3uUY5xy_-7-A,144184 +transformers/models/musicgen/processing_musicgen.py,sha256=wJE7gvyKPFVyMj5O_pD1Tg1BCC3RizsRIyHo_eV4_os,5666 
+transformers/models/musicgen_melody/__init__.py,sha256=juLVRBOSmHDQx5sK1_EOJwdsEVlAMeLeGsNoMWBvuN8,2822 +transformers/models/musicgen_melody/__pycache__/__init__.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/configuration_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/convert_musicgen_melody_transformers.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/feature_extraction_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/modeling_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/processing_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/configuration_musicgen_melody.py,sha256=Brm08IlkCxkcvX1SwAApwVNUwoA0FKTOpa3QKd2l3rE,12596 +transformers/models/musicgen_melody/convert_musicgen_melody_transformers.py,sha256=xH9oSDc7IibPUzBRVy-Ej49ahmPirUKS65zJGDv8eso,11355 +transformers/models/musicgen_melody/feature_extraction_musicgen_melody.py,sha256=XC80TogbFCW4uoyqrQmYyzji_oQMaPZu8eXKYvo5zTU,15226 +transformers/models/musicgen_melody/modeling_musicgen_melody.py,sha256=fB8MJdxl5sCF7gx5pnniPr-dJw6aiDaDeKVo1akqGJg,139123 +transformers/models/musicgen_melody/processing_musicgen_melody.py,sha256=4DbgucxyP7S7l0ndOkLnQzYgT6oaSLF1_KERckJYBEs,8633 +transformers/models/mvp/__init__.py,sha256=w3eswhHeLn9gayC1Cl8kfkkMGtD036aJeZF2541NmqM,2536 +transformers/models/mvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mvp/__pycache__/configuration_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/modeling_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp_fast.cpython-310.pyc,, +transformers/models/mvp/configuration_mvp.py,sha256=ojj9wCM4voL2IzXUIGCfV1IazatycYCfboTx1OiFL7g,8409 +transformers/models/mvp/modeling_mvp.py,sha256=g03ncnRytUZNOGrb1pdX3vUW0f_G6USCuqWjALlWXcQ,92806 +transformers/models/mvp/tokenization_mvp.py,sha256=JlR6l5XT5U4eU_20FRb69tmEXvexJa1d88moJ3jxj3E,16192 +transformers/models/mvp/tokenization_mvp_fast.py,sha256=Pa8ZaGtDrfhrWLnB9FPsO2OGU131E1l5HEAS25Nv6bc,12268 +transformers/models/nat/__init__.py,sha256=YY8yjsIBbTC1eZRAnR4_p_gHQ3n4JyywB2G1JQuM4AQ,1776 +transformers/models/nat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nat/__pycache__/configuration_nat.cpython-310.pyc,, +transformers/models/nat/__pycache__/modeling_nat.cpython-310.pyc,, +transformers/models/nat/configuration_nat.py,sha256=WEKNfKWwVlH-SR92Jvr4J3JJocqRukbIwWBlKkzS2EE,7068 +transformers/models/nat/modeling_nat.py,sha256=GXRWTDUeICyQKG2PS6w6GV_wC-tsBdeElbJZ0ACndIQ,39963 +transformers/models/nezha/__init__.py,sha256=ae3hJzlO_gAa20enOImKo15phpgIXk2_Zt8tVLAY3MU,2233 +transformers/models/nezha/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nezha/__pycache__/configuration_nezha.cpython-310.pyc,, +transformers/models/nezha/__pycache__/modeling_nezha.cpython-310.pyc,, +transformers/models/nezha/configuration_nezha.py,sha256=sGon9On54UmX--XHnq_XJO6nn99Q5XKP4kbXJnMD234,4911 +transformers/models/nezha/modeling_nezha.py,sha256=lAnbauROwjhz8d_b9TvYB6f0ENV-gTBdQVHxVe-ShXs,74706 +transformers/models/nllb/__init__.py,sha256=tM7_FdmE7zOQm68GoRQiRt1jbYfPea9kC24QJSSMgIE,1868 +transformers/models/nllb/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb.cpython-310.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb_fast.cpython-310.pyc,, 
+transformers/models/nllb/tokenization_nllb.py,sha256=eDOOWSyY_i2El1iJLeyIyw_L_6R-uTsnwqqBRvDBGGk,21177 +transformers/models/nllb/tokenization_nllb_fast.py,sha256=PrDf5r9NPXtZvV2i1DSqPr9IsEvU4cbSmSVdV4xE20A,16417 +transformers/models/nllb_moe/__init__.py,sha256=ULdz8wrqlqfamWMIQpjmmkPJPPznr34f2JxkYkqquCQ,1978 +transformers/models/nllb_moe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/configuration_nllb_moe.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/convert_nllb_moe_sharded_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/modeling_nllb_moe.cpython-310.pyc,, +transformers/models/nllb_moe/configuration_nllb_moe.py,sha256=wguMH_tP2_CxQ1L6lt79MhspBlQ8qtcY4UOEcVyqv4U,11268 +transformers/models/nllb_moe/convert_nllb_moe_sharded_original_checkpoint_to_pytorch.py,sha256=c9Zab9qVzNESk0U2exJNaoDwUQo_Q7ZpcZHViZjqTQQ,6477 +transformers/models/nllb_moe/modeling_nllb_moe.py,sha256=ZbkFDhVGw9jFn48JuNcNDPTSpCJnqyNQZ6vmKx1T5XQ,85158 +transformers/models/nougat/__init__.py,sha256=2cSw40yf-T81USela2GvWs-NSXWHkOa6zJ_3BO7QSCY,1914 +transformers/models/nougat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nougat/__pycache__/convert_nougat_to_hf.cpython-310.pyc,, +transformers/models/nougat/__pycache__/image_processing_nougat.cpython-310.pyc,, +transformers/models/nougat/__pycache__/processing_nougat.cpython-310.pyc,, +transformers/models/nougat/__pycache__/tokenization_nougat_fast.cpython-310.pyc,, +transformers/models/nougat/convert_nougat_to_hf.py,sha256=S6wb6SK-46EHmBvoNSu8n-C1RgbOwzL7XBtCSmTHLrM,10941 +transformers/models/nougat/image_processing_nougat.py,sha256=AfDySnr8HCJcNiMRLP8WM1Nl7d6ey7RFbLtd6bho2ts,24253 +transformers/models/nougat/processing_nougat.py,sha256=65OZ7-XvFeiEwFjEi69ZDY931w6NvHTHGo9EixCVxKU,6731 +transformers/models/nougat/tokenization_nougat_fast.py,sha256=6RGSauQ6VpkBv0PPvpyrJ_-Qhc3K1N8WlJ-WGgPr_s0,24702 +transformers/models/nystromformer/__init__.py,sha256=80Fr1KQ5iZtS-bmWIrqfo26_Yp43SbHRv_YSloD2J4I,2337 +transformers/models/nystromformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/configuration_nystromformer.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/convert_nystromformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/modeling_nystromformer.cpython-310.pyc,, +transformers/models/nystromformer/configuration_nystromformer.py,sha256=kdFsYx-kvXoKCrgT4gpuAMaNOxsHXc1oDHWCUuplkDw,6472 +transformers/models/nystromformer/convert_nystromformer_original_pytorch_checkpoint_to_pytorch.py,sha256=8K5IGFosME-LAljFLuTc09oce1IwxZDcxw1KPHsamqc,4197 +transformers/models/nystromformer/modeling_nystromformer.py,sha256=tcVoT_hFFbc6kb496I4aOvQvoTCrA8hmU8E5czgqa9A,48742 +transformers/models/olmo/__init__.py,sha256=FEnfRTkA2TAavu2m_O1LssOT6zevDYs0035_E5i5-t8,1732 +transformers/models/olmo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/olmo/__pycache__/configuration_olmo.cpython-310.pyc,, +transformers/models/olmo/__pycache__/convert_olmo_weights_to_hf.cpython-310.pyc,, +transformers/models/olmo/__pycache__/modeling_olmo.cpython-310.pyc,, +transformers/models/olmo/configuration_olmo.py,sha256=RcmB0knGCKxFQFDH5Gh2ypu45Gb7yiovCPSLw-K-EH4,9005 +transformers/models/olmo/convert_olmo_weights_to_hf.py,sha256=SI91Kn_B_m0oel2kuJ2LUMGqfaNZL4Q4sT2ydqNYZlE,9413 +transformers/models/olmo/modeling_olmo.py,sha256=i6cpv1QYrXvoU_BkCczyE3TgUn_aQN0xFJR8MI4aYZM,62608 
+transformers/models/oneformer/__init__.py,sha256=mhWiuUMUOFF1ba9KLNdNJYPYScCLxlZ61WiyO995jjo,2402 +transformers/models/oneformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/configuration_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/convert_to_hf_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/image_processing_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/modeling_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/processing_oneformer.cpython-310.pyc,, +transformers/models/oneformer/configuration_oneformer.py,sha256=9JvCU8bi_KcdFMbWzvrhztW06x10xmTSwLqPzyCME8w,13639 +transformers/models/oneformer/convert_to_hf_oneformer.py,sha256=yBWS0SE1sGS9UqCzX2EdbhAiIWvBCumSBwutJ8VQFF4,50691 +transformers/models/oneformer/image_processing_oneformer.py,sha256=mwnXNsryPyA2Vc5IyxhPZGKV907ygpCEWAD1TCz_8c4,61446 +transformers/models/oneformer/modeling_oneformer.py,sha256=VhdI5egJWGD24nKHzh6BZOwCdt4W1_IyZljAUVYy4D0,143634 +transformers/models/oneformer/processing_oneformer.py,sha256=ahso8fGMLGb078QfY8T5o1bDj5OaptoMbIxiTIJGM7c,9377 +transformers/models/openai/__init__.py,sha256=5Y0BYw7AWmCFdxKdBMd4-wTi9wj6-8lX7Ii1WvFlfA8,3658 +transformers/models/openai/__pycache__/__init__.cpython-310.pyc,, +transformers/models/openai/__pycache__/configuration_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/convert_openai_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/openai/__pycache__/modeling_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/modeling_tf_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/tokenization_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/tokenization_openai_fast.cpython-310.pyc,, +transformers/models/openai/configuration_openai.py,sha256=K4R5f-M6amdHH9UkD4QKj_CRS1Liv-237nLhFFzX0ho,7180 +transformers/models/openai/convert_openai_original_tf_checkpoint_to_pytorch.py,sha256=nAomaHvwIi5gFuedK1WtT61GCu5tBxLE5zj6bY-fjGo,2666 +transformers/models/openai/modeling_openai.py,sha256=pGf2wwZOugId3hdeccKHrGhQjemKSgnd18meWURAY18,38348 +transformers/models/openai/modeling_tf_openai.py,sha256=ay7nG6vZa9WUpyIHfnywScXat8KzLxpYngFq5uE8vQw,41157 +transformers/models/openai/tokenization_openai.py,sha256=atE_RCEqT2cAW4F_w3bjgol1hrkRZW23DlclRpI-PMI,15161 +transformers/models/openai/tokenization_openai_fast.py,sha256=f-Q8i5p_-QqKcwn4FAz-7Mso5rrAJ5ea3l9qCA237q0,2522 +transformers/models/opt/__init__.py,sha256=MQ8MhQamtoySbkT8WbqZ48mMUxp5Ae_UGX2Sl3HKPEc,2977 +transformers/models/opt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/opt/__pycache__/configuration_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/convert_opt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_flax_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_tf_opt.cpython-310.pyc,, +transformers/models/opt/configuration_opt.py,sha256=szA8kNmDaySrS0-dKc9xnkjsj93n3oEr06jGUnflxWE,6660 +transformers/models/opt/convert_opt_original_pytorch_checkpoint_to_pytorch.py,sha256=7dHR6Tk9BBuFMEmHOxbu0jDf-gOnYFPsPLLH6SsA1gI,3858 +transformers/models/opt/modeling_flax_opt.py,sha256=MHJpXRbl4u1JcgWkV58DmS6n0wEOTYpZBeOJQFzdBT0,31541 +transformers/models/opt/modeling_opt.py,sha256=v3orKDmKUqV9UzYrRdOWepHWbepaINPo4tIluNocWx4,67821 
+transformers/models/opt/modeling_tf_opt.py,sha256=SoVD0Dmrgak3O6SH2Qtlgn_2LFgfmHMM5hhAibKcVBI,49554 +transformers/models/owlv2/__init__.py,sha256=fvzKBoWfoB8-9hZKeId1Qvy3p_N9PLgsGoXzrg-fBzI,2606 +transformers/models/owlv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/configuration_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/convert_owlv2_to_hf.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/image_processing_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/modeling_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/processing_owlv2.cpython-310.pyc,, +transformers/models/owlv2/configuration_owlv2.py,sha256=6WJ3kNm8nuP3pr0zPF3vsJV-PUjRAUaG8PUL1DnaKfA,15569 +transformers/models/owlv2/convert_owlv2_to_hf.py,sha256=rF02k9XWTswf4P4ZZ76ekB3be6pRsFJLtbuWaJpyx3Y,22018 +transformers/models/owlv2/image_processing_owlv2.py,sha256=0KXB-hkcMZozinbj3XNCV5DrU0488Ljsan-FBoTX9I8,26857 +transformers/models/owlv2/modeling_owlv2.py,sha256=SICNuQQV1cFFwjg3Oe47ynrV4NOJKnruW3uQAz_EPGM,82637 +transformers/models/owlv2/processing_owlv2.py,sha256=WUAZC5nLIqVLseH1odt8F32mHZV2R2iaGe1eWq-9dMY,10046 +transformers/models/owlvit/__init__.py,sha256=zBsZnxDQ28eWv3rpN77KfHfIQPv4sIurjn-kNoykQyo,2915 +transformers/models/owlvit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/configuration_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/convert_owlvit_original_flax_to_hf.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/feature_extraction_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/image_processing_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/modeling_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/processing_owlvit.cpython-310.pyc,, +transformers/models/owlvit/configuration_owlvit.py,sha256=DiQQkU-sAB3v7wWvdaPqLONaHSFDcnRZOgecihzcurI,16760 +transformers/models/owlvit/convert_owlvit_original_flax_to_hf.py,sha256=tofzNZcVROwfYoV7pV6u50Am3TFm-XmuJEAGwNvRT9o,13988 +transformers/models/owlvit/feature_extraction_owlvit.py,sha256=yPO8FbUw3YabKbsV_ozKpIr6JixO9knVw1eMIHeiCtY,1186 +transformers/models/owlvit/image_processing_owlvit.py,sha256=vYcwjzcsheXUv-ZQARjVwuJGK6rJuAkQPy6GQPWE7uo,28604 +transformers/models/owlvit/modeling_owlvit.py,sha256=-79hyl8AnLg61C96IO-ziTZOSwif7sRdbdy_Z1nlKw8,76269 +transformers/models/owlvit/processing_owlvit.py,sha256=0nSZZV8HtYmywaCfUqMCWYadqAO3QtMi8S-Jt_y8ai0,11042 +transformers/models/patchtsmixer/__init__.py,sha256=z9KtbxxAyoNMB0DkWBDvpxmgfZMzx5B056p1nlLjhIE,2204 +transformers/models/patchtsmixer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/patchtsmixer/__pycache__/configuration_patchtsmixer.cpython-310.pyc,, +transformers/models/patchtsmixer/__pycache__/modeling_patchtsmixer.cpython-310.pyc,, +transformers/models/patchtsmixer/configuration_patchtsmixer.py,sha256=zIYc8MJd5-IyOtYtelhj4MyyRSSfpkQ-IsfkRKDdPb4,12636 +transformers/models/patchtsmixer/modeling_patchtsmixer.py,sha256=rvIs9cO-TCB3XfsiOqfnaqljnCfgbhOs9Ox9IutnyzY,87930 +transformers/models/patchtst/__init__.py,sha256=AyK9VUDx2iphFn8IMvgt49apReqE0VBTxrjDwE6fRhc,2071 +transformers/models/patchtst/__pycache__/__init__.cpython-310.pyc,, +transformers/models/patchtst/__pycache__/configuration_patchtst.cpython-310.pyc,, +transformers/models/patchtst/__pycache__/modeling_patchtst.cpython-310.pyc,, +transformers/models/patchtst/configuration_patchtst.py,sha256=f6-QffpEI96dwoJa3VaF4pe5yA4WsHe5TdUzWHnGQTA,12588 
+transformers/models/patchtst/modeling_patchtst.py,sha256=Ik1QEFo2AFRqRH8WetZqaTBZZcve6Hy-PzOgwrYC5cc,91775 +transformers/models/pegasus/__init__.py,sha256=SXHYeNzkJrHfERo9lhqyvu3S75BYDmqceiFfim50Y_g,4111 +transformers/models/pegasus/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/configuration_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/convert_pegasus_tf_to_pytorch.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_flax_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_tf_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus_fast.cpython-310.pyc,, +transformers/models/pegasus/configuration_pegasus.py,sha256=A_bUgZAQkLLed1QVTetk4iv5HEYZ5o83cCbKwLECN8I,7571 +transformers/models/pegasus/convert_pegasus_tf_to_pytorch.py,sha256=9geJowNAukZc9FE2OEq0pXQi6ynw9k-2NFtlmISxpUg,5359 +transformers/models/pegasus/modeling_flax_pegasus.py,sha256=NbaPRG_BeTrZQbbZCxUOWxwdgSKSrHWkjTicOP3Yhvk,65974 +transformers/models/pegasus/modeling_pegasus.py,sha256=cLGjeXqI0QHoVj5WzpaKG42A8NSgmvqwKuiw5specCU,80560 +transformers/models/pegasus/modeling_tf_pegasus.py,sha256=8dfcnMG6muIhoLDDU-p3LCmnFX5itzOzSQipqm5mIeo,74202 +transformers/models/pegasus/tokenization_pegasus.py,sha256=zRyVOMqZunsKvEp0Hq4ZIdP8fwlMASOB_bTKk_TNaPg,13125 +transformers/models/pegasus/tokenization_pegasus_fast.py,sha256=eOwadGhA-cEAkwkDVL7AXO79GqF04XFP6IzsYA2jBuI,9942 +transformers/models/pegasus_x/__init__.py,sha256=M7Ef6UH-lQ53z-17c-XQi5nmmi-uVz8UKFHQe71LDVU,1828 +transformers/models/pegasus_x/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pegasus_x/__pycache__/configuration_pegasus_x.cpython-310.pyc,, +transformers/models/pegasus_x/__pycache__/modeling_pegasus_x.cpython-310.pyc,, +transformers/models/pegasus_x/configuration_pegasus_x.py,sha256=6-gA3-ZuyoXH05JLW0ByC6bZwRlHSGiMkyrHVQBszNw,8187 +transformers/models/pegasus_x/modeling_pegasus_x.py,sha256=Gsa3XrpmqiInfdYlLTxJAJn8ulySoGwjQUdZk2yZRdQ,75728 +transformers/models/perceiver/__init__.py,sha256=y-6ZMYh3FfGpj9A1gZafPXrfGKJoGKEenKlJT9ZZEw8,3293 +transformers/models/perceiver/__pycache__/__init__.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/configuration_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/convert_perceiver_haiku_to_pytorch.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/feature_extraction_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/image_processing_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/modeling_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/tokenization_perceiver.cpython-310.pyc,, +transformers/models/perceiver/configuration_perceiver.py,sha256=iRH1_I7sIbZ7cHN9VX8RCk7Rkr8L2-CKPRcdkVFejLo,12256 +transformers/models/perceiver/convert_perceiver_haiku_to_pytorch.py,sha256=f8p0sPVQv19tMDKkIM8IfTg60-SYX9MMABAzstxFt7k,21286 +transformers/models/perceiver/feature_extraction_perceiver.py,sha256=0lW_qh5ONtUwr0ARM9RB9hizA76wL6fmeofDrhbIsXI,1207 +transformers/models/perceiver/image_processing_perceiver.py,sha256=cAMSnIE8lGaciJZNu6BxdT4YccgYPwYPTZOjP5GQOVY,17940 +transformers/models/perceiver/modeling_perceiver.py,sha256=mfr-5rrSXQd5A-_QrbUscd88mg0zKAEJtg0R3mGqfTM,146577 
+transformers/models/perceiver/tokenization_perceiver.py,sha256=VOWp64riIrTTB7oqvLBq7N6_U515ZWzaaVpwSx7SncI,8020 +transformers/models/persimmon/__init__.py,sha256=gp5VkpnXik0R_PBRitY6UBMcBDMmL41N8o1LjPW_Hmo,1835 +transformers/models/persimmon/__pycache__/__init__.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/configuration_persimmon.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/convert_persimmon_weights_to_hf.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/modeling_persimmon.cpython-310.pyc,, +transformers/models/persimmon/configuration_persimmon.py,sha256=2aaAXelQi6liHfBO-Y3GD7P01viYvrRMZffRzcsmfE4,7766 +transformers/models/persimmon/convert_persimmon_weights_to_hf.py,sha256=F3NFcbCWD-UxFwgp2h-Nv78_M0p0LELPq4re30ZNIjU,4644 +transformers/models/persimmon/modeling_persimmon.py,sha256=3WjBbCWVV-WpYE1ygrSm68hZzugnSaE1yLrez8s9QVk,47096 +transformers/models/phi/__init__.py,sha256=cSkf7i5ur4JTXt8gWalgbD-HFoJeFjMVTH3u5IOfICE,1971 +transformers/models/phi/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phi/__pycache__/configuration_phi.cpython-310.pyc,, +transformers/models/phi/__pycache__/convert_phi_weights_to_hf.cpython-310.pyc,, +transformers/models/phi/__pycache__/modeling_phi.cpython-310.pyc,, +transformers/models/phi/configuration_phi.py,sha256=DAf4-DIrEm9Kf9PJ1jmlFAr_qNx8PHqSJXf-TFnTXdI,9181 +transformers/models/phi/convert_phi_weights_to_hf.py,sha256=XrjgtZm6GZQx01rZ0q52g6e4ajyZhl8n02QNchAD6BQ,7685 +transformers/models/phi/modeling_phi.py,sha256=HOwh6THQTkU-uHtWyW9kSlM1LihW2e3zsjrkXoAm30s,68404 +transformers/models/phobert/__init__.py,sha256=JDAAoG6FOpN1o5kgFBbHkoko9NsiioFi-ZAeAgR79nY,955 +transformers/models/phobert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phobert/__pycache__/tokenization_phobert.cpython-310.pyc,, +transformers/models/phobert/tokenization_phobert.py,sha256=vVIbAd64za3WGuIBg_oE1P08ZjyO_k_G6cozkOhsvnI,13093 +transformers/models/pix2struct/__init__.py,sha256=VSpzQStsFkcbIF3aftcNle95WQ7-cZzuWwDhjgzK-IU,2701 +transformers/models/pix2struct/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/configuration_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/convert_pix2struct_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/image_processing_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/modeling_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/processing_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/configuration_pix2struct.py,sha256=K9WDf0uYHsBnri4cOD35chHbumq4BFy5Yam9xa0Js1U,17392 +transformers/models/pix2struct/convert_pix2struct_original_pytorch_to_hf.py,sha256=m_S-9oxyN4PQafRbWQIP-G0NUDrTqxOmr8IwiHNCOuU,5886 +transformers/models/pix2struct/image_processing_pix2struct.py,sha256=snQZl3jqenJyk_wbmXK_hZJKO2Z5PyYEVFdVn1oeI6o,19727 +transformers/models/pix2struct/modeling_pix2struct.py,sha256=PRv5wLiWuc_vFOrYNwullQ0jgzY9OFaJWAb1iXzgQgc,82752 +transformers/models/pix2struct/processing_pix2struct.py,sha256=YFwg3KSy0SKXAkBucCTOwsOFSm7pFYj-M6bCViLYVqU,6960 +transformers/models/plbart/__init__.py,sha256=uNjyVJsOGh5eb2iNYSc7av9uNk-n3xB6rLv3BSRBKoY,2429 +transformers/models/plbart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/plbart/__pycache__/configuration_plbart.cpython-310.pyc,, +transformers/models/plbart/__pycache__/convert_plbart_original_checkpoint_to_torch.cpython-310.pyc,, 
+transformers/models/plbart/__pycache__/modeling_plbart.cpython-310.pyc,, +transformers/models/plbart/__pycache__/tokenization_plbart.cpython-310.pyc,, +transformers/models/plbart/configuration_plbart.py,sha256=3QlFT2I53WTd037uiBMl8EfFvMrqjxuwoatvsFFWHmk,8601 +transformers/models/plbart/convert_plbart_original_checkpoint_to_torch.py,sha256=BOXNudNSr1xevmHnvNpa_4ya3Q89m6J4lndQhCWSLB8,3553 +transformers/models/plbart/modeling_plbart.py,sha256=3fksu2VaEcuJHTbBJbk8x_t5W53PD3szCoJOgABWrVI,84390 +transformers/models/plbart/tokenization_plbart.py,sha256=GE-X-wTX7ML8tS8_-9SjfPSUs4fkq5gRt_6hOyGqbnk,18746 +transformers/models/poolformer/__init__.py,sha256=fzMbnIpAxBApWl0QVCU965q9km5dySep9Hjhml26r68,2586 +transformers/models/poolformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/configuration_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/convert_poolformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/feature_extraction_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/image_processing_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/modeling_poolformer.cpython-310.pyc,, +transformers/models/poolformer/configuration_poolformer.py,sha256=cJ_No-RVvQ6cBkyhYFldiliiZQ_gP1Q3JqsACbN0C8U,5677 +transformers/models/poolformer/convert_poolformer_original_to_pytorch.py,sha256=Vvlp7ju7kr2sg1NdXKma6vYGABjs4sVhPKhgFKPJRpk,7947 +transformers/models/poolformer/feature_extraction_poolformer.py,sha256=KDL4tg7hxwzQKYmGc6jMZfzeD9UCTb00oNfbejIjzmk,1214 +transformers/models/poolformer/image_processing_poolformer.py,sha256=fObDfm06UHCQ3fl6JeLmKM-UREdAiV5RPlwO4aYCaCQ,18325 +transformers/models/poolformer/modeling_poolformer.py,sha256=En2zhLB5rHAuki31Ov0tRay52-tXyNr1svHM3BtwyQc,17840 +transformers/models/pop2piano/__init__.py,sha256=wxMmbwwAuqcGF8MimtfwAf4JPJ5D8x8up-q4yRlwU5E,3819 +transformers/models/pop2piano/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/configuration_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/convert_pop2piano_weights_to_hf.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/feature_extraction_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/modeling_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/processing_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/tokenization_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/configuration_pop2piano.py,sha256=QCVZ_BJ7XvAV0bD9-G4tV_NXJPwTgqMeAgL-ofESzTw,6030 +transformers/models/pop2piano/convert_pop2piano_weights_to_hf.py,sha256=eZuC9RFueLoOmsaGWMa-6hNQyLBLTg9WXlRQRuiQerA,8626 +transformers/models/pop2piano/feature_extraction_pop2piano.py,sha256=SBNQB6aol_Uan2p_z33IQue9y4exatqd80XyzHGBoqY,19839 +transformers/models/pop2piano/modeling_pop2piano.py,sha256=QZ1lv2nJbsjlbIHD7oRh8MpvgdpWP-ULF04RckDN2E4,65620 +transformers/models/pop2piano/processing_pop2piano.py,sha256=ytBqku-v0wCqeK4_JVd-0SNCI7jmYltMb5wDzagn6V4,5525 +transformers/models/pop2piano/tokenization_pop2piano.py,sha256=Y3grUs2_4YvgUDxDAhe4hBBJe0RyAZq_ofx11jw1M5A,32677 +transformers/models/prophetnet/__init__.py,sha256=1w4cY9QLl0elN9_oFDScwrb0F12-54b5ylPrxCiqpFw,2157 +transformers/models/prophetnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/configuration_prophetnet.cpython-310.pyc,, 
+transformers/models/prophetnet/__pycache__/convert_prophetnet_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/modeling_prophetnet.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/tokenization_prophetnet.cpython-310.pyc,, +transformers/models/prophetnet/configuration_prophetnet.py,sha256=IGfCeoZGihbqnOTXCFnA_jebhsWVsfOrIBUZ7To_LBg,8973 +transformers/models/prophetnet/convert_prophetnet_original_pytorch_checkpoint_to_pytorch.py,sha256=EzgNdUzWNQowTUpyfXO-_RBZEw0sa5sVA1b7jbqFUxU,7055 +transformers/models/prophetnet/modeling_prophetnet.py,sha256=L6Fh66H5gaaQAmoUyhMng4UwbF6OsSVZ8Nsu3WB4BeQ,115458 +transformers/models/prophetnet/tokenization_prophetnet.py,sha256=5vc6UgMSkJlybAN5nDfDCeqxkItxl-1RUFsWLfX0LPg,20874 +transformers/models/pvt/__init__.py,sha256=FxRer-Bn0NI00eTjXYOlUzVNJMH50lB78JEWPk1BNuw,2384 +transformers/models/pvt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pvt/__pycache__/configuration_pvt.cpython-310.pyc,, +transformers/models/pvt/__pycache__/convert_pvt_to_pytorch.cpython-310.pyc,, +transformers/models/pvt/__pycache__/image_processing_pvt.cpython-310.pyc,, +transformers/models/pvt/__pycache__/modeling_pvt.cpython-310.pyc,, +transformers/models/pvt/configuration_pvt.py,sha256=gf4nBOZrbywy8Wxsy85BwEVPObgPo9oHkXHDc4HXlz8,7015 +transformers/models/pvt/convert_pvt_to_pytorch.py,sha256=1DIHp33moj_2LrWws9x02AZ9qRrVMCQ3jifvV3SxmFc,9738 +transformers/models/pvt/image_processing_pvt.py,sha256=dRcMJCdWkBPZek4hG6gbJ2zyDGRBWbpEGm4caGJZAIc,14267 +transformers/models/pvt/modeling_pvt.py,sha256=uZAEEDvOJpHIx-VxChXuiU60c_DSE7Wgv4THk_1SYE4,28350 +transformers/models/pvt_v2/__init__.py,sha256=juUzRcgqzQAI5MHUbyhwB3lIeQeTk05FR9n3YQFWAQo,1832 +transformers/models/pvt_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/configuration_pvt_v2.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/convert_pvt_v2_to_pytorch.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/modeling_pvt_v2.cpython-310.pyc,, +transformers/models/pvt_v2/configuration_pvt_v2.py,sha256=-vu-7Yg-hVJ6S5FSyqKTqX8M7w0cDO_S8k2UEKYQy6c,7963 +transformers/models/pvt_v2/convert_pvt_v2_to_pytorch.py,sha256=OqYTYB1bssEh4C-AwCFG0VDDcEWZa1Su5kUkrn_UcOo,12077 +transformers/models/pvt_v2/modeling_pvt_v2.py,sha256=iQL_48n_xGDak3-7A2TDLixYEa8t-HCdzLkuBhVnlrw,29417 +transformers/models/qdqbert/__init__.py,sha256=x3xI7kd5kpsjAvYJT8SrR5_uCeInhVA8repNZFRtXhU,2402 +transformers/models/qdqbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qdqbert/__pycache__/configuration_qdqbert.cpython-310.pyc,, +transformers/models/qdqbert/__pycache__/modeling_qdqbert.cpython-310.pyc,, +transformers/models/qdqbert/configuration_qdqbert.py,sha256=ZEEbypDoWw0fwcOV-5H24BmaP0fzcBxra3l-HI7yC0Y,5787 +transformers/models/qdqbert/modeling_qdqbert.py,sha256=FNtfupgOXaUvmlH9OiTzImEVx86ZV78U3-JGIAMpH80,77285 +transformers/models/qwen2/__init__.py,sha256=9gokBZ-g_YdJeUBfioDa7ZRVQdTgZ_nNQA03axWYwEw,2354 +transformers/models/qwen2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/configuration_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/modeling_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2_fast.cpython-310.pyc,, +transformers/models/qwen2/configuration_qwen2.py,sha256=caoAxItLWG4oMDOv4Rzp34w-m7xRYT4RzQYXsQPFWCs,6729 
+transformers/models/qwen2/modeling_qwen2.py,sha256=8cehF87SDuYy5MF9CZgoYzBNbjC7RPDM3r1LJ93iWa0,63883 +transformers/models/qwen2/tokenization_qwen2.py,sha256=y9hRJ6oYYRa_4UyoQUPU_BlsrnTPKoEByiCQ3zelSmE,13913 +transformers/models/qwen2/tokenization_qwen2_fast.py,sha256=dwuIyJcKjM58NS8TLAUNhiYpjdMxdTgj0iXJCQ2j7rE,5159 +transformers/models/qwen2_moe/__init__.py,sha256=zrwptL-PWmTeTM7Z8JcUxeUiY1dViTHY0LGvP4BhNpg,1829 +transformers/models/qwen2_moe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qwen2_moe/__pycache__/configuration_qwen2_moe.cpython-310.pyc,, +transformers/models/qwen2_moe/__pycache__/modeling_qwen2_moe.cpython-310.pyc,, +transformers/models/qwen2_moe/configuration_qwen2_moe.py,sha256=-DQ4llhbz9NXF77T5IfSZCMoO3KQexubDMTqITb1nos,8538 +transformers/models/qwen2_moe/modeling_qwen2_moe.py,sha256=tOra9gJt6OoN0X-GtMg-UCRmP8umduJ29kPFVHyF220,73498 +transformers/models/rag/__init__.py,sha256=omMwtpcTWBHYKZvt8NIxbACHhICmYWfeTgiC7O4U88g,2426 +transformers/models/rag/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rag/__pycache__/configuration_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/modeling_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/modeling_tf_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/retrieval_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/tokenization_rag.cpython-310.pyc,, +transformers/models/rag/configuration_rag.py,sha256=9B2B7I_Ep2pduixD8ZTJfBz1ZLPYhE3cioN8xDmrWZk,8339 +transformers/models/rag/modeling_rag.py,sha256=3Z76u5RQI5rfrEs3mECzu43gN9msavXjt35CUaFDNRo,85799 +transformers/models/rag/modeling_tf_rag.py,sha256=kEbSfcPwE94BqHh_h94XjoPd5OJcN5aQ8vNu23-rkUU,88806 +transformers/models/rag/retrieval_rag.py,sha256=DVxhTiqqcQzFtDruk_mx8oprFI7D5l9HGjuM17xvzPg,29923 +transformers/models/rag/tokenization_rag.py,sha256=O5gPSIP0dOyYEe5k4VjcCttsbAoAAZ6338z0IsWF690,4576 +transformers/models/realm/__init__.py,sha256=k3gccDAsk5YJYrjrd8hOZCc1q8KJR2EMoGhvEdF-OTU,2675 +transformers/models/realm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/realm/__pycache__/configuration_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/modeling_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/retrieval_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/tokenization_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/tokenization_realm_fast.cpython-310.pyc,, +transformers/models/realm/configuration_realm.py,sha256=k74z55xCo5QoRHXicrYAt2aPYxmYNvU_0JvyHa_o_cc,7653 +transformers/models/realm/modeling_realm.py,sha256=_HWjXt0j-I5WfgypfkqZhXeUAIrKVNtZ7w0vFQL-UuM,84062 +transformers/models/realm/retrieval_realm.py,sha256=86jQyu1U8QePlahXS8rGD_E6TlvEqQeqg21qSsAno-M,6370 +transformers/models/realm/tokenization_realm.py,sha256=XhYz17ej45dQtk35h1peHxcyVaS-hrE6Gu_FCJA4pJY,23127 +transformers/models/realm/tokenization_realm_fast.py,sha256=PuyK8CEVibBXCVGxrwKnz3siAvcr0JbUGD6DCLM1Yi0,10950 +transformers/models/recurrent_gemma/__init__.py,sha256=gUE-KRPGeD_b-aklMGn9oDbnSdZ8twDOQXUxL2zWkIo,1708 +transformers/models/recurrent_gemma/__pycache__/__init__.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/configuration_recurrent_gemma.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/convert_recurrent_gemma_to_hf.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/modeling_recurrent_gemma.cpython-310.pyc,, +transformers/models/recurrent_gemma/configuration_recurrent_gemma.py,sha256=R70TEAu3_3PwRdVJD7kOrQ5DbmuMQ6Z36jF0yWCyqVU,7714 
+transformers/models/recurrent_gemma/convert_recurrent_gemma_to_hf.py,sha256=jZGkZ2FmNFWsZXz37gf86NjLRFbgLTK6C-ZO6-JChks,7965 +transformers/models/recurrent_gemma/modeling_recurrent_gemma.py,sha256=FO_judafGpCu8s-ChBW25WIrMKrR9ALbzCYkO3idH8s,43285 +transformers/models/reformer/__init__.py,sha256=MKhG4aefK429UY32oYQbVTLm1T2L_SIYS_TNnrWnTwA,3139 +transformers/models/reformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/reformer/__pycache__/configuration_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/convert_reformer_trax_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/reformer/__pycache__/modeling_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer_fast.cpython-310.pyc,, +transformers/models/reformer/configuration_reformer.py,sha256=KnktX4WZXYzBWzuKUFEk0DD8sMEqTMuv476IDrRW5P4,13266 +transformers/models/reformer/convert_reformer_trax_checkpoint_to_pytorch.py,sha256=axn3FvdtVSdQT5V5u1-sfJ3sMV3YpEU6r5B10bTYZ8o,7818 +transformers/models/reformer/modeling_reformer.py,sha256=-6XZjj2memPGxK8ENCrEjSrKIUA-wKFpd7LiQYTdh74,115220 +transformers/models/reformer/tokenization_reformer.py,sha256=999VPmUyPWWb1wYfzo6GJ_-qKRw1ooSwZubUZ_gJMKA,6728 +transformers/models/reformer/tokenization_reformer_fast.py,sha256=oJutU_vcsfO8wONlB9zPIiVwOlmcsPm8hMKR80elYMc,4247 +transformers/models/regnet/__init__.py,sha256=KQR1LgyjMxE0d-7nACPCHiRXo0rSm93vfcy8puDXbuE,3168 +transformers/models/regnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/regnet/__pycache__/configuration_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/convert_regnet_seer_10b_to_pytorch.cpython-310.pyc,, +transformers/models/regnet/__pycache__/convert_regnet_to_pytorch.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_flax_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_tf_regnet.cpython-310.pyc,, +transformers/models/regnet/configuration_regnet.py,sha256=rksbsyBauYuJK0Kk0sB1qpFqn2igmdy_G0SCyGw57Go,4044 +transformers/models/regnet/convert_regnet_seer_10b_to_pytorch.py,sha256=zDPbUZRiO0lJl7hdUztm9JnUAbOI1Wv5wyHZdCKQ-d0,11770 +transformers/models/regnet/convert_regnet_to_pytorch.py,sha256=lvSaB1ny0EKvS4KfhTpbNjdrYI6xE1zmYctM_O_a_Ak,18719 +transformers/models/regnet/modeling_flax_regnet.py,sha256=2Ao7eODWcHufpZoNbGC4FbX6tZVE2bfWWrZSMbPGcMg,28410 +transformers/models/regnet/modeling_regnet.py,sha256=PBAvLx6VQ5OMFrjdXW5gu1fplONoUdItEe0SI_gt11E,17282 +transformers/models/regnet/modeling_tf_regnet.py,sha256=5Rrqu4B-mKgdRVzROCZ0iVycKJ9s6-yRAEbJ8aIrxao,24402 +transformers/models/rembert/__init__.py,sha256=XC3xr6aUNReL6SzFXr6TyAWPg9EXiBFl4o225gmkNQQ,4514 +transformers/models/rembert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rembert/__pycache__/configuration_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/convert_rembert_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/rembert/__pycache__/modeling_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/modeling_tf_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert_fast.cpython-310.pyc,, +transformers/models/rembert/configuration_rembert.py,sha256=qGyEjUnMzOvTdhB69fUbbz4cOEgb_hmFsPV4-K7_Bcs,7339 
+transformers/models/rembert/convert_rembert_tf_checkpoint_to_pytorch.py,sha256=C-TS1MrtQHTxK3j5HUKwlcrQItW24T7_iPvtt8KGbAU,2208 +transformers/models/rembert/modeling_rembert.py,sha256=W0I5fogXdxKg0HjW8gxx8GqAUdXXq2DjTI53IYwBnb8,68242 +transformers/models/rembert/modeling_tf_rembert.py,sha256=yyNfB5wAlnoSOaTCw3mMlWiHiE4Wu1m19U4WOJDlauU,77785 +transformers/models/rembert/tokenization_rembert.py,sha256=36jnFfhXiaSSyGHHGQng6gcHPTBmApNRIFZ0acudQLk,10593 +transformers/models/rembert/tokenization_rembert_fast.py,sha256=QZxF21vv0eR_-SXTateBSIzUpE_ksyX6yNdvpUR_4Lk,9997 +transformers/models/resnet/__init__.py,sha256=n63hjzrOOmaIXaDS0F9thB531jarpWDBkXmgFaMBRbo,3216 +transformers/models/resnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/resnet/__pycache__/configuration_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/convert_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_flax_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_tf_resnet.cpython-310.pyc,, +transformers/models/resnet/configuration_resnet.py,sha256=iYo3k5z_nHTI0auXFXHux1Z2KukX5UKFXAV1zdF4z8o,6117 +transformers/models/resnet/convert_resnet_to_pytorch.py,sha256=ShZl8Ob5ElrgRujQCoGXWdIY_99UICrWqiHdSzFdOHc,7287 +transformers/models/resnet/modeling_flax_resnet.py,sha256=uJMz2FgVXm6ffwjiorCHkuPbCRra8VdN1vYILRuIgxY,24607 +transformers/models/resnet/modeling_resnet.py,sha256=h66AeS7VpdGODycEN_aOpdF-j4xUGPUiinyE9Y2_CtA,19362 +transformers/models/resnet/modeling_tf_resnet.py,sha256=jWdPo8kjVlReTCpPExtovB_uDvmFgZhkXCn41fIcl9I,23752 +transformers/models/roberta/__init__.py,sha256=GvGX0z6XPZtwkfCh4K2xagGOK0tlW0DT91QVQhTcA4o,5091 +transformers/models/roberta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roberta/__pycache__/configuration_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/convert_roberta_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_flax_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_tf_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta_fast.cpython-310.pyc,, +transformers/models/roberta/configuration_roberta.py,sha256=udsSFUQ1_ZIXg_-F0EZL33gsSziMuugAnO32_dDJE9Q,7359 +transformers/models/roberta/convert_roberta_original_pytorch_checkpoint_to_pytorch.py,sha256=MmHtq9AhcXXd-V8Fz0XWC8n-PL-S1MSdFhTCVM6Cksk,8002 +transformers/models/roberta/modeling_flax_roberta.py,sha256=Bz5VgKKwWnVVmRFyHD11Ug7IlvgwOLIMbGI0lBkMHt8,56976 +transformers/models/roberta/modeling_roberta.py,sha256=jtqJFONRl66bRXy_xY2O-oQ5BWECpsk6DVQxLPl00So,71188 +transformers/models/roberta/modeling_tf_roberta.py,sha256=QLF5cfwQbjcHvdzcnx87a8s5Vz7qOJXO3GTPXGtPsHg,79979 +transformers/models/roberta/tokenization_roberta.py,sha256=4Ft2MWhG4ESEO2yJ0a_8jaUU98IxIH2oLaorhotpC1w,16451 +transformers/models/roberta/tokenization_roberta_fast.py,sha256=I_QlLewfaCL3Apb-tpGleYJ4hUErNMY6uTSajTDBa18,11422 +transformers/models/roberta_prelayernorm/__init__.py,sha256=C9bA_ah_10OCt_LUT1bsOJTUjSt6boV2frOKBtHCes4,5391 +transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/configuration_roberta_prelayernorm.cpython-310.pyc,, 
+transformers/models/roberta_prelayernorm/__pycache__/convert_roberta_prelayernorm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_flax_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_tf_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/configuration_roberta_prelayernorm.py,sha256=ciwjO--yU5b9Iy8NgIFvpOTyYfw7Ph0urUs0Mi6_yOI,7920 +transformers/models/roberta_prelayernorm/convert_roberta_prelayernorm_original_pytorch_checkpoint_to_pytorch.py,sha256=ti9rttSVMs3SemlZrVQFkDKKHBubrk29d4lQkpkI3Ro,2975 +transformers/models/roberta_prelayernorm/modeling_flax_roberta_prelayernorm.py,sha256=zMZKU2wl45qTh4ex3R9bf1PUVF12uC5vaVxIXQNqLNk,60537 +transformers/models/roberta_prelayernorm/modeling_roberta_prelayernorm.py,sha256=15wxCekW0VI80tCofOE_v68JhsqDd_g6Kh5NBk9XADI,73789 +transformers/models/roberta_prelayernorm/modeling_tf_roberta_prelayernorm.py,sha256=yUztEknHx5y5SCYeGD4AqrZg24VpCMh6C96Oq3xCISs,83157 +transformers/models/roc_bert/__init__.py,sha256=ItDlyJx76hWJLT_159wnQgdWC82bT-TG_FpFzjRqXaU,2875 +transformers/models/roc_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/configuration_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/modeling_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/tokenization_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/configuration_roc_bert.py,sha256=6M-ypuBQmDyE9Xb0wUipQ5GbE5aCh_fcAzaMQN_h-vU,8599 +transformers/models/roc_bert/modeling_roc_bert.py,sha256=rFs2KtEfNEDeyLBkwYJFRQznmz1jU-R39NxJSoFMc40,92994 +transformers/models/roc_bert/tokenization_roc_bert.py,sha256=PTfS5M-zkr_j65IkmdDcTdrcFiyP2J2VSIpCNoFB2vM,50197 +transformers/models/roformer/__init__.py,sha256=1EFy2Zdn9AdraO-fmIpYg1q_HLYq-7rT5qDL_8Gurnc,5333 +transformers/models/roformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roformer/__pycache__/configuration_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/convert_roformer_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_flax_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_tf_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer_fast.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_utils.cpython-310.pyc,, +transformers/models/roformer/configuration_roformer.py,sha256=qjJAt1_uh5PrBZrVXouHR2zJWQuAWTgeUtxB7xL_UeI,6904 +transformers/models/roformer/convert_roformer_original_tf_checkpoint_to_pytorch.py,sha256=G57qbbWpRH06sm041u6D3BdNE7mCPSDvlaNLOZjWdvY,2240 +transformers/models/roformer/modeling_flax_roformer.py,sha256=ammOyHhVp0FbXflVtY-RpMH8-nX3lYhb0A_lG6JJg0A,39100 +transformers/models/roformer/modeling_roformer.py,sha256=DugUhmdn5vqueKfTxE6McTCens2KznBT8NRLzrrcpjo,69220 +transformers/models/roformer/modeling_tf_roformer.py,sha256=U1ZtEFFTT3vfIuuIM4MTZg4DPs-lJdbNV7nXlAk3kZ8,66018 +transformers/models/roformer/tokenization_roformer.py,sha256=k0JWRQlo_yDxGQ7AvvHVVbMA--Q8fCdGJW-5F38TsC0,21993 
+transformers/models/roformer/tokenization_roformer_fast.py,sha256=b11QDgBAetkJGh9VDCyxItOEHcKbXzplEKshs1WRMEQ,6678 +transformers/models/roformer/tokenization_utils.py,sha256=0ciH13qW2kCa5my1rPwfwAuSXX-jGzN0nzemvGvOBxw,2652 +transformers/models/rwkv/__init__.py,sha256=2uUo3Zi2By-3QKG7YkrEqllvFG4_SqJZ-NeplOxHCD4,1780 +transformers/models/rwkv/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/configuration_rwkv.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/convert_rwkv_checkpoint_to_hf.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/modeling_rwkv.cpython-310.pyc,, +transformers/models/rwkv/configuration_rwkv.py,sha256=Qrlj9KTeC6X8ii7uCuQ5_X1EzBn5kPFzuVud08XsbF8,5273 +transformers/models/rwkv/convert_rwkv_checkpoint_to_hf.py,sha256=oXXZN2tt_yWCRAkqpE6-7kDPMy4PyKaYmpMZwsH-IUE,6994 +transformers/models/rwkv/modeling_rwkv.py,sha256=dXDCWFosWKhINGhVgfs0P6BYaVB-X-oArEXI7txHYS4,37800 +transformers/models/sam/__init__.py,sha256=1wiFtdU-_NON6yx4QfFBk4vrfwN4hHv7JEA3CSGq_wU,2980 +transformers/models/sam/__pycache__/__init__.cpython-310.pyc,, +transformers/models/sam/__pycache__/configuration_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/convert_sam_to_hf.cpython-310.pyc,, +transformers/models/sam/__pycache__/image_processing_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/modeling_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/modeling_tf_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/processing_sam.cpython-310.pyc,, +transformers/models/sam/configuration_sam.py,sha256=uUgPZxv3jV5YgW6ZNAGxCk2LomLnK6JqqPD1ZFX0PFk,13858 +transformers/models/sam/convert_sam_to_hf.py,sha256=bt3PXRVYpRlgu6Q7j5MoPredmVyY6t6xuOcfQlVCuSs,8542 +transformers/models/sam/image_processing_sam.py,sha256=8NImaUzsQDShcLgZG-NESbJY7_vgZ_3RooJPToRd37k,67313 +transformers/models/sam/modeling_sam.py,sha256=D0Ha4zH0g1O1nad5_WDsz4onPwA4onQKDIq0u07Obik,64760 +transformers/models/sam/modeling_tf_sam.py,sha256=8H4KOdytPwyeEnFIXiWNumo7Cb2EfYcmzH1NsFsCEgc,75549 +transformers/models/sam/processing_sam.py,sha256=qPln4ga6UimrOQ-nf7_ATDvn5L7q3xMEG7YQaXmHWjc,10930 +transformers/models/seamless_m4t/__init__.py,sha256=PRZMtfk0WN3i0ZSvQbv8wgqp4dOREyIvkgzx5obqn7I,3706 +transformers/models/seamless_m4t/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/configuration_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/convert_fairseq2_to_hf.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/feature_extraction_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/modeling_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/processing_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t_fast.cpython-310.pyc,, +transformers/models/seamless_m4t/configuration_seamless_m4t.py,sha256=0MjMfjtCGwB8bbWtLDhE1r_8zmtR7PIoh4zmlnWitBs,23568 +transformers/models/seamless_m4t/convert_fairseq2_to_hf.py,sha256=F2AQrS9rfpktVBSXvFLmND9gMtASSEOMlYPQ6v8VDdU,15960 +transformers/models/seamless_m4t/feature_extraction_seamless_m4t.py,sha256=pSStJq6iPGHLWGDiIWN-ZuGBmYSbTkT2ISrFK7Bj7W8,13561 +transformers/models/seamless_m4t/modeling_seamless_m4t.py,sha256=SQjQgeGDC-_fe23_jJprmhlilqmaqDcHo79ql5Nbpaw,201413 +transformers/models/seamless_m4t/processing_seamless_m4t.py,sha256=OrPvDJkAAIuoWglyxt1Z4H993tm-AyX3OxDcu4Gmps0,5893 
+transformers/models/seamless_m4t/tokenization_seamless_m4t.py,sha256=4xSGOSx3XzpeY0nCAt0MxB0abOH8MQWNhIjp4SVcN1Y,25999 +transformers/models/seamless_m4t/tokenization_seamless_m4t_fast.py,sha256=9x1jPIO7Bb_WmFeYsZuTBT_hGucbsJshD_tJ5RBHE68,19884 +transformers/models/seamless_m4t_v2/__init__.py,sha256=eIGJqmaWPYi--eaUhctnu8W9EIihWP-uJsOORWLKVxg,2159 +transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/configuration_seamless_m4t_v2.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/convert_fairseq2_to_hf.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/configuration_seamless_m4t_v2.py,sha256=JTbmVGjOBvUXbdYKiTMKRb2vhg6C4fn8DbgD4oXUvGY,24428 +transformers/models/seamless_m4t_v2/convert_fairseq2_to_hf.py,sha256=B3ChRBL4biKHRNsLhAKRsZ547XyxI1uwiywDUC6jKXo,15084 +transformers/models/seamless_m4t_v2/modeling_seamless_m4t_v2.py,sha256=xjvyUcchcgP-3UeLeCOfBZZzRJXZbYjbmrzZAyjF7zM,228147 +transformers/models/segformer/__init__.py,sha256=T1k_hhB2iCL8zOY3rcG9erX0JbBS--OgU27-G0ZxR2o,3676 +transformers/models/segformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/segformer/__pycache__/configuration_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/convert_segformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/segformer/__pycache__/feature_extraction_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/image_processing_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/modeling_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/modeling_tf_segformer.cpython-310.pyc,, +transformers/models/segformer/configuration_segformer.py,sha256=FaD33v_h9BpMpYLpBhN_w58U7SYcCqCf0ygaLJeYj04,7467 +transformers/models/segformer/convert_segformer_original_to_pytorch.py,sha256=UXWvoxIi_vor0L5yPuqD7wUuy-vzSNtypQcrpLkTZFc,17092 +transformers/models/segformer/feature_extraction_segformer.py,sha256=yaRckmbmTyh1Oow3PnHLsjW4MURaWqddhTzG-PVcywk,1207 +transformers/models/segformer/image_processing_segformer.py,sha256=KO7UmIFZ-4MchZSg6PE3bp1ERgvez5EF_52CnxQZ-Co,23364 +transformers/models/segformer/modeling_segformer.py,sha256=X93UhhYjUNpFtYil3yceHHHbAL9me8Jo565jgzU7pwc,35414 +transformers/models/segformer/modeling_tf_segformer.py,sha256=S2Sv3dyD2s7SorgzNI8AWSNzD5cpulGrTNPzHT-cEB8,43722 +transformers/models/seggpt/__init__.py,sha256=wJaoAc_RPANBcGRc6ErzsvLzxW1zKGRi6YWCxHq77y0,2284 +transformers/models/seggpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/configuration_seggpt.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/convert_seggpt_to_hf.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/image_processing_seggpt.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/modeling_seggpt.cpython-310.pyc,, +transformers/models/seggpt/configuration_seggpt.py,sha256=HN1eDgAOfj7pWQYuNdtaRw1TaWh8X41MER3ZcbDIL7A,6563 +transformers/models/seggpt/convert_seggpt_to_hf.py,sha256=IsB0yzLF9kH5Lz4oBFLpMOeDLdC-SKOYDtFZhcpL6iA,9779 +transformers/models/seggpt/image_processing_seggpt.py,sha256=wdcV4Fl_lhPZCop2Rw5R_xoVpWN5Zv_2LQO0XY10zKc,31163 +transformers/models/seggpt/modeling_seggpt.py,sha256=ytf4BmWo_WrOmWYMT74tX06jvw4CAFYhfvszAy_PlSI,45300 +transformers/models/sew/__init__.py,sha256=VG7sYJFBweKB5Cb9lzyRYdjeG0olDM7cIQIUy4XQR8M,1778 +transformers/models/sew/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/sew/__pycache__/configuration_sew.cpython-310.pyc,, +transformers/models/sew/__pycache__/convert_sew_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/sew/__pycache__/modeling_sew.cpython-310.pyc,, +transformers/models/sew/configuration_sew.py,sha256=Uuuteey5yStUZYZnJVO7l7-w0SdZ3jBoxv33BPPr26M,14277 +transformers/models/sew/convert_sew_original_pytorch_checkpoint_to_pytorch.py,sha256=TzlAoTl1DQUm3bhNxDlpXoxe-u1ZcMMbhrQsefGbFog,12745 +transformers/models/sew/modeling_sew.py,sha256=Rp11q4KUUzGo_rkwo9KTLBjzyqYEtxToEmFuLxNIRpk,53344 +transformers/models/sew_d/__init__.py,sha256=5d5VSrW-sTwr3H0e2js1KsRL7SM4GPiRPY9Hl_gVjWk,1804 +transformers/models/sew_d/__pycache__/__init__.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/configuration_sew_d.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/convert_sew_d_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/modeling_sew_d.cpython-310.pyc,, +transformers/models/sew_d/configuration_sew_d.py,sha256=AFcSDc3pw6Jwwwg-4WI5SXxoHSy_W7B9H993EiVjOgQ,16447 +transformers/models/sew_d/convert_sew_d_original_pytorch_checkpoint_to_pytorch.py,sha256=OeszH3N5vz1FbXoF-d-w6wDJ2A2MxvUMn9uDMpU7bro,13575 +transformers/models/sew_d/modeling_sew_d.py,sha256=IXvsgRJkhN8QFUx5r_LjJmc8j-TEIj1z7crS5S3BjNg,73708 +transformers/models/siglip/__init__.py,sha256=vuoROawTSIHtXkVVxhysxf-Cx7s3QCEMfvkUsJCxO7M,3124 +transformers/models/siglip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/siglip/__pycache__/configuration_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/convert_siglip_to_hf.cpython-310.pyc,, +transformers/models/siglip/__pycache__/image_processing_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/modeling_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/processing_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/tokenization_siglip.cpython-310.pyc,, +transformers/models/siglip/configuration_siglip.py,sha256=rrbWUFosgjjTsvOWjeMNueqIh5f3oEgg87zwxnwSYIw,13625 +transformers/models/siglip/convert_siglip_to_hf.py,sha256=Rg5BhRWVeIKxc9Dz0ZUCjhG3hasNtrORlIcOYoV7xS0,20830 +transformers/models/siglip/image_processing_siglip.py,sha256=wwUHCSEJENYaBPKyeSO3uEPixk4RQCVMFUhzrC2Q5BM,11775 +transformers/models/siglip/modeling_siglip.py,sha256=m_GDDo0NOE_O6_TgOnkoGJs0iQZ0au2McD54_ezKTUs,56299 +transformers/models/siglip/processing_siglip.py,sha256=x5A9CKyzNzOF0udXvMVQ4hMFBCbAdH-WnLAXqop75zk,7302 +transformers/models/siglip/tokenization_siglip.py,sha256=wflQIXvIGvoXsrTKeGGuZUo7kesIVnxFS5VBZvuiRfI,15953 +transformers/models/speech_encoder_decoder/__init__.py,sha256=987NzBteEbQy0IYY43B_JKolw2BbyX6Ox9s__xH0daQ,2037 +transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/configuration_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/convert_mbart_wav2vec2_seq2seq_original_to_pytorch.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/convert_speech_to_text_wav2vec2_seq2seq_original_to_pytorch.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/modeling_flax_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/modeling_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/configuration_speech_encoder_decoder.py,sha256=7hzCE73LcHbiq3b4pTsMdSwjtl4izOtoZE-ldVs8Bx4,4575 
+transformers/models/speech_encoder_decoder/convert_mbart_wav2vec2_seq2seq_original_to_pytorch.py,sha256=EtCwDPHsete4dhXGu8OwkbRx7-47vbHRKUrb8j-6M2c,14754 +transformers/models/speech_encoder_decoder/convert_speech_to_text_wav2vec2_seq2seq_original_to_pytorch.py,sha256=04swyKsxEHHieCLUFPKzubV4W0ES1mZtbkgv-UDt7po,11971 +transformers/models/speech_encoder_decoder/modeling_flax_speech_encoder_decoder.py,sha256=i8GFLLxYQSh2uj6IAZNkGglUOt5C3VbSNvevYsoqSOs,44643 +transformers/models/speech_encoder_decoder/modeling_speech_encoder_decoder.py,sha256=U064X5_0R8t-uuU6z1S3025DqGhgRF7wz3Rg4cg7Kx4,32266 +transformers/models/speech_to_text/__init__.py,sha256=y2bX48UezdcJd_0EyTBq6xLWHL0vup-noE235__AYw8,3491 +transformers/models/speech_to_text/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/configuration_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/convert_s2t_fairseq_to_tfms.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/feature_extraction_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_tf_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/processing_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/tokenization_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/configuration_speech_to_text.py,sha256=qlnx21XZwtdAt2HLOml5VKB2lme0nUbOvU6pvIxQH_s,9882 +transformers/models/speech_to_text/convert_s2t_fairseq_to_tfms.py,sha256=v-5aSPwuCKCtqwU8gREj9wA2nm14Z97tg6wQ3S47gos,4478 +transformers/models/speech_to_text/feature_extraction_speech_to_text.py,sha256=bW4mXxoo1FKXFhfvstyPbWm8fMRMN1G7KXwkGN-vdxw,13176 +transformers/models/speech_to_text/modeling_speech_to_text.py,sha256=EKAWOKIJEStDhWZRMhW6ay1USDff-rHLz02Zq7CZjig,64505 +transformers/models/speech_to_text/modeling_tf_speech_to_text.py,sha256=wsWLyBQcxrsVHXTQs0wZTJiHkTCdjyx1w5xs0C6Qw60,74423 +transformers/models/speech_to_text/processing_speech_to_text.py,sha256=dtDsYvPg-jn-O5iiVDPH5154wOEDglsODuF4dPn7XYc,4818 +transformers/models/speech_to_text/tokenization_speech_to_text.py,sha256=u9cXinU0FieTCD4LcmB6sU2lK5DYjkN8RyGU3seKOt4,11400 +transformers/models/speech_to_text_2/__init__.py,sha256=zkmS9-WZTXByVUJqkt094wHCOT4zyVLO4Rn3B0JBCSo,2166 +transformers/models/speech_to_text_2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/configuration_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/modeling_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/processing_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/tokenization_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/configuration_speech_to_text_2.py,sha256=UQJCW5VUlSN_ZLxK5OM4GpGZLcyUkNPTA9DcEMgafGk,6108 +transformers/models/speech_to_text_2/modeling_speech_to_text_2.py,sha256=ZBFbuStKESxeJBfnkI418I_L6D5R1h-gB7srUWTk8OI,44113 +transformers/models/speech_to_text_2/processing_speech_to_text_2.py,sha256=J3Uv4HX7Y5zndYa3ZIROcEuLEfrw2piJC53AZmSkGnY,4790 +transformers/models/speech_to_text_2/tokenization_speech_to_text_2.py,sha256=YwsmogjE2We9H6o5hDPvDIBH_BRq4xUu6uTmd7AGNEI,8403 +transformers/models/speecht5/__init__.py,sha256=rI6eMJ1n9U8Mtn17i83U2qOhvcOQJudmFYU9roGYUno,2971 +transformers/models/speecht5/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/speecht5/__pycache__/configuration_speecht5.cpython-310.pyc,,
+transformers/models/speecht5/__pycache__/convert_hifigan.cpython-310.pyc,,
+transformers/models/speecht5/__pycache__/convert_speecht5_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/speecht5/__pycache__/feature_extraction_speecht5.cpython-310.pyc,,
+transformers/models/speecht5/__pycache__/modeling_speecht5.cpython-310.pyc,,
+transformers/models/speecht5/__pycache__/number_normalizer.cpython-310.pyc,,
+transformers/models/speecht5/__pycache__/processing_speecht5.cpython-310.pyc,,
+transformers/models/speecht5/__pycache__/tokenization_speecht5.cpython-310.pyc,,
+transformers/models/speecht5/configuration_speecht5.py,sha256=l2YOkdso_mS0eJaELrbOsZmtndbAQejRep43DuRgsFw,23646
+transformers/models/speecht5/convert_hifigan.py,sha256=CL9GSX_bimjm_hU2rE55MaNvTUjTtWD6qCtqNMaXy7I,4241
+transformers/models/speecht5/convert_speecht5_original_pytorch_checkpoint_to_pytorch.py,sha256=AyAjaeibe3002YZRT2maq1Yi8-iP1j7Ahs5qxYMjiJ0,17194
+transformers/models/speecht5/feature_extraction_speecht5.py,sha256=lcKx3NaIXx0PGITRKP0kA8SZK75kd1Sn8PNHLBn-ST0,17809
+transformers/models/speecht5/modeling_speecht5.py,sha256=LGB_28sFN-mTFUti-G7ejvH2Hxnxgn6ic31YtDnyXEw,153388
+transformers/models/speecht5/number_normalizer.py,sha256=cxnEUdHSISW5eAo15cLuVkZa65zMFuMFaJ8zAOQCsAA,7019
+transformers/models/speecht5/processing_speecht5.py,sha256=smqFdqKJQp9Vm1FDfmj7EvJeAZKSPB6u2AZMfsjsQa0,7562
+transformers/models/speecht5/tokenization_speecht5.py,sha256=dTeIcz0oFUY8Rg7vCkCWuJQotKIcvNZWXj9DYPDtO9Q,8913
+transformers/models/splinter/__init__.py,sha256=vo990AmnOkGy7xWuzB4qaAfJNrtFFLOImR4mlSl_jJ8,2532
+transformers/models/splinter/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/splinter/__pycache__/configuration_splinter.cpython-310.pyc,,
+transformers/models/splinter/__pycache__/modeling_splinter.cpython-310.pyc,,
+transformers/models/splinter/__pycache__/tokenization_splinter.cpython-310.pyc,,
+transformers/models/splinter/__pycache__/tokenization_splinter_fast.cpython-310.pyc,,
+transformers/models/splinter/configuration_splinter.py,sha256=IcoRrF-fIuVO4fJc4Q0ufyyOOhJuUF8RgUrOvsyA-Fc,5695
+transformers/models/splinter/modeling_splinter.py,sha256=pM9_J0yHh_-HoN2HgWOE2ZcIOvrQMCrE5_4VuepLH30,53249
+transformers/models/splinter/tokenization_splinter.py,sha256=6y_XSErTLNRpI2faZTsiA5AJ6K-kOJVuhtUKv0flhvY,20920
+transformers/models/splinter/tokenization_splinter_fast.py,sha256=t-gbV9OTlANeZQ_XLiV5GYpp9qZW9i7VllaLKf47ztI,8565
+transformers/models/squeezebert/__init__.py,sha256=G8bhLM5DmRO6oIXmZT-W71i8hZK9589XpyLuwIs6W3M,2996
+transformers/models/squeezebert/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/squeezebert/__pycache__/configuration_squeezebert.cpython-310.pyc,,
+transformers/models/squeezebert/__pycache__/modeling_squeezebert.cpython-310.pyc,,
+transformers/models/squeezebert/__pycache__/tokenization_squeezebert.cpython-310.pyc,,
+transformers/models/squeezebert/__pycache__/tokenization_squeezebert_fast.cpython-310.pyc,,
+transformers/models/squeezebert/configuration_squeezebert.py,sha256=fLc9Szprw4a7p5Gi6b1TItqq9xccjprrolanKRXDgJI,7347
+transformers/models/squeezebert/modeling_squeezebert.py,sha256=FqEipgCvcfke3--lpcyJauaCZk2quBYCUqjfUO5Rkhw,45027
+transformers/models/squeezebert/tokenization_squeezebert.py,sha256=jTVxnrL-DWDzP7VopDyagQlSN161QuIto05TyUk9-z0,20893
+transformers/models/squeezebert/tokenization_squeezebert_fast.py,sha256=J22q1PJ-qa7ymcvvpmiX2ft2OxUDHi2Gdiny4rOiOZM,7819
+transformers/models/stablelm/__init__.py,sha256=DfGQ8YT2zSeiNRGOhIhypn-IFNOkXmqIt4BHzq8KnSU,1824
+transformers/models/stablelm/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/stablelm/__pycache__/configuration_stablelm.cpython-310.pyc,,
+transformers/models/stablelm/__pycache__/modeling_stablelm.cpython-310.pyc,,
+transformers/models/stablelm/configuration_stablelm.py,sha256=4gI6bRhKYMaI00wDpHnCiKpLzh6NpzMZB0gc4yRMyHo,9450
+transformers/models/stablelm/modeling_stablelm.py,sha256=rrD8f5ToAfVkwH_J3ZcQlaMm1DbI3CnQ-nvAmpYNYTw,65166
+transformers/models/starcoder2/__init__.py,sha256=qUoxxHVVueu5KFeV8LWAoMmtBfwnYVjA-pdoCnho7tQ,1851
+transformers/models/starcoder2/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/starcoder2/__pycache__/configuration_starcoder2.cpython-310.pyc,,
+transformers/models/starcoder2/__pycache__/modeling_starcoder2.cpython-310.pyc,,
+transformers/models/starcoder2/configuration_starcoder2.py,sha256=3H_EPAOpblcoJKB68G3SgDdSdM-jugjWP3Uwhf5Lqj4,6938
+transformers/models/starcoder2/modeling_starcoder2.py,sha256=MMrkh6uflnMxaMW7TJlSrU_iCsSnbsXP21jHcl1dSO0,63992
+transformers/models/superpoint/__init__.py,sha256=v0DSf2EqaAYJyCh2DMbwCXzVnPMF8SzuOUVqP4GOwV8,2334
+transformers/models/superpoint/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/superpoint/__pycache__/configuration_superpoint.cpython-310.pyc,,
+transformers/models/superpoint/__pycache__/convert_superpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/superpoint/__pycache__/image_processing_superpoint.cpython-310.pyc,,
+transformers/models/superpoint/__pycache__/modeling_superpoint.cpython-310.pyc,,
+transformers/models/superpoint/configuration_superpoint.py,sha256=ry1MX8YgMekp7XkPlpFFgKv3c7IXgdW_0RZeRTnDPNg,4205
+transformers/models/superpoint/convert_superpoint_to_pytorch.py,sha256=tO1P6yqW46LY1hnWIJPOs4KjW0uZWkiVWW-GTOXbJGg,7243
+transformers/models/superpoint/image_processing_superpoint.py,sha256=Om_ry5alSPtghMVDfFXI2CwDYRNm4siwZGYPmqdNFlE,12510
+transformers/models/superpoint/modeling_superpoint.py,sha256=e93BoVcM7xmyvIoGWB02wSW6WSE6ps0lLaByU0qqwDY,21535
+transformers/models/swiftformer/__init__.py,sha256=y3EVx2oOV5GldnIhqN1uK316Lf68wv3IsTE4HGd2DSc,1990
+transformers/models/swiftformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/swiftformer/__pycache__/configuration_swiftformer.cpython-310.pyc,,
+transformers/models/swiftformer/__pycache__/convert_swiftformer_original_to_hf.cpython-310.pyc,,
+transformers/models/swiftformer/__pycache__/modeling_swiftformer.cpython-310.pyc,,
+transformers/models/swiftformer/configuration_swiftformer.py,sha256=l5ZtRSZsELMkjsDQzY8q_fa07Hoa6p6p_fHveqABM6c,5303
+transformers/models/swiftformer/convert_swiftformer_original_to_hf.py,sha256=HsppMeVG__p-Z4sCLcGLnDhXP-AFe6ewWiifyEFL-xA,6239
+transformers/models/swiftformer/modeling_swiftformer.py,sha256=6nOd2r567sT-mC7SdBnGh75tTOZfAMkT5w43rUgwZh4,23089
+transformers/models/swin/__init__.py,sha256=lsSSO-igADN2rI7RV55GBIB-GG8mRQNnsT9A6J8IFtk,2703
+transformers/models/swin/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/swin/__pycache__/configuration_swin.cpython-310.pyc,,
+transformers/models/swin/__pycache__/convert_swin_simmim_to_pytorch.cpython-310.pyc,,
+transformers/models/swin/__pycache__/convert_swin_timm_to_pytorch.cpython-310.pyc,,
+transformers/models/swin/__pycache__/modeling_swin.cpython-310.pyc,,
+transformers/models/swin/__pycache__/modeling_tf_swin.cpython-310.pyc,,
+transformers/models/swin/configuration_swin.py,sha256=Ijjcs9LVMF3lZ8VlT4qRJYwrVHQfzsOd5xhDIAXMRls,8001
+transformers/models/swin/convert_swin_simmim_to_pytorch.py,sha256=Zb67GMulOozvN1L66EmQ9gKtLVUmyaWYgq_zPPdbGKs,6627
+transformers/models/swin/convert_swin_timm_to_pytorch.py,sha256=WKAiiEOxnv4_yjbLVsU9M50iwE_x0QEvbXrMZK1W_7Q,5805
+transformers/models/swin/modeling_swin.py,sha256=C0Gyrppbrkz-Zambvbkwkkz4Vs89tOXDazz00aF1cbM,60090
+transformers/models/swin/modeling_tf_swin.py,sha256=CvFCCZ6fnqqcWk1r8wTS6UuNRpFjZKsjRT8AyVapd5k,70774
+transformers/models/swin2sr/__init__.py,sha256=Nx5kG4ltMIhcqaGLYh7VYoju_qViNNYZGdGE0p-rz_4,2277
+transformers/models/swin2sr/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/swin2sr/__pycache__/configuration_swin2sr.cpython-310.pyc,,
+transformers/models/swin2sr/__pycache__/convert_swin2sr_original_to_pytorch.cpython-310.pyc,,
+transformers/models/swin2sr/__pycache__/image_processing_swin2sr.cpython-310.pyc,,
+transformers/models/swin2sr/__pycache__/modeling_swin2sr.cpython-310.pyc,,
+transformers/models/swin2sr/configuration_swin2sr.py,sha256=d8FEyDhHTyNREZZ9hj0JUSDtxt0110KkGmjdWPITC0k,6911
+transformers/models/swin2sr/convert_swin2sr_original_to_pytorch.py,sha256=eZ1q75t9Na8iF_KkMXK9hHb0O0KyX9Bv1JhO3r94ZLA,11355
+transformers/models/swin2sr/image_processing_swin2sr.py,sha256=9GDG_McVWO6VSAZd64WZkSij78wIlxAq2LYVmyyfeeU,9544
+transformers/models/swin2sr/modeling_swin2sr.py,sha256=ceR_m_Noodnro2p817Xzqn_RLGdGLBDUvXt-GiMG_1o,50720
+transformers/models/swinv2/__init__.py,sha256=wYBHIbUFdjRY2cLLBWgHOOvE1ZNk6UD6Hj2qYYR2i5Q,1921
+transformers/models/swinv2/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/swinv2/__pycache__/configuration_swinv2.cpython-310.pyc,,
+transformers/models/swinv2/__pycache__/convert_swinv2_timm_to_pytorch.cpython-310.pyc,,
+transformers/models/swinv2/__pycache__/modeling_swinv2.cpython-310.pyc,,
+transformers/models/swinv2/configuration_swinv2.py,sha256=C4XGeME3RYIsVagj4f73sgXIhBr7kmkL7unq97rNycA,7617
+transformers/models/swinv2/convert_swinv2_timm_to_pytorch.py,sha256=OMyAAcVPs9DTojiHQCvLo7uTtaChsd1ANTY4IkS7iUY,7687
+transformers/models/swinv2/modeling_swinv2.py,sha256=MGvMlW02iYjDLJx0S8Jjy6eJsCsN9bx5vNaQ9gwmxqc,63826
+transformers/models/switch_transformers/__init__.py,sha256=71GlCMK0XfSUSoxmTxWjj-vmLJImHjlJjtUWkptdalA,2484
+transformers/models/switch_transformers/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/switch_transformers/__pycache__/configuration_switch_transformers.cpython-310.pyc,,
+transformers/models/switch_transformers/__pycache__/convert_big_switch.cpython-310.pyc,,
+transformers/models/switch_transformers/__pycache__/convert_switch_transformers_original_flax_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/switch_transformers/__pycache__/modeling_switch_transformers.cpython-310.pyc,,
+transformers/models/switch_transformers/configuration_switch_transformers.py,sha256=n3leQIeLxJhl2gfXlRF5MNWiTKI4d736uBCQ1UUJVf0,9116
+transformers/models/switch_transformers/convert_big_switch.py,sha256=wjMGjHXAqVool6fZQhdG_Av2Ujx9EDoZrtHC8RdDLk4,7659
+transformers/models/switch_transformers/convert_switch_transformers_original_flax_checkpoint_to_pytorch.py,sha256=AAJNkPcr_THjPN_8RUnOdBYbbYc6GOqXdgdjhx9FZyw,7593
+transformers/models/switch_transformers/modeling_switch_transformers.py,sha256=pGKIFiCGvhyTNZ-WlgJDuP7IFbWH0i47AcznpnyXhpI,87663
+transformers/models/t5/__init__.py,sha256=-WUyKPr21y-Gi15sZ8aW3vmykCW8tu5qZ6yKmOcOHso,4492
+transformers/models/t5/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/t5/__pycache__/configuration_t5.cpython-310.pyc,,
+transformers/models/t5/__pycache__/convert_t5_original_tf_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/t5/__pycache__/convert_t5x_checkpoint_to_flax.cpython-310.pyc,,
+transformers/models/t5/__pycache__/convert_t5x_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/t5/__pycache__/modeling_flax_t5.cpython-310.pyc,,
+transformers/models/t5/__pycache__/modeling_t5.cpython-310.pyc,,
+transformers/models/t5/__pycache__/modeling_tf_t5.cpython-310.pyc,,
+transformers/models/t5/__pycache__/tokenization_t5.cpython-310.pyc,,
+transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-310.pyc,,
+transformers/models/t5/configuration_t5.py,sha256=qUYQqgNSrXX1ggcobIQKUYaTNPV-HNE0CrXN93vWiAQ,7366
+transformers/models/t5/convert_t5_original_tf_checkpoint_to_pytorch.py,sha256=83tKCwYRSRW7zXtm9cmszqtPhpw44cH8Cj0SWUSBgN0,2120
+transformers/models/t5/convert_t5x_checkpoint_to_flax.py,sha256=CET5s9wlNOt-VxT9eu-NOMdNS22kX6mhEZQ-ox2mLK0,10538
+transformers/models/t5/convert_t5x_checkpoint_to_pytorch.py,sha256=GTF0FYHDDDBl2tcYgHcirqHOI2KOE2YkDG4ekzjh_Ao,10483
+transformers/models/t5/modeling_flax_t5.py,sha256=QhELmI-3YNpbMz75xqrUxTLCrPgYowKh0pJVaiJvDCo,74166
+transformers/models/t5/modeling_t5.py,sha256=hA3NJN2Goy2qXXY7Rw0fOiyYZszhDSsgXfZ7RzPNwyY,108657
+transformers/models/t5/modeling_tf_t5.py,sha256=I8tf-3Fqmuvz2oY0_fgJ9eW5EMzgsx1cMd69zuJrE3Q,77178
+transformers/models/t5/tokenization_t5.py,sha256=i5JinhKDrDAAZAu4u5rDQkHTrT9H6r9mG2v-QmwWZ1s,20017
+transformers/models/t5/tokenization_t5_fast.py,sha256=3pG5tJIw9Kfz4B18U4NVjiNjnHNCB20sERqLv-EwgGk,10112
+transformers/models/table_transformer/__init__.py,sha256=WHdzgCB7BwXZeZveOSQ2fBQKNsrsRmpdP1f5C2MfYn4,2065
+transformers/models/table_transformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/table_transformer/__pycache__/configuration_table_transformer.cpython-310.pyc,,
+transformers/models/table_transformer/__pycache__/convert_table_transformer_to_hf.cpython-310.pyc,,
+transformers/models/table_transformer/__pycache__/convert_table_transformer_to_hf_no_timm.cpython-310.pyc,,
+transformers/models/table_transformer/__pycache__/modeling_table_transformer.cpython-310.pyc,,
+transformers/models/table_transformer/configuration_table_transformer.py,sha256=CqNTrOoDKW1sq64DPASZC2iZ_S_ez6sjzSTIY0Wr9As,13345
+transformers/models/table_transformer/convert_table_transformer_to_hf.py,sha256=ItWZNI8n3yj-0fP-kbly0kq8yrb7Bc5Nz2HeInHnPdA,15095
+transformers/models/table_transformer/convert_table_transformer_to_hf_no_timm.py,sha256=IJWfYRPya5zeVUqynktWlkiD7seeQdyU4kagQFXV4pU,21186
+transformers/models/table_transformer/modeling_table_transformer.py,sha256=0nyr-s-i2vHvIXyuTWm-eL_BsBA2ft4Zorht0bom3_0,95298
+transformers/models/tapas/__init__.py,sha256=uGhdu01xgzBDD5edwGpuFl94A2WmFd6FA_U2YWJZReA,2952
+transformers/models/tapas/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/tapas/__pycache__/configuration_tapas.cpython-310.pyc,,
+transformers/models/tapas/__pycache__/convert_tapas_original_tf_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/tapas/__pycache__/modeling_tapas.cpython-310.pyc,,
+transformers/models/tapas/__pycache__/modeling_tf_tapas.cpython-310.pyc,,
+transformers/models/tapas/__pycache__/tokenization_tapas.cpython-310.pyc,,
+transformers/models/tapas/configuration_tapas.py,sha256=OBJ7wt_KLCVdnONe5t3aDU8TawqPwBXL9ck3Zq6JN4k,12361
+transformers/models/tapas/convert_tapas_original_tf_checkpoint_to_pytorch.py,sha256=OeIyLEtDJr1z2BEKH0bJNJOR5ZrxRyGM8RpMSC3TgHQ,5049
+transformers/models/tapas/modeling_tapas.py,sha256=1MBm057_AUQUDHRHaFFaWjC4YzCvTeU8rAprTd4fE_w,110372
+transformers/models/tapas/modeling_tf_tapas.py,sha256=VOGS49RrTRyVf4cC9rSqESnxwlgK8VD53U-CjqeMIQU,112090
+transformers/models/tapas/tokenization_tapas.py,sha256=H3oMFMjQbcG0miRUN1Mb2GbLB8GgcTtjs3xPALbCIdU,117025
+transformers/models/time_series_transformer/__init__.py,sha256=dtXXYFY750gxXLggZYQWy2iaq88scX8TYl021UEZHVs,2069
+transformers/models/time_series_transformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/time_series_transformer/__pycache__/configuration_time_series_transformer.cpython-310.pyc,,
+transformers/models/time_series_transformer/__pycache__/modeling_time_series_transformer.cpython-310.pyc,,
+transformers/models/time_series_transformer/configuration_time_series_transformer.py,sha256=4R_JEPblfWt4O4uKeeb_QMa70E2clQE_31mU4uS-ZCk,11773
+transformers/models/time_series_transformer/modeling_time_series_transformer.py,sha256=V76hRofPT1LsDYQ5KY3lUP3KR26vG4YDDBj1H_hl5qg,88669
+transformers/models/timesformer/__init__.py,sha256=eugQ_QcHxuxaGByRRLWyZZ_0ic66Mcz5qdwW_Qt-Nyg,1862
+transformers/models/timesformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/timesformer/__pycache__/configuration_timesformer.cpython-310.pyc,,
+transformers/models/timesformer/__pycache__/convert_timesformer_to_pytorch.cpython-310.pyc,,
+transformers/models/timesformer/__pycache__/modeling_timesformer.cpython-310.pyc,,
+transformers/models/timesformer/configuration_timesformer.py,sha256=mc2SDsVJVLD0DyIZoSUHWVbFWZjyrzhJAkWqvF2yyAY,5638
+transformers/models/timesformer/convert_timesformer_to_pytorch.py,sha256=TjOfPbEC4oVb5tlOgU2m9g36OBizDEEjm0bbcZz6Mq8,10176
+transformers/models/timesformer/modeling_timesformer.py,sha256=pWWTP1SPSvaGPjwl91oIXZk_FImXJ54d5wFabm2ZNxA,35253
+transformers/models/timm_backbone/__init__.py,sha256=rn9y1wXicP1g6IiI_tSWu7fnt5q_x6hfu3g9yQvovEU,1624
+transformers/models/timm_backbone/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/timm_backbone/__pycache__/configuration_timm_backbone.cpython-310.pyc,,
+transformers/models/timm_backbone/__pycache__/modeling_timm_backbone.cpython-310.pyc,,
+transformers/models/timm_backbone/configuration_timm_backbone.py,sha256=PR-F13KbCSBdKgA8ASNh-gok8TLUFY1_7ke32AaasmA,3153
+transformers/models/timm_backbone/modeling_timm_backbone.py,sha256=AXDH5tWEWZYY7mTOWCwsiEvoImk-NdXBLw-EUEMqH4M,6614
+transformers/models/trocr/__init__.py,sha256=jevvndvNkGFaA2smYGtlhOnpGG5U6gIhmuwONgXNyeM,1818
+transformers/models/trocr/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/trocr/__pycache__/configuration_trocr.cpython-310.pyc,,
+transformers/models/trocr/__pycache__/convert_trocr_unilm_to_pytorch.cpython-310.pyc,,
+transformers/models/trocr/__pycache__/modeling_trocr.cpython-310.pyc,,
+transformers/models/trocr/__pycache__/processing_trocr.cpython-310.pyc,,
+transformers/models/trocr/configuration_trocr.py,sha256=O8vsr9zsshJLWLlyOXnhsukb93CbYmrYgCcD9U4ZP5c,6620
+transformers/models/trocr/convert_trocr_unilm_to_pytorch.py,sha256=7I6jyQ1hl9k_fweOgeMgKypDSSf4zL-7tjIoY09sprk,10166
+transformers/models/trocr/modeling_trocr.py,sha256=dpkbnfRc_KNxUk4WfxoYtcxmsJ5BsmvGs-jsnuQCJ1E,45377
+transformers/models/trocr/processing_trocr.py,sha256=-iyJv7DCOlG-iKtKhtKmgbQKyU4eGydKGJDeLmBFML4,5745
+transformers/models/tvlt/__init__.py,sha256=3hHJeODpJMJ9_06AAz0fAV7QCRljLoJcfXc69YypO9M,2687
+transformers/models/tvlt/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/tvlt/__pycache__/configuration_tvlt.cpython-310.pyc,,
+transformers/models/tvlt/__pycache__/feature_extraction_tvlt.cpython-310.pyc,,
+transformers/models/tvlt/__pycache__/image_processing_tvlt.cpython-310.pyc,,
+transformers/models/tvlt/__pycache__/modeling_tvlt.cpython-310.pyc,,
+transformers/models/tvlt/__pycache__/processing_tvlt.cpython-310.pyc,,
+transformers/models/tvlt/configuration_tvlt.py,sha256=DNVoahCkmhinPBR6ELUSZ3ydsYmd4J-gqoEsUCOFdfA,8718
+transformers/models/tvlt/feature_extraction_tvlt.py,sha256=peyeHHDn8S6X6bQIf3rWs4fWwPYSjabGC0f106x35W4,10555
+transformers/models/tvlt/image_processing_tvlt.py,sha256=D7MBYY1GG8_FRtnxy6UQ_dmeCJIVhZrf7GtzvOX1A80,20085
+transformers/models/tvlt/modeling_tvlt.py,sha256=7uX3H2gkh0nrZhiGkad-qBUODceNNSTq35R8uXctBrM,57371
+transformers/models/tvlt/processing_tvlt.py,sha256=JaLjfV68tRz-Ts55YzccFCltQO4yZDTNW6DAreychSQ,3506
+transformers/models/tvp/__init__.py,sha256=nMCJ05vKe35hpbNHygmLeBkYUXDH2ZZLB5U5Ij0DG6A,2366
+transformers/models/tvp/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/tvp/__pycache__/configuration_tvp.cpython-310.pyc,,
+transformers/models/tvp/__pycache__/image_processing_tvp.cpython-310.pyc,,
+transformers/models/tvp/__pycache__/modeling_tvp.cpython-310.pyc,,
+transformers/models/tvp/__pycache__/processing_tvp.cpython-310.pyc,,
+transformers/models/tvp/configuration_tvp.py,sha256=-ubk9NYjQwXVQTAebBSlYB1fpiKe1GYYa0xLgIeJm2E,10107
+transformers/models/tvp/image_processing_tvp.py,sha256=SiQUmjVpDimWZz_U-4U4rGX6iOw8Qh_WD5PZ5LAu70w,23178
+transformers/models/tvp/modeling_tvp.py,sha256=MxvrQ4iy4V1AApzRUsC5Cer0CudLYg55bcDgiK-17mQ,38791
+transformers/models/tvp/processing_tvp.py,sha256=6fJAgekPIOw95GpQ7b1_y76KGbC03upX9uH8XlbGdKE,6981
+transformers/models/udop/__init__.py,sha256=78SSiXPuOw6Y1OrVRWawWtLCcV3-vKqZLqKi7rWoQ4M,2864
+transformers/models/udop/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/udop/__pycache__/configuration_udop.cpython-310.pyc,,
+transformers/models/udop/__pycache__/convert_udop_to_hf.cpython-310.pyc,,
+transformers/models/udop/__pycache__/modeling_udop.cpython-310.pyc,,
+transformers/models/udop/__pycache__/processing_udop.cpython-310.pyc,,
+transformers/models/udop/__pycache__/tokenization_udop.cpython-310.pyc,,
+transformers/models/udop/__pycache__/tokenization_udop_fast.cpython-310.pyc,,
+transformers/models/udop/configuration_udop.py,sha256=qydT2Xc0oZ38m1MKJMVJwZqXLJUlPRFGwZZ8rgzyxVs,7747
+transformers/models/udop/convert_udop_to_hf.py,sha256=oPyHBW-tWHhWidgG9JGOl3e0s8vpF-xM1uZ8ecV-IEI,14414
+transformers/models/udop/modeling_udop.py,sha256=OWuvU145FRqbL-LAbLOkOFBHmYfomqUIpvGlFLAFIqg,94428
+transformers/models/udop/processing_udop.py,sha256=4r21EuC0M2gF5GAl9EuSiQ5l80sv7TjiEP_v6J5saqc,10119
+transformers/models/udop/tokenization_udop.py,sha256=hz6Ujnim0Ck6wBSjKAPNElw5gkW9EpprCHW5QRPE1Qw,71020
+transformers/models/udop/tokenization_udop_fast.py,sha256=kNTtZNUjVgGKgWeulE8Lq4hMAHtCWbRFPPU3hO65UpM,49159
+transformers/models/umt5/__init__.py,sha256=wcKbkdS_suuZCQs52Oz0lBegIa0QDSPZW2Q-XBpM3ns,1908
+transformers/models/umt5/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/umt5/__pycache__/configuration_umt5.cpython-310.pyc,,
+transformers/models/umt5/__pycache__/convert_umt5_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/umt5/__pycache__/modeling_umt5.cpython-310.pyc,,
+transformers/models/umt5/configuration_umt5.py,sha256=qXrQJ7Nwj1G6q8zo6UpqZ9HoVgbGDuOvdFUU63Qf7BA,7636
+transformers/models/umt5/convert_umt5_checkpoint_to_pytorch.py,sha256=mKcFjDTUYzC4S2faD9UMTQTIl5nwGbOp4QkcFxEEdv8,12070
+transformers/models/umt5/modeling_umt5.py,sha256=UZ7AFCi3sYo1ilhRVPADZ7R_RqxhW1R3xhg0akfbiII,86424
+transformers/models/unispeech/__init__.py,sha256=n4jtlc-pPF37uUx7mgB1GDnL2lQ-eKDI8xOLVVp840E,2018
+transformers/models/unispeech/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/unispeech/__pycache__/configuration_unispeech.cpython-310.pyc,,
+transformers/models/unispeech/__pycache__/convert_unispeech_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/unispeech/__pycache__/modeling_unispeech.cpython-310.pyc,,
+transformers/models/unispeech/configuration_unispeech.py,sha256=1tUJoio0_kMqDZFGf9-P30P_IzWk8DJcW4VY4Z1jJnA,17556
+transformers/models/unispeech/convert_unispeech_original_pytorch_checkpoint_to_pytorch.py,sha256=bwfIAusfhFih5WJEIIokApShfuYhJoirPltvRz2-T7Y,11340
+transformers/models/unispeech/modeling_unispeech.py,sha256=9ACsIJ7h3ys4izC483Pyg8t9Oe8BFALEgeYsdXYivIk,72582
+transformers/models/unispeech_sat/__init__.py,sha256=gAf8t9qZaufCDyIyJICzCQTvrmV825BDZUKQoa08DhE,2267
+transformers/models/unispeech_sat/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/unispeech_sat/__pycache__/configuration_unispeech_sat.cpython-310.pyc,,
+transformers/models/unispeech_sat/__pycache__/convert_unispeech_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/unispeech_sat/__pycache__/convert_unispeech_sat_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/unispeech_sat/__pycache__/modeling_unispeech_sat.cpython-310.pyc,,
+transformers/models/unispeech_sat/configuration_unispeech_sat.py,sha256=eFDK8KQX0hkBC1EIhbiU1H2pGw3KlgRpXUWnRQD-Dvo,18902
+transformers/models/unispeech_sat/convert_unispeech_original_s3prl_checkpoint_to_pytorch.py,sha256=CnSYjNr7S7Mqa7Feosf9Dx7eQTYScVHG-QprNkY8uLk,4870
+transformers/models/unispeech_sat/convert_unispeech_sat_original_pytorch_checkpoint_to_pytorch.py,sha256=NK_vA71Eq2q9P1x3ol-2Jlqjkv-Mi3NlXO9Ra7QUQsQ,9289
+transformers/models/unispeech_sat/modeling_unispeech_sat.py,sha256=NKUhfHTttkSEfnmCoxPeHRstxh6mTkdIWZy1kf1Z_IQ,86732
+transformers/models/univnet/__init__.py,sha256=aeEydP4QFet-MOxxwOZMKE-jGUG1spoCfXwMmESP27Y,1842
+transformers/models/univnet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/univnet/__pycache__/configuration_univnet.cpython-310.pyc,,
+transformers/models/univnet/__pycache__/convert_univnet.cpython-310.pyc,,
+transformers/models/univnet/__pycache__/feature_extraction_univnet.cpython-310.pyc,,
+transformers/models/univnet/__pycache__/modeling_univnet.cpython-310.pyc,,
+transformers/models/univnet/configuration_univnet.py,sha256=eCemBN3MVYdqCoS4svOqO_R5hh8PhY4zVf_Ncqzcw94,6828
+transformers/models/univnet/convert_univnet.py,sha256=R2gqXfz8Oq2rwIUU01V7T_oSoDGG2A4Gety-R80Yn24,6364
+transformers/models/univnet/feature_extraction_univnet.py,sha256=snAVdQ5ClFX_Sw7upgvWyzJq4bUNRelRQaxcWxgHIgA,22821
+transformers/models/univnet/modeling_univnet.py,sha256=DaGTSHqwEXfp-nuUcCxbtaKVVS9s5jQFgAQ3uY40_n4,26874
+transformers/models/upernet/__init__.py,sha256=z2avy6tP_WpANiGPA5RCxT_9yPp0PfEDlfUjL9rQsXM,1535
+transformers/models/upernet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/upernet/__pycache__/configuration_upernet.cpython-310.pyc,,
+transformers/models/upernet/__pycache__/convert_convnext_upernet_to_pytorch.cpython-310.pyc,,
+transformers/models/upernet/__pycache__/convert_swin_upernet_to_pytorch.cpython-310.pyc,,
+transformers/models/upernet/__pycache__/modeling_upernet.cpython-310.pyc,,
+transformers/models/upernet/configuration_upernet.py,sha256=SoforpobnR_iSTAHHWAOON_zUZ8F5674SqjDMVyy2Ts,6719
+transformers/models/upernet/convert_convnext_upernet_to_pytorch.py,sha256=l_CJoXwANEE9rm5mwpHwbusIoJLmN8jNGjxsj6WhZrk,10271
+transformers/models/upernet/convert_swin_upernet_to_pytorch.py,sha256=lHV8SE_bZnxOo-zEJ21S2nY449uPVc3bpcl2JGKNEjA,14026
+transformers/models/upernet/modeling_upernet.py,sha256=_D-8NctwkTf3hfuAhawE3DEw_WjeOF8C31MkBLhJXWQ,17136
+transformers/models/videomae/__init__.py,sha256=Yrv0_yOkvyL6slti-bw1oFR8t8VO8-6b40yF0Lf2uV4,2519
+transformers/models/videomae/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/videomae/__pycache__/configuration_videomae.cpython-310.pyc,,
+transformers/models/videomae/__pycache__/convert_videomae_to_pytorch.cpython-310.pyc,,
+transformers/models/videomae/__pycache__/feature_extraction_videomae.cpython-310.pyc,,
+transformers/models/videomae/__pycache__/image_processing_videomae.cpython-310.pyc,,
+transformers/models/videomae/__pycache__/modeling_videomae.cpython-310.pyc,,
+transformers/models/videomae/configuration_videomae.py,sha256=shuybmQw9SgQTm4r8TokU0kunxMqwhYgAnXHFbFhxbQ,6670
+transformers/models/videomae/convert_videomae_to_pytorch.py,sha256=rq2nT2ZJekra1G38kM2DH_qOvcZBDQFNgbCvH3mKZjY,13989
+transformers/models/videomae/feature_extraction_videomae.py,sha256=Hg5wmFhkbncqR3nfvtevV6msaUEqvLBf4mtO4aICYTI,1200
+transformers/models/videomae/image_processing_videomae.py,sha256=yMZGcXFd8YmK1uwf9tqOFtvild9yOAf8rJeXVxX3oNo,17000
+transformers/models/videomae/modeling_videomae.py,sha256=6xS4RxjkkQD-lXg4ZUwo4N_zVT_oa24pmEozn3mIE18,47382
+transformers/models/vilt/__init__.py,sha256=-fruuGWD0urXmb7STgXnrF3QY8J6Z6lfJuTneeL_BsM,2788
+transformers/models/vilt/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vilt/__pycache__/configuration_vilt.cpython-310.pyc,,
+transformers/models/vilt/__pycache__/convert_vilt_original_to_pytorch.cpython-310.pyc,,
+transformers/models/vilt/__pycache__/feature_extraction_vilt.cpython-310.pyc,,
+transformers/models/vilt/__pycache__/image_processing_vilt.cpython-310.pyc,,
+transformers/models/vilt/__pycache__/modeling_vilt.cpython-310.pyc,,
+transformers/models/vilt/__pycache__/processing_vilt.cpython-310.pyc,,
+transformers/models/vilt/configuration_vilt.py,sha256=HxGlUnrFYqyePPCmDqdSKBaJTm-FlLeU1h5SR612N9E,6885
+transformers/models/vilt/convert_vilt_original_to_pytorch.py,sha256=IUSgkjLMZRUBuozW7OzL6TtD_jkO7ZfH51H6x6Qgjdk,12882
+transformers/models/vilt/feature_extraction_vilt.py,sha256=dC0Glwc_rDX7zqp8BxRtzaLogQGI4I4CjQCgxU7UORw,1172
+transformers/models/vilt/image_processing_vilt.py,sha256=9U68LczTq1t6iLBT46MCGeQ5PHJxcb9zjmgJHFtN8qg,23619
+transformers/models/vilt/modeling_vilt.py,sha256=LJte9B6mu9yuXmlm_oxaYLBJMG3yrP4MwueehfxiYD8,64971
+transformers/models/vilt/processing_vilt.py,sha256=0iOal8dCaE7JCQlZjbJ1-sHGxpDPZgUkMowEbxFRF2Q,6079
+transformers/models/vipllava/__init__.py,sha256=6lR_RtZD-Jzj6ZMOjo3JYuFRaBjVKmXquzPOB38z33k,1740
+transformers/models/vipllava/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vipllava/__pycache__/configuration_vipllava.cpython-310.pyc,,
+transformers/models/vipllava/__pycache__/convert_vipllava_weights_to_hf.cpython-310.pyc,,
+transformers/models/vipllava/__pycache__/modeling_vipllava.cpython-310.pyc,,
+transformers/models/vipllava/configuration_vipllava.py,sha256=UV6zEz55NxeJvaq4SgIdNbTqGymDgJkISbDQAxgOyXc,5813
+transformers/models/vipllava/convert_vipllava_weights_to_hf.py,sha256=u64-lOXDE0JMGhkGYJEtyrOh3gpeJtxSDC_dC08mc2c,4794
+transformers/models/vipllava/modeling_vipllava.py,sha256=YK7xeHpgR0m7muwu1a-VISpySVtJ0ERmv0s6Fc9TnBQ,29871
+transformers/models/vision_encoder_decoder/__init__.py,sha256=IRQsS-4Bz-cm6B97rSoeC62Z1l1wns0XVDZwBn1KBIU,2627
+transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vision_encoder_decoder/__pycache__/configuration_vision_encoder_decoder.cpython-310.pyc,,
+transformers/models/vision_encoder_decoder/__pycache__/modeling_flax_vision_encoder_decoder.cpython-310.pyc,,
+transformers/models/vision_encoder_decoder/__pycache__/modeling_tf_vision_encoder_decoder.cpython-310.pyc,,
+transformers/models/vision_encoder_decoder/__pycache__/modeling_vision_encoder_decoder.cpython-310.pyc,,
+transformers/models/vision_encoder_decoder/configuration_vision_encoder_decoder.py,sha256=6x7tdTBOrsvKOMy12NCtbPatY2qaqOJaVIGGxy3uPDw,8273
+transformers/models/vision_encoder_decoder/modeling_flax_vision_encoder_decoder.py,sha256=q2Tzd_KS4rB81YZk3zzb3KjtghP6vaPY4Snz_Kh52qQ,41535
+transformers/models/vision_encoder_decoder/modeling_tf_vision_encoder_decoder.py,sha256=-7ASqN2Qu4Ehcwr0WF0MTnrb28Fj3fCGFzGinhuQXak,36239
+transformers/models/vision_encoder_decoder/modeling_vision_encoder_decoder.py,sha256=TN-V-wtM_3E9psma7_p-GWcJL8nB4wHmhXDB_cMbKAY,34606
+transformers/models/vision_text_dual_encoder/__init__.py,sha256=kULrtY2Ie2eigdn63xnoEqRUlmKm31D9mUCJs4F62Lo,2730
+transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vision_text_dual_encoder/__pycache__/configuration_vision_text_dual_encoder.cpython-310.pyc,,
+transformers/models/vision_text_dual_encoder/__pycache__/modeling_flax_vision_text_dual_encoder.cpython-310.pyc,,
+transformers/models/vision_text_dual_encoder/__pycache__/modeling_tf_vision_text_dual_encoder.cpython-310.pyc,,
+transformers/models/vision_text_dual_encoder/__pycache__/modeling_vision_text_dual_encoder.cpython-310.pyc,,
+transformers/models/vision_text_dual_encoder/__pycache__/processing_vision_text_dual_encoder.cpython-310.pyc,,
+transformers/models/vision_text_dual_encoder/configuration_vision_text_dual_encoder.py,sha256=E7pT_zGc0uq9uzfKSBE6QiYjgSAotq0zYuC1bnzE5F0,4895
+transformers/models/vision_text_dual_encoder/modeling_flax_vision_text_dual_encoder.py,sha256=JP4ppqdIEvRfbpCtf0b3bJQcURI8YVvyTHe8wDRCRJg,26314
+transformers/models/vision_text_dual_encoder/modeling_tf_vision_text_dual_encoder.py,sha256=stdg94SN9NhHPelgqWBOJt-X7c4fBohXcBhDIl_TE68,28641
+transformers/models/vision_text_dual_encoder/modeling_vision_text_dual_encoder.py,sha256=kqB-zueOo28U1qXKRoR1njEyX6xRm45r0faBUKYH4wQ,24939
+transformers/models/vision_text_dual_encoder/processing_vision_text_dual_encoder.py,sha256=Wxw-ShdBxDkWK76hcJjHrvySp-uW0yrTvoqWouovhy8,6929
+transformers/models/visual_bert/__init__.py,sha256=OSQEpz1R0NjH9WvGkfsXKq_9LJTGfrHscqYd2xl9S_4,2235
+transformers/models/visual_bert/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/visual_bert/__pycache__/configuration_visual_bert.cpython-310.pyc,,
+transformers/models/visual_bert/__pycache__/convert_visual_bert_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/visual_bert/__pycache__/modeling_visual_bert.cpython-310.pyc,,
+transformers/models/visual_bert/configuration_visual_bert.py,sha256=jpeHAEPrt9V8wm7SwFld7ekTGkvGOZoRaYANBLEu0nA,6838
+transformers/models/visual_bert/convert_visual_bert_original_pytorch_checkpoint_to_pytorch.py,sha256=BpXgEZ-5LdGIa0NK6BDZd_5VhKCqeWuu2oOQyUqcSRQ,5158
+transformers/models/visual_bert/modeling_visual_bert.py,sha256=KASEThu9KrqrNcC0m9klG8Xqy3reEHMamy_dXEIQjxo,69279
+transformers/models/vit/__init__.py,sha256=Kw3Pan4rUcu6RQsA7u-DpxMlmbzdmrA7GA3ha3nYO5k,3598
+transformers/models/vit/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vit/__pycache__/configuration_vit.cpython-310.pyc,,
+transformers/models/vit/__pycache__/convert_dino_to_pytorch.cpython-310.pyc,,
+transformers/models/vit/__pycache__/convert_vit_timm_to_pytorch.cpython-310.pyc,,
+transformers/models/vit/__pycache__/feature_extraction_vit.cpython-310.pyc,,
+transformers/models/vit/__pycache__/image_processing_vit.cpython-310.pyc,,
+transformers/models/vit/__pycache__/modeling_flax_vit.cpython-310.pyc,,
+transformers/models/vit/__pycache__/modeling_tf_vit.cpython-310.pyc,,
+transformers/models/vit/__pycache__/modeling_vit.cpython-310.pyc,,
+transformers/models/vit/configuration_vit.py,sha256=EU6gd1A2VfAlpr3LEVl_iIGCgKNfuH99psLBaxDCNOw,5708
+transformers/models/vit/convert_dino_to_pytorch.py,sha256=CIkbWDBEgW5jmSWWoPZOosLLqCFiUz8oYgnj48JdtSM,8854
+transformers/models/vit/convert_vit_timm_to_pytorch.py,sha256=LY_UklTkw47xwnCcY8AzVFH-6g5B8t3GTuQ0PbyZyn0,10890
+transformers/models/vit/feature_extraction_vit.py,sha256=R-W_HNOybSpKxKGKfo4iDB4zGTRHeW1cq-29iwnbVl4,1165
+transformers/models/vit/image_processing_vit.py,sha256=ZTVKB_q7T0qGYcQG6VnMVxzog4VUwTKiC10-nFrUoyY,14185
+transformers/models/vit/modeling_flax_vit.py,sha256=KsTqlse5b5euRgYXhrXoNqCNvo0LEPBGuU_b0uNO0yo,25340
+transformers/models/vit/modeling_tf_vit.py,sha256=Ycwa5F6KssyHFtmpVhbOxq2XY1q36PX8wzOVGrOlgqA,37328
+transformers/models/vit/modeling_vit.py,sha256=QC7XTMBhtUyv1Csn2SrCOxB_kI7BsaNiFm65in4G75s,35594
+transformers/models/vit_hybrid/__init__.py,sha256=kJffDq49Rz34fkQnLISzCp18xqXkVFOIWciOsZMjc2I,2316
+transformers/models/vit_hybrid/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vit_hybrid/__pycache__/configuration_vit_hybrid.cpython-310.pyc,,
+transformers/models/vit_hybrid/__pycache__/convert_vit_hybrid_timm_to_pytorch.cpython-310.pyc,,
+transformers/models/vit_hybrid/__pycache__/image_processing_vit_hybrid.cpython-310.pyc,,
+transformers/models/vit_hybrid/__pycache__/modeling_vit_hybrid.cpython-310.pyc,,
+transformers/models/vit_hybrid/configuration_vit_hybrid.py,sha256=zqfiKBj316i2Bapp2cCnIJM28XStCBVXuM6fhnVJwSs,8330
+transformers/models/vit_hybrid/convert_vit_hybrid_timm_to_pytorch.py,sha256=MymDN5E1N5g1g5k0mK0M-F2VeYy_Me-hRWdVNTRFocA,13413
+transformers/models/vit_hybrid/image_processing_vit_hybrid.py,sha256=aeHej-2dOTuFlDFTObIZH8hG1HXoefpyQPgH3owjo9A,16390
+transformers/models/vit_hybrid/modeling_vit_hybrid.py,sha256=Q4TUzxCKvFQCh2YmS7_WA6s-cwj53X4NtZ99imU3e90,31865
+transformers/models/vit_mae/__init__.py,sha256=-w9MTkUgGkYCX6q37upqBk7x-8g247YxYGVVAEJkIzk,2428
+transformers/models/vit_mae/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vit_mae/__pycache__/configuration_vit_mae.cpython-310.pyc,,
+transformers/models/vit_mae/__pycache__/convert_vit_mae_to_pytorch.cpython-310.pyc,,
+transformers/models/vit_mae/__pycache__/modeling_tf_vit_mae.cpython-310.pyc,,
+transformers/models/vit_mae/__pycache__/modeling_vit_mae.cpython-310.pyc,,
+transformers/models/vit_mae/configuration_vit_mae.py,sha256=1fEivwPlBPZdnAt-CMbSHCkocm1NFjkn4-BJ2ar2aaY,6443
+transformers/models/vit_mae/convert_vit_mae_to_pytorch.py,sha256=Nj4Y5LS8H7xbyWNeLE9Vn0NFyXSQQYEcj1QQMzN1Hdg,7516
+transformers/models/vit_mae/modeling_tf_vit_mae.py,sha256=QBtXTmOdrC21lPOajqx6WCWKl2JVeDpeUM31oUFMYJ8,52979
+transformers/models/vit_mae/modeling_vit_mae.py,sha256=jidR9wteggmQ3Km2l8mO9W6bKkPlx_cWHNqw1vZdau8,42771
+transformers/models/vit_msn/__init__.py,sha256=4VVe0aSuBzHjTg4X2nuVet-9DgD5_dWlFkbLAr4bilc,1783
+transformers/models/vit_msn/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vit_msn/__pycache__/configuration_vit_msn.cpython-310.pyc,,
+transformers/models/vit_msn/__pycache__/convert_msn_to_pytorch.cpython-310.pyc,,
+transformers/models/vit_msn/__pycache__/modeling_vit_msn.cpython-310.pyc,,
+transformers/models/vit_msn/configuration_vit_msn.py,sha256=pJwLjgbPa6005C1Dsu6okI9xtjZ8c30Ktbx5Rz2D1r8,4936
+transformers/models/vit_msn/convert_msn_to_pytorch.py,sha256=1xBjqvbviFkGxhi_xq2956R7qZpFEBdKPNOQYb-SoIA,9841
+transformers/models/vit_msn/modeling_vit_msn.py,sha256=uQBGkcoCD2fEYkkUKRQuYBKJOowraBF4En3nvx503PQ,29649
+transformers/models/vitdet/__init__.py,sha256=Vaafapb4IUbKPzQUqPjhX6nvt14CTKlV51QneeQpTmc,1764
+transformers/models/vitdet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vitdet/__pycache__/configuration_vitdet.cpython-310.pyc,,
+transformers/models/vitdet/__pycache__/modeling_vitdet.cpython-310.pyc,,
+transformers/models/vitdet/configuration_vitdet.py,sha256=lg4mW4J0xANSdpwR-RFxnnwtea04yn1n_cojrlUXbDg,7612
+transformers/models/vitdet/modeling_vitdet.py,sha256=1pfKZly7hJQwqzteIg-0GoIKL_V_6YvbTBnimwrdn4o,34925
+transformers/models/vitmatte/__init__.py,sha256=tl-h8_VOAHRT7VtJJJ-SFSl5lkHxfVEdDaCtm4ksJIg,2239
+transformers/models/vitmatte/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vitmatte/__pycache__/configuration_vitmatte.cpython-310.pyc,,
+transformers/models/vitmatte/__pycache__/convert_vitmatte_to_hf.cpython-310.pyc,,
+transformers/models/vitmatte/__pycache__/image_processing_vitmatte.cpython-310.pyc,,
+transformers/models/vitmatte/__pycache__/modeling_vitmatte.cpython-310.pyc,,
+transformers/models/vitmatte/configuration_vitmatte.py,sha256=znJb9kFDk2x-5qTzSNJXP-snhRz8aefBcjO3mJFYgxw,6443
+transformers/models/vitmatte/convert_vitmatte_to_hf.py,sha256=1xctm78nmCLelPMqGJepxSyq5saKgA4by5CTzyxRPvc,6404
+transformers/models/vitmatte/image_processing_vitmatte.py,sha256=xeHDZXC_dJIBwbCt93GZlIJSMtgCaKZTyni3TiITGl8,13844
+transformers/models/vitmatte/modeling_vitmatte.py,sha256=xD57245zAanRx2O_N3dVHj6GFY8Ju03rcZp7fbZyH0M,12824
+transformers/models/vits/__init__.py,sha256=JoVFhlJ0-hhxN3ND-JsESyEcsihDbT6j0WPmIH9DjCA,1887
+transformers/models/vits/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vits/__pycache__/configuration_vits.cpython-310.pyc,,
+transformers/models/vits/__pycache__/convert_original_checkpoint.cpython-310.pyc,,
+transformers/models/vits/__pycache__/modeling_vits.cpython-310.pyc,,
+transformers/models/vits/__pycache__/tokenization_vits.cpython-310.pyc,,
+transformers/models/vits/configuration_vits.py,sha256=CveTctkJe70Jj99XrxpEYZHtWHHRQDWKHOryGwWgkiA,13955
+transformers/models/vits/convert_original_checkpoint.py,sha256=N6rRzBaJlMxRwT7u33kUyJKy-4fFTWTD6nu_RTTOGt0,18610
+transformers/models/vits/modeling_vits.py,sha256=cAOA8IBL_kwvhlSl7hJuSAtQ8UrPXZFr-N9R_j4xmN0,66229
+transformers/models/vits/tokenization_vits.py,sha256=jpRsDM97USMwMqZOy_Uf5DriSRkRtc5wG4CcBGcjq_A,8951
+transformers/models/vivit/__init__.py,sha256=Ajx0pvLrGGMBJruIaFHvqJiQyAM9BI9qLRi-5kyRT10,2441
+transformers/models/vivit/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/vivit/__pycache__/configuration_vivit.cpython-310.pyc,,
+transformers/models/vivit/__pycache__/convert_vivit_flax_to_pytorch.cpython-310.pyc,,
+transformers/models/vivit/__pycache__/image_processing_vivit.cpython-310.pyc,,
+transformers/models/vivit/__pycache__/modeling_vivit.cpython-310.pyc,,
+transformers/models/vivit/configuration_vivit.py,sha256=RLmQO66YtE6Y7dTp0zNzoYHeW2r0ocYbEmXSIkd-U9E,5212
+transformers/models/vivit/convert_vivit_flax_to_pytorch.py,sha256=yIwLQOx8eT-8AuYf_3KTfLwabCBdC1z_Z0WZDr4a7mM,9111
+transformers/models/vivit/image_processing_vivit.py,sha256=T9A7bohqmXrFK3rab9KMhtXnIQpUoFHod5GMmZPQLGw,19552
+transformers/models/vivit/modeling_vivit.py,sha256=bOUA9ovkebm7dNsv0QRkKRfNHoNTGm0R7SyGoZGTYDY,29977
+transformers/models/wav2vec2/__init__.py,sha256=eN9LbGY56T2Kz38zw3ChsiOkOHprtc4CgQjT8DSrUds,4139
+transformers/models/wav2vec2/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/configuration_wav2vec2.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/convert_wav2vec2_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/convert_wav2vec2_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/feature_extraction_wav2vec2.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/modeling_flax_wav2vec2.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/modeling_tf_wav2vec2.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/modeling_wav2vec2.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/processing_wav2vec2.cpython-310.pyc,,
+transformers/models/wav2vec2/__pycache__/tokenization_wav2vec2.cpython-310.pyc,,
+transformers/models/wav2vec2/configuration_wav2vec2.py,sha256=197c8uGexJC97Lzt9jFU5xzFdXCnDImJwSr81-LROdg,20149
+transformers/models/wav2vec2/convert_wav2vec2_original_pytorch_checkpoint_to_pytorch.py,sha256=hhc_QSStY43_pj4bIQf0TUWfiJo1KGkPuMTl16dP-ng,14293
+transformers/models/wav2vec2/convert_wav2vec2_original_s3prl_checkpoint_to_pytorch.py,sha256=CMjcWPEsvvPpX-OlMUJQxHNDErbJbDVqVSCoqo-9hDk,4838
+transformers/models/wav2vec2/feature_extraction_wav2vec2.py,sha256=D-yqFIpwjn_7LYJUmdnelRsn4qsoUrkZGX4Qsp5Y9CY,11511
+transformers/models/wav2vec2/modeling_flax_wav2vec2.py,sha256=iLm6d5m0LYQs0qKqg3Tdx7I6vgCB5QCmFY6MYrKu0RA,57331
+transformers/models/wav2vec2/modeling_tf_wav2vec2.py,sha256=D1izr4FsmoI_9NIDaQZ6M6AzPK7XvbMHG8zE00EUfRI,78708
+transformers/models/wav2vec2/modeling_wav2vec2.py,sha256=UmQRi14Gah0qPvR7pSLKc0rxNfD4hvJ4BxgVpkARKLQ,106697
+transformers/models/wav2vec2/processing_wav2vec2.py,sha256=82JBzFgQxV5ZQgRYmMj3gqf3pxL8Q8nfdwnhsuUUZjU,7137
+transformers/models/wav2vec2/tokenization_wav2vec2.py,sha256=ck1JrMUbJ06enzXRHaP-qFbAOWO6AdYUc1Bj116arFw,38384
+transformers/models/wav2vec2_bert/__init__.py,sha256=yBuhwgvNayh1tKpyXnLCSmw877fgVbtI16Xag8BK6Wo,2300
+transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/wav2vec2_bert/__pycache__/configuration_wav2vec2_bert.cpython-310.pyc,,
+transformers/models/wav2vec2_bert/__pycache__/convert_wav2vec2_seamless_checkpoint.cpython-310.pyc,,
+transformers/models/wav2vec2_bert/__pycache__/modeling_wav2vec2_bert.cpython-310.pyc,,
+transformers/models/wav2vec2_bert/__pycache__/processing_wav2vec2_bert.cpython-310.pyc,,
+transformers/models/wav2vec2_bert/configuration_wav2vec2_bert.py,sha256=O38bgI-_nHVAjD7HZdazAit6da0SEdjEwE1Ow8TjaI4,18182
+transformers/models/wav2vec2_bert/convert_wav2vec2_seamless_checkpoint.py,sha256=MFwGdbwNt4jDlGDG6cc9T5PhKEd-PjFMUOci533PLG8,7420
+transformers/models/wav2vec2_bert/modeling_wav2vec2_bert.py,sha256=EJswXn0y88fJkdGBVn3NszOUGxF9nG_L-PMHhO4kepU,74577
+transformers/models/wav2vec2_bert/processing_wav2vec2_bert.py,sha256=DWMQCIdzOHFXFQA8ReGS-HLHfQYhUTpuj7jLMHZ8th0,7449
+transformers/models/wav2vec2_conformer/__init__.py,sha256=w6Z-Rd5ONNTFI-ioN5VvNPhW842-_rKASoHN6lGeJx4,2375
+transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/wav2vec2_conformer/__pycache__/configuration_wav2vec2_conformer.cpython-310.pyc,,
+transformers/models/wav2vec2_conformer/__pycache__/convert_wav2vec2_conformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/wav2vec2_conformer/__pycache__/modeling_wav2vec2_conformer.cpython-310.pyc,,
+transformers/models/wav2vec2_conformer/configuration_wav2vec2_conformer.py,sha256=m9LH8bwBZl9DRD2bkVgrCsZsUPqAuGbqhzfvkSjrPMk,20961
+transformers/models/wav2vec2_conformer/convert_wav2vec2_conformer_original_pytorch_checkpoint_to_pytorch.py,sha256=D8rojgR8DRaqVTZwYXd2qykIKlKf7EnMM6h3PzYPS0M,13382
+transformers/models/wav2vec2_conformer/modeling_wav2vec2_conformer.py,sha256=V2x9vTprzKy6HUMucDeF6aNrXJQT1tQuS09Qd3yXZkg,95597
+transformers/models/wav2vec2_phoneme/__init__.py,sha256=E2xRyViyzCISV8XE7YQ1gx5Wlx9_ACoPDB6ZZEm9bWo,993
+transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/wav2vec2_phoneme/__pycache__/tokenization_wav2vec2_phoneme.cpython-310.pyc,,
+transformers/models/wav2vec2_phoneme/tokenization_wav2vec2_phoneme.py,sha256=Ef4t56FJIAezlTc8x5hwqWwhjCXrfI7qL_-lC6ShIbI,23162
+transformers/models/wav2vec2_with_lm/__init__.py,sha256=d_lvk8QAia4BIKN7d_Uy3HdRqrDp_ZJHTDZ-nkHKwPA,981
+transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/wav2vec2_with_lm/__pycache__/processing_wav2vec2_with_lm.cpython-310.pyc,,
+transformers/models/wav2vec2_with_lm/processing_wav2vec2_with_lm.py,sha256=rB38_Sef9FlkFFd_AqJwbEraRdcp5wi1fNV1e7he7F8,29522
+transformers/models/wavlm/__init__.py,sha256=puMYnJLkFpkYKq7oH_ziapvzFYZMOyTHDqpN8IxzJPw,1959
+transformers/models/wavlm/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/wavlm/__pycache__/configuration_wavlm.cpython-310.pyc,,
+transformers/models/wavlm/__pycache__/convert_wavlm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/wavlm/__pycache__/convert_wavlm_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/wavlm/__pycache__/modeling_wavlm.cpython-310.pyc,,
+transformers/models/wavlm/configuration_wavlm.py,sha256=UqozZdZFuFUjrJlyZ9XyiMXnDZoG7QHMVH6Ct2O68-I,18634
+transformers/models/wavlm/convert_wavlm_original_pytorch_checkpoint_to_pytorch.py,sha256=tYQiS5CUNYoMWyxKnmkmDG6VW0lwapFxTrDSz4Pprm0,8580
+transformers/models/wavlm/convert_wavlm_original_s3prl_checkpoint_to_pytorch.py,sha256=Yo4K3ZxH5KXS3gCD7KTakUviJABV-gJGJHXFeV5Sc9I,4814
+transformers/models/wavlm/modeling_wavlm.py,sha256=CmZSbC2ajBx7QwhnqCoxb0atEcnXhFvUpBsje4qiG8E,78592
+transformers/models/whisper/__init__.py,sha256=Y9nksRYJ-dCwFFdnagINwcqEMrdRG7AtPKWRB4uXlmM,4346
+transformers/models/whisper/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/configuration_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/convert_openai_to_hf.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/english_normalizer.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/feature_extraction_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/generation_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/modeling_flax_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/modeling_tf_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/modeling_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/processing_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/tokenization_whisper.cpython-310.pyc,,
+transformers/models/whisper/__pycache__/tokenization_whisper_fast.cpython-310.pyc,,
+transformers/models/whisper/configuration_whisper.py,sha256=rIVNZdLcuKu9n6coSkJsRsu4ZRgiCvK6TVTAHeiPsik,17010
+transformers/models/whisper/convert_openai_to_hf.py,sha256=yKg38L_4ogsB_rM8Yti91pWeiuS-Qxq-AIqvzedK_bU,14965
+transformers/models/whisper/english_normalizer.py,sha256=MTJ16OhstprR2X8owfEJmONqkoSHHyzztENejmEhSBM,22822
+transformers/models/whisper/feature_extraction_whisper.py,sha256=KVHCcp-M_s1ibQJkauM6vxCAQmj_Gtw9w33LOq6MkP4,14347
+transformers/models/whisper/generation_whisper.py,sha256=ufmzrp1iDp2smu4Kx6tRlLYnuQgpG19nnUeGhg7FwSk,84051
+transformers/models/whisper/modeling_flax_whisper.py,sha256=s4sI__pmItZAAJxzmgU8f1jy3Dk4fAn9uGyy6TAaJnM,73587
+transformers/models/whisper/modeling_tf_whisper.py,sha256=vaRenYGUVvzdLZNYNHT73T_IPIpy524zeHCMkPaej5w,84868
+transformers/models/whisper/modeling_whisper.py,sha256=U1FJTN82j784XLlBcca2Uj0o4DcNYF5c8Qv4J_pH1v8,105522
+transformers/models/whisper/processing_whisper.py,sha256=pO6wtcywcJq-lkA2rNrdINEvj7_6fjWvAUv7HWn70gE,3891
+transformers/models/whisper/tokenization_whisper.py,sha256=IIiOLgu7JOKl0zxROi4sJsyN6ZU3MN0l3jsNXM2pdD4,54506
+transformers/models/whisper/tokenization_whisper_fast.py,sha256=2rlVcnHDxEwm6F2arkR8x_CAZUIc2PpdtAXO9qwvLvE,28841
+transformers/models/x_clip/__init__.py,sha256=zWhh0KIKf1OaB3EezBv6YkgaxTESvEesITGqhiZYgHs,2053
+transformers/models/x_clip/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/x_clip/__pycache__/configuration_x_clip.cpython-310.pyc,,
+transformers/models/x_clip/__pycache__/convert_x_clip_original_pytorch_to_hf.cpython-310.pyc,,
+transformers/models/x_clip/__pycache__/modeling_x_clip.cpython-310.pyc,,
+transformers/models/x_clip/__pycache__/processing_x_clip.cpython-310.pyc,,
+transformers/models/x_clip/configuration_x_clip.py,sha256=6tzgPQVDl1473h_ffpqbhuqdFonlrnlw8RNyls-EA4o,20407
+transformers/models/x_clip/convert_x_clip_original_pytorch_to_hf.py,sha256=WzXe8IKqSz4Bi78EIvRA6C3QiLL4c-SpARggHjIWtt4,18066
+transformers/models/x_clip/modeling_x_clip.py,sha256=PvV64zMhStEsh3DYna4Es4ZGMOs5aqVATp94rAe_X-g,70185
+transformers/models/x_clip/processing_x_clip.py,sha256=vuwuN_pNagPMfdvGJrSbhQVTslOHBMGFgYV2xD9BHsw,6897
+transformers/models/xglm/__init__.py,sha256=gSzCOADmOA0n4CxfKEhESj32_WqQ6ae6e0QjYyaJ-gs,3871
+transformers/models/xglm/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/xglm/__pycache__/configuration_xglm.cpython-310.pyc,,
+transformers/models/xglm/__pycache__/convert_xglm_original_ckpt_to_trfms.cpython-310.pyc,,
+transformers/models/xglm/__pycache__/modeling_flax_xglm.cpython-310.pyc,,
+transformers/models/xglm/__pycache__/modeling_tf_xglm.cpython-310.pyc,,
+transformers/models/xglm/__pycache__/modeling_xglm.cpython-310.pyc,,
+transformers/models/xglm/__pycache__/tokenization_xglm.cpython-310.pyc,,
+transformers/models/xglm/__pycache__/tokenization_xglm_fast.cpython-310.pyc,,
+transformers/models/xglm/configuration_xglm.py,sha256=2hhVobBTfmh_msU3wC7ZgJvdpOSAlyo83CcwR9Fld3U,5943
+transformers/models/xglm/convert_xglm_original_ckpt_to_trfms.py,sha256=9fjXP40nMFbiI9H0VV66Buqk9JQrPhAFERCOBYHl_7g,2325
+transformers/models/xglm/modeling_flax_xglm.py,sha256=5-ubc4mqp9vhZFUUcyy8FzwwbS_xHpIA6pWIC9keOcg,33117
+transformers/models/xglm/modeling_tf_xglm.py,sha256=CaD9Z4Vg3L-hV5st7Tf0yrSeggoEHs5aH4I2v7LpIyU,45376
+transformers/models/xglm/modeling_xglm.py,sha256=3AB9uJ08yuLM-lxwuYSo5ccaZK9wAwDaUV6pMPZRdpw,38697
+transformers/models/xglm/tokenization_xglm.py,sha256=YsFY375ftb3luEKiJI7UqrmRin2rByJhad6DOppocRI,12482
+transformers/models/xglm/tokenization_xglm_fast.py,sha256=7Cicl8Ilnoq_RbwK136hhQgBiL6-tf0h3vjxifUwAzE,7588
+transformers/models/xlm/__init__.py,sha256=tYpOIDQrMDWgJJ-OTPmX2NZngDrxqo47NRfA1dyNQgY,3292
+transformers/models/xlm/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/xlm/__pycache__/configuration_xlm.cpython-310.pyc,,
+transformers/models/xlm/__pycache__/convert_xlm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/xlm/__pycache__/modeling_tf_xlm.cpython-310.pyc,,
+transformers/models/xlm/__pycache__/modeling_xlm.cpython-310.pyc,,
+transformers/models/xlm/__pycache__/tokenization_xlm.cpython-310.pyc,,
+transformers/models/xlm/configuration_xlm.py,sha256=1JiLuR2V2BNnh_wbuT9rNGP5r96PlyUa_zhGaLpHCqw,11105
+transformers/models/xlm/convert_xlm_original_pytorch_checkpoint_to_pytorch.py,sha256=R2wBMzp-IIiBhTOHrgYacy3bX79BN1dh_DdHcO7fE1Y,2934
+transformers/models/xlm/modeling_tf_xlm.py,sha256=OA5OnPerp84VrehstTjbHBTsEgnbqpHs_hutVY1YkCY,56510
+transformers/models/xlm/modeling_xlm.py,sha256=KuCcnqkr_qocGTJGL7Ylq1O2pnP0uDcNwn_wvAutqaA,54772
+transformers/models/xlm/tokenization_xlm.py,sha256=Q1Wtl0sHLqYwC6T5AK4qFt7PqMWzZxkS417X9V8Muvk,24448
+transformers/models/xlm_prophetnet/__init__.py,sha256=_YI-mEgntKjkMoW1RztiRlYdwvonIVpmO2ZQjm6Gezc,2615
+transformers/models/xlm_prophetnet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/xlm_prophetnet/__pycache__/configuration_xlm_prophetnet.cpython-310.pyc,,
+transformers/models/xlm_prophetnet/__pycache__/modeling_xlm_prophetnet.cpython-310.pyc,,
+transformers/models/xlm_prophetnet/__pycache__/tokenization_xlm_prophetnet.cpython-310.pyc,,
+transformers/models/xlm_prophetnet/configuration_xlm_prophetnet.py,sha256=qJmRUY1xj-yXQWQKKpt6unyn6CvY5PMC07yQo5-v_vo,9022
+transformers/models/xlm_prophetnet/modeling_xlm_prophetnet.py,sha256=BlLGRuSxuEc0rJn8_x7NAnRwcMW0T17H9OdaoF92VM0,119413
+transformers/models/xlm_prophetnet/tokenization_xlm_prophetnet.py,sha256=Pr5C0xf5mHrUV3ICfzBzOSmUOieBWxSUdzviGJeVxBE,13270
+transformers/models/xlm_roberta/__init__.py,sha256=Uhk9z5Xv2w8KrHfe0Hzc5ndpgmn5k6_dcZw6OCWye1A,5825
+transformers/models/xlm_roberta/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/xlm_roberta/__pycache__/configuration_xlm_roberta.cpython-310.pyc,,
+transformers/models/xlm_roberta/__pycache__/modeling_flax_xlm_roberta.cpython-310.pyc,,
+transformers/models/xlm_roberta/__pycache__/modeling_tf_xlm_roberta.cpython-310.pyc,,
+transformers/models/xlm_roberta/__pycache__/modeling_xlm_roberta.cpython-310.pyc,,
+transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta.cpython-310.pyc,,
+transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta_fast.cpython-310.pyc,,
+transformers/models/xlm_roberta/configuration_xlm_roberta.py,sha256=Xm1j7VzQay7AQknB0V-ddFyNQI8_5xPKVFio0vB5hRc,7617
+transformers/models/xlm_roberta/modeling_flax_xlm_roberta.py,sha256=ryQSpeUg7--uLV--jn71z6j6cN6_H4X0T9ZwokhcUC4,58553
+transformers/models/xlm_roberta/modeling_tf_xlm_roberta.py,sha256=onbEA6X_HSc7T05WkwXYTZj85bEL0GOD2zW1zuXn4E4,81930
+transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=nDtRHKoLSvo2Ig1up_-Nu29FLYJqT-e4YvFSxO_58LE,72881
+transformers/models/xlm_roberta/tokenization_xlm_roberta.py,sha256=zs0jtuN2rWdeTq_LNge0rM3b6L5eXxik3UajWKQcoH8,12707
+transformers/models/xlm_roberta/tokenization_xlm_roberta_fast.py,sha256=LB8BJZiCSS5YDwCcxR7uB_5PLf_ROPWV2sTj_xoicfo,7922
+transformers/models/xlm_roberta_xl/__init__.py,sha256=Q3eFSJ5cKAt-2cJLXKdWW28TLujRqjebIBzlqSvK0U4,2405
+transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/xlm_roberta_xl/__pycache__/configuration_xlm_roberta_xl.cpython-310.pyc,,
+transformers/models/xlm_roberta_xl/__pycache__/convert_xlm_roberta_xl_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/xlm_roberta_xl/__pycache__/modeling_xlm_roberta_xl.cpython-310.pyc,,
+transformers/models/xlm_roberta_xl/configuration_xlm_roberta_xl.py,sha256=JeVuxCicCGlzPIBIDyAntpHnZ2SDMubwaJKnwWvlKtw,7369
+transformers/models/xlm_roberta_xl/convert_xlm_roberta_xl_original_pytorch_checkpoint_to_pytorch.py,sha256=zVa6azx9rd33D3JkH2uqJ6W20TosJyWi9eLm3LNtc5U,8228
+transformers/models/xlm_roberta_xl/modeling_xlm_roberta_xl.py,sha256=Gy4ZVXeVFlXrg7AMYTBLf8SzqSnH0KZSGZdI9TH2JhA,68996
+transformers/models/xlnet/__init__.py,sha256=-jvIW4RkN8qTjJPEEmIvK6pO8c9NB0Q4JlzY7CWHWUI,4288
+transformers/models/xlnet/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/xlnet/__pycache__/configuration_xlnet.cpython-310.pyc,,
+transformers/models/xlnet/__pycache__/convert_xlnet_original_tf_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/xlnet/__pycache__/modeling_tf_xlnet.cpython-310.pyc,,
+transformers/models/xlnet/__pycache__/modeling_xlnet.cpython-310.pyc,,
+transformers/models/xlnet/__pycache__/tokenization_xlnet.cpython-310.pyc,,
+transformers/models/xlnet/__pycache__/tokenization_xlnet_fast.cpython-310.pyc,,
+transformers/models/xlnet/configuration_xlnet.py,sha256=UkS71BwRCEE9DaX8iaNku3oOFYYuS4O3orBVmoA-ZYE,11023
+transformers/models/xlnet/convert_xlnet_original_tf_checkpoint_to_pytorch.py,sha256=iodIP1W2FNMjel9V31jR7RcHqs8zGX8TK3YdQ65lEbk,3688
+transformers/models/xlnet/modeling_tf_xlnet.py,sha256=CYAX9HUghU-ALClD9tb54x1q04DdcJLnyAJ-KMw-Fsc,77705
+transformers/models/xlnet/modeling_xlnet.py,sha256=Ioir1-wykC8dZZjU65JZVUTZxt7Ka-DpOkUFWKHVi5I,92938
+transformers/models/xlnet/tokenization_xlnet.py,sha256=L1LK5KSQu-_SYbMiHY3YVjRe8mN4RwpzbLce8ZGfTMI,15703
+transformers/models/xlnet/tokenization_xlnet_fast.py,sha256=B9tuPW-QKQeHEZKZcdiUm0cvWQHjpdQsikmR5nQbYro,9366
+transformers/models/xmod/__init__.py,sha256=uoKu7ACrFCEwDUwL06kwYCcUbHt9P3bLIcHLtMtjw-I,2325
+transformers/models/xmod/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/xmod/__pycache__/configuration_xmod.cpython-310.pyc,,
+transformers/models/xmod/__pycache__/convert_xmod_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,,
+transformers/models/xmod/__pycache__/modeling_xmod.cpython-310.pyc,,
+transformers/models/xmod/configuration_xmod.py,sha256=kRuyyFhh55KorV0Yjxn7OIQnzr05lFFbUHJxMp8Vv6Q,9206
+transformers/models/xmod/convert_xmod_original_pytorch_checkpoint_to_pytorch.py,sha256=yFSAtXjxbAy6uXBg2XinRbk3VSEBOsWj1ugBhVNrGjQ,9859
+transformers/models/xmod/modeling_xmod.py,sha256=SH9MjgN2bUIF7y530NXoQt02YvSpT04n6u_UknRIUu4,76287
+transformers/models/yolos/__init__.py,sha256=DwUvf4HvS249i-g_ykayoDwxJnO7yH4pUJ7UhDE36iY,2400
+transformers/models/yolos/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/yolos/__pycache__/configuration_yolos.cpython-310.pyc,,
+transformers/models/yolos/__pycache__/convert_yolos_to_pytorch.cpython-310.pyc,,
+transformers/models/yolos/__pycache__/feature_extraction_yolos.cpython-310.pyc,,
+transformers/models/yolos/__pycache__/image_processing_yolos.cpython-310.pyc,,
+transformers/models/yolos/__pycache__/modeling_yolos.cpython-310.pyc,,
+transformers/models/yolos/configuration_yolos.py,sha256=xaESSqrvFRXeIwA77KZxUD7qjhfStVwooJ6q6ry9p8A,7669
+transformers/models/yolos/convert_yolos_to_pytorch.py,sha256=g9sI7E-yfoyuXLc2OlN5bFxkc6ZTM243T1Wi8eUwnT0,11259
+transformers/models/yolos/feature_extraction_yolos.py,sha256=0ebN1Be4y86C2yyN2rMQ9AbguEDjcQ7fkabropUpwcs,1481
+transformers/models/yolos/image_processing_yolos.py,sha256=dDa1FAxtaZ2-R1AOTNAOxXfoTwnHWyu7-Pk-NeaCR70,63183
+transformers/models/yolos/modeling_yolos.py,sha256=4p4u0SKMQ2ax6WX0r1YGrsBLLq-QtnX933I34F7JPEg,58511
+transformers/models/yoso/__init__.py,sha256=oV8Bo29EwsQRWVZy2nIaea2ArpOnhkENfp0nFfSKcB4,2074
+transformers/models/yoso/__pycache__/__init__.cpython-310.pyc,,
+transformers/models/yoso/__pycache__/configuration_yoso.cpython-310.pyc,,
+transformers/models/yoso/__pycache__/convert_yoso_pytorch_to_pytorch.cpython-310.pyc,,
+transformers/models/yoso/__pycache__/modeling_yoso.cpython-310.pyc,,
+transformers/models/yoso/configuration_yoso.py,sha256=-HIYc9aPMfMxkOwSL8LgT08TOdIhL_Oy9IvTCuVayz8,6785
+transformers/models/yoso/convert_yoso_pytorch_to_pytorch.py,sha256=VjPOSLINfkiaHx8M3dTNMdC8hXh3M1yyhIQ9t4Vzqk0,4115
+transformers/models/yoso/modeling_yoso.py,sha256=byuWK88BLAkNntMf8X1a4aka7tgCrK56pixQeMAIKVs,54720
+transformers/onnx/__init__.py,sha256=wALLY4TPOK2iPrFcfZf_WiEmTRAU6dAWHElxGdexr58,1548
+transformers/onnx/__main__.py,sha256=JZ9ZmeRsnDitwTMWb-dFT8W9AEmMoMKLQ3SvbyCkY0w,9497
+transformers/onnx/__pycache__/__init__.cpython-310.pyc,,
+transformers/onnx/__pycache__/__main__.cpython-310.pyc,,
+transformers/onnx/__pycache__/config.cpython-310.pyc,,
+transformers/onnx/__pycache__/convert.cpython-310.pyc,,
+transformers/onnx/__pycache__/features.cpython-310.pyc,,
+transformers/onnx/__pycache__/utils.cpython-310.pyc,,
+transformers/onnx/config.py,sha256=zPDgC_HSLmMeqPkcLv_Y8EfbfLLEDLqPrvrfQCRyhl8,32556
+transformers/onnx/convert.py,sha256=ZSh9jQE6B6cCxhlSbKLHxNmj48HkXXdl-HF7iGtZy5k,19369
+transformers/onnx/features.py,sha256=GSuwZj760THxAkDmJYROt43La0GaY-bA19j2bE-XYVI,28264
+transformers/onnx/utils.py,sha256=39Uw_GkFBsTb6ZvMIHRTnI289aQDhc6hwfEapaBGE-o,3625
+transformers/optimization.py,sha256=SxPwa3FVqwNB2BSxVThnZhB5Hiu3bW3h-897ea1S-Jw,36329
+transformers/optimization_tf.py,sha256=HCVXeXok1IdVtFxO_SodBQ2TAvfkF_YkhdU7hXuy9Dg,16855
+transformers/pipelines/__init__.py,sha256=eeKQJ-QECHkGFCys8BCneUpkZ80Q0rBqqgF-YQnpHb0,51248
+transformers/pipelines/__pycache__/__init__.cpython-310.pyc,,
+transformers/pipelines/__pycache__/audio_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/audio_utils.cpython-310.pyc,,
+transformers/pipelines/__pycache__/automatic_speech_recognition.cpython-310.pyc,,
+transformers/pipelines/__pycache__/base.cpython-310.pyc,,
+transformers/pipelines/__pycache__/conversational.cpython-310.pyc,,
+transformers/pipelines/__pycache__/depth_estimation.cpython-310.pyc,,
+transformers/pipelines/__pycache__/document_question_answering.cpython-310.pyc,,
+transformers/pipelines/__pycache__/feature_extraction.cpython-310.pyc,,
+transformers/pipelines/__pycache__/fill_mask.cpython-310.pyc,,
+transformers/pipelines/__pycache__/image_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/image_feature_extraction.cpython-310.pyc,,
+transformers/pipelines/__pycache__/image_segmentation.cpython-310.pyc,,
+transformers/pipelines/__pycache__/image_to_image.cpython-310.pyc,,
+transformers/pipelines/__pycache__/image_to_text.cpython-310.pyc,,
+transformers/pipelines/__pycache__/mask_generation.cpython-310.pyc,,
+transformers/pipelines/__pycache__/object_detection.cpython-310.pyc,,
+transformers/pipelines/__pycache__/pt_utils.cpython-310.pyc,,
+transformers/pipelines/__pycache__/question_answering.cpython-310.pyc,,
+transformers/pipelines/__pycache__/table_question_answering.cpython-310.pyc,,
+transformers/pipelines/__pycache__/text2text_generation.cpython-310.pyc,,
+transformers/pipelines/__pycache__/text_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/text_generation.cpython-310.pyc,,
+transformers/pipelines/__pycache__/text_to_audio.cpython-310.pyc,,
+transformers/pipelines/__pycache__/token_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/video_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/visual_question_answering.cpython-310.pyc,,
+transformers/pipelines/__pycache__/zero_shot_audio_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/zero_shot_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/zero_shot_image_classification.cpython-310.pyc,,
+transformers/pipelines/__pycache__/zero_shot_object_detection.cpython-310.pyc,,
+transformers/pipelines/audio_classification.py,sha256=bWia-wQ7hNfj0RsR7BuG7Yq_B1-Vwka7E-xVVAZB820,8821
+transformers/pipelines/audio_utils.py,sha256=x5JXEWedeMlYcz32JS5HLWBTpy0FPXJvCns_WnXYOnA,9137
+transformers/pipelines/automatic_speech_recognition.py,sha256=pXpTGtfRCdoLAeRRa-IwOnr9lmLviBdmkFoSa08lhHo,37956
+transformers/pipelines/base.py,sha256=ybBFbzYKKzaDvrMZOJTAuRlZRiw97n8TbgH741YcCBA,56728
+transformers/pipelines/conversational.py,sha256=WCbEcYS1Rejwaa_IwGLEIw5FrsRr01FtCQUuV9yPgiI,14730
+transformers/pipelines/depth_estimation.py,sha256=cghYx32OHn4xlqFSlzQ8ryA8fyDC7dt6c-X3ll8xEkA,4477
+transformers/pipelines/document_question_answering.py,sha256=_2lGgDvlwapWVR11a8L4RUIAI23wfjfhF-d2qXX_Xc8,23553
+transformers/pipelines/feature_extraction.py,sha256=Ar_hPljY1Fa_xAsRYX4cCCss1vf-iC5uuKYHp3rejd0,3374
+transformers/pipelines/fill_mask.py,sha256=jnZMK5aZyxlttXtzUISh3ZgvbcI7dIj-nB3Fk37N7Qw,11634
+transformers/pipelines/image_classification.py,sha256=VZgMpoN0Q0wVvdRRSVkzn_B_B6BonvgUA3-ptjVl6w0,8591
+transformers/pipelines/image_feature_extraction.py,sha256=KGFNi5skdOd9bc9GXDBBiqzTPpW986keROZFIMw2-ms,4636
+transformers/pipelines/image_segmentation.py,sha256=ABQM2DBouXYAqQyvofMvybwcVLRdM-YqrHsM6yKJf_s,9124
+transformers/pipelines/image_to_image.py,sha256=phQzbKf01swnGcSfWcm3dQ4ZMrxIW99s8_HTQj533ts,4938
+transformers/pipelines/image_to_text.py,sha256=w46iSfXuDXhkv-hKJE_P2TBKWwEvysdEf6cfXuHlZQs,7996
+transformers/pipelines/mask_generation.py,sha256=kJtIjpCHPouBeLD88JpSV1lROXLctgY7Bqy3XFJ_Jj0,13108
+transformers/pipelines/object_detection.py,sha256=TFPHpG6u1cdxvvM_XEv7eXo79KV8_aobOuRsh47IBpM,7931
+transformers/pipelines/pt_utils.py,sha256=tVI0GBVS7wbsbDEWeFLKju7fVG7I5-xVscndq-ykRko,12768
+transformers/pipelines/question_answering.py,sha256=BMmqntQHVdDukTmluGTKYnZnfbcy8EKYZE31nmaE06U,29886
+transformers/pipelines/table_question_answering.py,sha256=cq-xxL2izvKZIDHlCuFkKsiqmXEe37KyO4YeYZyCqQA,19830
+transformers/pipelines/text2text_generation.py,sha256=XfaCd5zKtAWfmZEIi8FfWFGWDqkbBoFliJw73kw6Q2c,17230
+transformers/pipelines/text_classification.py,sha256=PLnCk29dHn8J_wYpQPMUTDRaBKUUBw-4JnEKHLS--fM,10471
+transformers/pipelines/text_generation.py,sha256=FDRrlJP3bV1Q96B6RoDHQ1kfXx660Q5G7_Z9hqbX7xI,18230
+transformers/pipelines/text_to_audio.py,sha256=XoYuTtUWYXTJGwjlcQO74Tp2fWsXBEA6u3eRyGevWVo,8345
+transformers/pipelines/token_classification.py,sha256=nw-DEE_Pw8gZHjYi3xAONcLcAIQikgwrJRpchq6PxtU,26713
+transformers/pipelines/video_classification.py,sha256=m8jtUP4_0F1HtNEyjcabAVKa5WrWTzagAVP5JM1DH1s,5398
+transformers/pipelines/visual_question_answering.py,sha256=Ukk93_x3hqhtiL9g0c7kPtaPziLMkODQrn-_NXu9p_4,6817
+transformers/pipelines/zero_shot_audio_classification.py,sha256=2aNgax0toCNMP6r2JuFNl6ytOgU3glFJ_v5EuhFf7vg,6711
+transformers/pipelines/zero_shot_classification.py,sha256=WLgjtF0fOEaCiQb9QUu9vcNfJLP9M5nRnJGTgXgRKKU,12347
+transformers/pipelines/zero_shot_image_classification.py,sha256=gTo4C1fMa_Ljhing7OMUf7pxX7NH8Wert-tO-2CRybY,6844
+transformers/pipelines/zero_shot_object_detection.py,sha256=MBs9217WUE3Fc_Jdc-gOtO2i38B3-2yVxAsnlXaVyks,9472
+transformers/processing_utils.py,sha256=-gERmK7YE0yI0gA_Xw1A0-E5ZWM2-V2kTKTmZ1P3OtI,22729
+transformers/pytorch_utils.py,sha256=MQrkW99x_iymGVpZbqO30RGRCEvGiU-IM-TuDdAvDwE,11856
+transformers/quantizers/__init__.py,sha256=hCprQnoI20-O1FSMSRgD-P9_NKEzN7kEfY66_BrQxz0,699
+transformers/quantizers/__pycache__/__init__.cpython-310.pyc,,
+transformers/quantizers/__pycache__/auto.cpython-310.pyc,,
+transformers/quantizers/__pycache__/base.cpython-310.pyc,,
+transformers/quantizers/__pycache__/quantizer_aqlm.cpython-310.pyc,,
+transformers/quantizers/__pycache__/quantizer_awq.cpython-310.pyc,,
+transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-310.pyc,,
+transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-310.pyc,,
+transformers/quantizers/__pycache__/quantizer_gptq.cpython-310.pyc,,
+transformers/quantizers/__pycache__/quantizer_quanto.cpython-310.pyc,,
+transformers/quantizers/__pycache__/quantizers_utils.cpython-310.pyc,,
+transformers/quantizers/auto.py,sha256=rhAGKewmFRPnrGovnytUwUoOEhxoSuBBn3HKDz1wJ7Y,6851
+transformers/quantizers/base.py,sha256=auFO3aXVfp3ztN3Hg8fh_YhGyQvy5IbHxXioBCBS3TY,9145
+transformers/quantizers/quantizer_aqlm.py,sha256=cBO72I147eZzE_9W041rcY1GB5FCcxd3_ynKPGFbZWc,3681
+transformers/quantizers/quantizer_awq.py,sha256=-F7nKTAk3RbrOZMoXMUDM5PUiCoq8YRffEwEQrDEb_s,5137
+transformers/quantizers/quantizer_bnb_4bit.py,sha256=vnDjztTP4CpDXjer_rE1EnXrlQmizXYDfIBM7SKAFBM,14574
+transformers/quantizers/quantizer_bnb_8bit.py,sha256=X2nEGlEj6RYLvkmBDCN8srb8jMMFCAIdBMRVgvTcUO0,13000
+transformers/quantizers/quantizer_gptq.py,sha256=ZWNQY2WF6mzUV-SwYg1PZIM0kZ3JJyYGe3gF2mZcZ58,3878
+transformers/quantizers/quantizer_quanto.py,sha256=-qIlgLelWfk3ExYnWBul2z2L7B9Nw7ijxgDmC8g3Ygw,7847
+transformers/quantizers/quantizers_utils.py,sha256=6bgmf8mLxow6gXonTFX7PLfqFsf6plUj7DOeXnXhwMM,1066
+transformers/safetensors_conversion.py,sha256=1998DSxTsl99crqBPfNXRq7Xb6ABRc5Ts4R5oxUM9p0,4570
+transformers/sagemaker/__init__.py,sha256=fKtKAHamz_CLL9jPGCa2E-1n8RmuS-58qGtzZuKc3qg,730
+transformers/sagemaker/__pycache__/__init__.cpython-310.pyc,,
+transformers/sagemaker/__pycache__/trainer_sm.cpython-310.pyc,,
+transformers/sagemaker/__pycache__/training_args_sm.cpython-310.pyc,,
+transformers/sagemaker/trainer_sm.py,sha256=7GsKLtjdMfKp98OwHD7RcBsl745OOwHAaBswkfLkfsE,1044
+transformers/sagemaker/training_args_sm.py,sha256=4ZnQhITfMwT0y2Y2MvkI11PEB_yfTX5Z7WrPKt0VXD8,5389
+transformers/testing_utils.py,sha256=fUUcoPYpuvCIN2WaLyokAWhgK8bm_ZuOU_eKi2CZGJ8,83993
+transformers/tf_utils.py,sha256=9TlTj8qlWobJ0e-lNx47m3Pu1eDY6S6dm5AIIekyNtw,10091
+transformers/time_series_utils.py,sha256=LjOgIvLmP0v6fJoqGo8lCD1kr3sXx9O_jmI-qJejtPU,7520
+transformers/tokenization_utils.py,sha256=SuyV-6xCXMhOqDdXExtGeXWUkjWt4gV3fz3PWjbjkuA,44595
+transformers/tokenization_utils_base.py,sha256=V30CjRRHJfokRpeOLvdRdPZADs7pweZuL2-zAurCmBk,199241
+transformers/tokenization_utils_fast.py,sha256=tpErvsUzI0RSiZJJtdmi7LbEuIltXnul9FrhAFCuIoM,37523
+transformers/tools/__init__.py,sha256=hI6M7zNUTyRE3BiZtL1VM8CcpYqxTrFR7lS0U6T7InM,2955
+transformers/tools/__pycache__/__init__.cpython-310.pyc,,
+transformers/tools/__pycache__/agent_types.cpython-310.pyc,,
+transformers/tools/__pycache__/agents.cpython-310.pyc,,
+transformers/tools/__pycache__/base.cpython-310.pyc,,
+transformers/tools/__pycache__/document_question_answering.cpython-310.pyc,,
+transformers/tools/__pycache__/evaluate_agent.cpython-310.pyc,,
+transformers/tools/__pycache__/image_captioning.cpython-310.pyc,,
+transformers/tools/__pycache__/image_question_answering.cpython-310.pyc,,
+transformers/tools/__pycache__/image_segmentation.cpython-310.pyc,,
+transformers/tools/__pycache__/prompts.cpython-310.pyc,,
+transformers/tools/__pycache__/python_interpreter.cpython-310.pyc,,
+transformers/tools/__pycache__/speech_to_text.cpython-310.pyc,,
+transformers/tools/__pycache__/text_classification.cpython-310.pyc,,
+transformers/tools/__pycache__/text_question_answering.cpython-310.pyc,,
+transformers/tools/__pycache__/text_summarization.cpython-310.pyc,,
+transformers/tools/__pycache__/text_to_speech.cpython-310.pyc,,
+transformers/tools/__pycache__/translation.cpython-310.pyc,,
+transformers/tools/agent_types.py,sha256=6ZVzmPwWiMtJXKUZ33fKzfUFp-v_qfI901MKj2pbQRY,9093
+transformers/tools/agents.py,sha256=1t7eUTYriK4jIQMFcJvtYzsivDR3XEkeaFv_LcFVhCo,30737
+transformers/tools/base.py,sha256=L7OBvSj233hqZmuwn3R0Xfz7naTtWbbZrXxs8v1Rj7s,30612
+transformers/tools/document_question_answering.py,sha256=7qSMr0fQYadiGOoVMXNrImls3_O-hcdDbLrlSc3cvxU,3337
+transformers/tools/evaluate_agent.py,sha256=JvMKk9NoJLZTRnY_VAC_cSHWAO-Rx-Dl8Vt31kpBbfw,24721
+transformers/tools/image_captioning.py,sha256=x1PfWpDozWSZuue633XwEPPBTr_zEX9mgrYar-8LqXQ,1745
+transformers/tools/image_question_answering.py,sha256=UNOzIcmkckh1W1bqlj31h61eXGAZ1TZ831iqytyO4NQ,1969
+transformers/tools/image_segmentation.py,sha256=1BbHSYTz3q8DlTMHBnKdibp7JCHZydPdNoyl7TObfN8,2103
+transformers/tools/prompts.py,sha256=1YXY_A5Zfyd_rudKzB4ShQ9OR_E5bHeh9bcgBVt1ltQ,1558
+transformers/tools/python_interpreter.py,sha256=aSn1bnuQT9_xteXNcJdlmi39IzX1FZRqSaoGEQRS-PE,9999
+transformers/tools/speech_to_text.py,sha256=m3LCJxMpJykL9aD8rZ4H3ROGtt59LcLozw-6963XjCE,1482
+transformers/tools/text_classification.py,sha256=snyBoLTERnfl7YKKAgZctWhow6sEXQdS4bcWYUxJnyU,2475
+transformers/tools/text_question_answering.py,sha256=mGO3E0nL71Jzn4jeC2_RgLRDtuqbld77mQ2T7jw4aPc,1967
+transformers/tools/text_summarization.py,sha256=-8TY4P4LL4c7bQcD9y8Vi5Rfiaw8nAiY_aP5yXicq_g,1691
+transformers/tools/text_to_speech.py,sha256=vuJU2dC2d5J1kVdGjSBJCBdsTiOli2J7OabAposOFfA,2424
+transformers/tools/translation.py,sha256=fu05jVYbJUFmNvmwd4mjQOqzGt1JSy6QbpuAd2uChOE,8493
+transformers/trainer.py,sha256=RN4aGF7cV0fcmGLHaawobOmnAbDxLOueYCYMQdUgz2g,213748
+transformers/trainer_callback.py,sha256=apM_43C6-iJAl2TPmjS-8pFNvQQyp5yLLBbtCfPNWII,25174
+transformers/trainer_pt_utils.py,sha256=SjbjchrwMLfKKdezWApvgLGMieDcRaMqSjIsY7mgSHc,58875
+transformers/trainer_seq2seq.py,sha256=6oSCG9GlQmUBpasw3nFI_ngF6KCrxPixL91ob7CQMCk,17240
+transformers/trainer_utils.py,sha256=B0VzRksgwpXUVAbx56aacfEY-WSnpg1lo_rou2qTaEg,30861
+transformers/training_args.py,sha256=SfRhYwHUHBn3cjQDSSMZWrPAPbe7NaCr4MOguOzXSoM,145727
+transformers/training_args_seq2seq.py,sha256=k8qyPQAo5GWlcToN3tnzW7dE4xyP7i7HRjP_sgxlllA,4308
+transformers/training_args_tf.py,sha256=esUsNAj6kNNMu1LJLxfELJAJiTq7HD6fHz3GvI_mKJg,14570
+transformers/utils/__init__.py,sha256=VevayYs3KBHWmrALLFXXFF9P3ZMoGKlgJmmGeqtAqds,7656
+transformers/utils/__pycache__/__init__.cpython-310.pyc,,
+transformers/utils/__pycache__/backbone_utils.cpython-310.pyc,,
+transformers/utils/__pycache__/bitsandbytes.cpython-310.pyc,,
+transformers/utils/__pycache__/constants.cpython-310.pyc,,
+transformers/utils/__pycache__/doc.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_detectron2_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_flax_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_keras_nlp_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_music_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_pt_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_speech_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_tf_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_tokenizers_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_torchaudio_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/dummy_vision_objects.cpython-310.pyc,,
+transformers/utils/__pycache__/fx.cpython-310.pyc,,
+transformers/utils/__pycache__/generic.cpython-310.pyc,,
+transformers/utils/__pycache__/hp_naming.cpython-310.pyc,,
+transformers/utils/__pycache__/hub.cpython-310.pyc,,
+transformers/utils/__pycache__/import_utils.cpython-310.pyc,,
+transformers/utils/__pycache__/logging.cpython-310.pyc,,
+transformers/utils/__pycache__/model_parallel_utils.cpython-310.pyc,,
+transformers/utils/__pycache__/notebook.cpython-310.pyc,,
+transformers/utils/__pycache__/peft_utils.cpython-310.pyc,,
+transformers/utils/__pycache__/quantization_config.cpython-310.pyc,,
+transformers/utils/__pycache__/sentencepiece_model_pb2.cpython-310.pyc,,
+transformers/utils/__pycache__/sentencepiece_model_pb2_new.cpython-310.pyc,,
+transformers/utils/__pycache__/versions.cpython-310.pyc,,
+transformers/utils/backbone_utils.py,sha256=ruggZsHu9IJ3IVPa4Dvvvqx9Sj1mB-8P24C2VV7RPTo,16309
+transformers/utils/bitsandbytes.py,sha256=LzOKwcHWAxxZZv-7Ts9Q0vlEYvHd18affVgVbiR3Tzs,1040
+transformers/utils/constants.py,sha256=sZsUwOnA3CbtN1svs9YoaNLTTsAc9RVaITsgpf8K4iI,282
+transformers/utils/doc.py,sha256=eObKDEpC1z-05BNXHi1hYNjQMPsWSN1SNMa7IFkRmN8,40737
+transformers/utils/dummy_detectron2_objects.py,sha256=n7Pt_7sbVBNfohKGcOARB-ZcPcJRbjEAcoLd2vTXndU,340
+transformers/utils/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.py,sha256=n6pY4s7zCII3dzo7Ejd0RviHa_pMateuDEwbbHgsTUY,902
+transformers/utils/dummy_flax_objects.py,sha256=ANFq3CYhCByAWqcFIY2z-DzVNizlaH6oGSMX0XmIz_Y,33561
+transformers/utils/dummy_keras_nlp_objects.py,sha256=AVWt2orICCUXi754bkavvqPzYO91PjER-FlUZAw2jZc,294
+transformers/utils/dummy_music_objects.py,sha256=1lxIebYUOdHJWMQ_T5IQgPgcO_wp_8YM_HGc3skuGVg,458
+transformers/utils/dummy_pt_objects.py,sha256=ztkcIY5tn6C537NhVQXj9JPIOa1hsp0g8K2Pkc4WV0U,232487
+transformers/utils/dummy_sentencepiece_and_tokenizers_objects.py,sha256=BgPLr8Wz8A-17K86x04N21CKXtWNQLJEWx2c4aZRqaA,286
+transformers/utils/dummy_sentencepiece_objects.py,sha256=KcSrwciSpiurqsxBoR34G5NuSrc2Clf1Q7N_CjanBlc,6455
+transformers/utils/dummy_speech_objects.py,sha256=9eFm1cjdsYOPBoAz9JTgP35Bg8WF2C9AZ_y1hFpKZdQ,465
+transformers/utils/dummy_tensorflow_text_objects.py,sha256=43V0IA2kb9gtuL0S1OL1eRFFxzQwKg4pPjMVuXUB5qg,306
+transformers/utils/dummy_tf_objects.py,sha256=7zY-UmprSrqdj16liMGgtfhXEnDPaOn6QGBW267EG5o,67955
+transformers/utils/dummy_tokenizers_objects.py,sha256=2Zywdoz7Nr1rA8fLFCx4F-JaKAcoSHBszpCFGuuAyAU,11456
+transformers/utils/dummy_torchaudio_objects.py,sha256=9A7Y4643_hTaqqZKlL-O524wRnrmNtODxisuDdO_7kU,488
+transformers/utils/dummy_vision_objects.py,sha256=xOxbsDSXdr172U3VwEh7VsQkvEuQBzunOUqMiozT-Bs,14893
+transformers/utils/fx.py,sha256=RuII5DVdwVWjmgEDF-0g57qpOpqwaPQhkuYn_oFVEd0,50589
+transformers/utils/generic.py,sha256=uIaZJ203H2zJOeEO4HI5W2SUyMunoV3Sr22voTONQ4s,23946
+transformers/utils/hp_naming.py,sha256=vqcOXcDOyqbISWo8-ClUJUOBVbZM1h08EcymTwcRthc,4979
+transformers/utils/hub.py,sha256=GvdfxYlPkPKr5yIXqv_wPBjAbC9gW8A-DvlTiQIbpLs,55844
+transformers/utils/import_utils.py,sha256=9pIoFqqYO7tSNAVMAl-ijjpHOONoczICu21nFGehWAE,53250
+transformers/utils/logging.py,sha256=X6FDZSn9Vbo81QHn80TGVsk9LGHe4OdWDCTCnCF5V7A,11609
+transformers/utils/model_parallel_utils.py,sha256=XbGU9IlFF59K_aplRxUGVnTfIZ9mpbLomKqQ08ooTew,2272
+transformers/utils/notebook.py,sha256=PiEiHpfuqxd3M1U3MPD8bmeO8bvtTbLfOxnL-cZWHQY,15558
+transformers/utils/peft_utils.py,sha256=as1XSRYa4-skewnlVom74qb-vgoZkGJtcXeNEUndAlo,5217
+transformers/utils/quantization_config.py,sha256=gnOvIE3HK3mha8PODRXV5-pnQMg0W5V12opy_rKmmLA,42093
+transformers/utils/sentencepiece_model_pb2.py,sha256=XiQs9uMEusfAZP6t6IBuTTX9yl7LiOyJEi7Ib-Wzmq0,50677
+transformers/utils/sentencepiece_model_pb2_new.py,sha256=FwTW0nkCiPCErmGk0s27BniKmkORcfnNk-w7NBGkCuA,6621
+transformers/utils/versions.py,sha256=C-Tqr4qGSHH64ygIBCSo8gA6azz7Dbzh8zdc_yjMkX8,4337
diff --git a/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..57e3d840d59a650ac5bccbad5baeec47d155f0ad
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.38.4)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8a7995ed6f21261a78509c57d57daba51ecf1a7d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+transformers-cli = transformers.commands.transformers_cli:main
diff --git a/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..976a2b1f3998279c10c413279a095be86bf69167
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/transformers-4.40.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+transformers