diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/INSTALLER b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/LICENSE b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..67590a5e5be5a5a2dde3fe53a7512e404a896c22 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2018, Martin Durant +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/METADATA b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..f885cd9222a0b1e67a0f676d20a9ea2c07553dfb --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/METADATA @@ -0,0 +1,167 @@ +Metadata-Version: 2.1 +Name: fsspec +Version: 2024.3.1 +Summary: File-system specification +Home-page: https://github.com/fsspec/filesystem_spec +Maintainer: Martin Durant +Maintainer-email: mdurant@anaconda.com +License: BSD +Project-URL: Changelog, https://filesystem-spec.readthedocs.io/en/latest/changelog.html +Project-URL: Documentation, https://filesystem-spec.readthedocs.io/en/latest/ +Keywords: file +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: abfs +Requires-Dist: adlfs ; extra == 'abfs' +Provides-Extra: adl +Requires-Dist: adlfs ; extra == 'adl' +Provides-Extra: arrow +Requires-Dist: pyarrow >=1 ; extra == 'arrow' +Provides-Extra: dask +Requires-Dist: dask ; extra == 'dask' +Requires-Dist: distributed ; extra == 'dask' +Provides-Extra: devel +Requires-Dist: pytest ; extra == 'devel' +Requires-Dist: pytest-cov ; extra == 'devel' +Provides-Extra: dropbox +Requires-Dist: dropboxdrivefs ; extra == 'dropbox' +Requires-Dist: requests ; extra == 'dropbox' +Requires-Dist: dropbox ; extra == 'dropbox' +Provides-Extra: entrypoints +Provides-Extra: full +Requires-Dist: adlfs ; extra == 'full' +Requires-Dist: aiohttp !=4.0.0a0,!=4.0.0a1 ; extra == 'full' +Requires-Dist: dask ; extra == 'full' +Requires-Dist: distributed ; extra == 'full' +Requires-Dist: dropbox ; extra == 'full' +Requires-Dist: dropboxdrivefs ; extra == 'full' +Requires-Dist: fusepy ; extra == 'full' +Requires-Dist: gcsfs ; extra == 'full' +Requires-Dist: libarchive-c ; extra == 'full' +Requires-Dist: ocifs ; extra == 'full' +Requires-Dist: panel ; extra == 'full' +Requires-Dist: paramiko ; extra == 'full' +Requires-Dist: pyarrow >=1 ; extra == 'full' +Requires-Dist: pygit2 ; extra == 'full' +Requires-Dist: requests ; extra == 'full' +Requires-Dist: s3fs ; extra == 'full' +Requires-Dist: smbprotocol ; extra == 'full' +Requires-Dist: tqdm ; extra == 'full' +Provides-Extra: fuse +Requires-Dist: fusepy ; extra == 'fuse' +Provides-Extra: gcs +Requires-Dist: gcsfs ; extra == 'gcs' +Provides-Extra: git +Requires-Dist: pygit2 ; extra == 'git' +Provides-Extra: github +Requires-Dist: requests ; extra == 'github' +Provides-Extra: gs +Requires-Dist: gcsfs ; extra == 'gs' +Provides-Extra: gui +Requires-Dist: panel ; extra == 'gui' +Provides-Extra: hdfs +Requires-Dist: pyarrow >=1 ; extra == 'hdfs' +Provides-Extra: http +Requires-Dist: aiohttp !=4.0.0a0,!=4.0.0a1 ; extra == 'http' +Provides-Extra: libarchive +Requires-Dist: libarchive-c ; extra == 'libarchive' +Provides-Extra: oci +Requires-Dist: ocifs ; extra == 'oci' +Provides-Extra: s3 +Requires-Dist: s3fs ; extra == 's3' +Provides-Extra: sftp +Requires-Dist: paramiko ; extra == 'sftp' +Provides-Extra: smb +Requires-Dist: smbprotocol ; extra == 'smb' 
+Provides-Extra: ssh +Requires-Dist: paramiko ; extra == 'ssh' +Provides-Extra: tqdm +Requires-Dist: tqdm ; extra == 'tqdm' + +# filesystem_spec + +[![PyPI version](https://badge.fury.io/py/fsspec.svg)](https://pypi.python.org/pypi/fsspec/) +[![Anaconda-Server Badge](https://anaconda.org/conda-forge/fsspec/badges/version.svg)](https://anaconda.org/conda-forge/fsspec) +![Build](https://github.com/fsspec/filesystem_spec/workflows/CI/badge.svg) +[![Docs](https://readthedocs.org/projects/filesystem-spec/badge/?version=latest)](https://filesystem-spec.readthedocs.io/en/latest/?badge=latest) +[![PyPi downloads](https://img.shields.io/pypi/dm/fsspec?label=pypi%20downloads&style=flat)](https://pepy.tech/project/fsspec) + +A specification for pythonic filesystems. + +## Install + +```bash +pip install fsspec +``` + +would install the base fsspec. Various optionally supported features might require specification of custom +extra require, e.g. `pip install fsspec[ssh]` will install dependencies for `ssh` backends support. +Use `pip install fsspec[full]` for installation of all known extra dependencies. + +Up-to-date package also provided through conda-forge distribution: + +```bash +conda install -c conda-forge fsspec +``` + + +## Purpose + +To produce a template or specification for a file-system interface, that specific implementations should follow, +so that applications making use of them can rely on a common behaviour and not have to worry about the specific +internal implementation decisions with any given backend. Many such implementations are included in this package, +or in sister projects such as `s3fs` and `gcsfs`. + +In addition, if this is well-designed, then additional functionality, such as a key-value store or FUSE +mounting of the file-system implementation may be available for all implementations "for free". + +## Documentation + +Please refer to [RTD](https://filesystem-spec.readthedocs.io/en/latest/?badge=latest) + +## Develop + +fsspec uses GitHub Actions for CI. Environment files can be found +in the "ci/" directory. Note that the main environment is called "py38", +but it is expected that the version of python installed be adjustable at +CI runtime. For local use, pick a version suitable for you. + +### Testing + +Tests can be run in the dev environment, if activated, via ``pytest fsspec``. + +The full fsspec suite requires a system-level docker, docker-compose, and fuse +installation. If only making changes to one backend implementation, it is +not generally necessary to run all tests locally. + +It is expected that contributors ensure that any change to fsspec does not +cause issues or regressions for either other fsspec-related packages such +as gcsfs and s3fs, nor for downstream users of fsspec. The "downstream" CI +run and corresponding environment file run a set of tests from the dask +test suite, and very minimal tests against pandas and zarr from the +test_downstream.py module in this repo. + +### Code Formatting + +fsspec uses [Black](https://black.readthedocs.io/en/stable) to ensure +a consistent code format throughout the project. +Run ``black fsspec`` from the root of the filesystem_spec repository to +auto-format your code. Additionally, many editors have plugins that will apply +``black`` as you edit files. ``black`` is included in the ``tox`` environments. + +Optionally, you may wish to setup [pre-commit hooks](https://pre-commit.com) to +automatically run ``black`` when you make a git commit. 
+Run ``pre-commit install --install-hooks`` from the root of the +filesystem_spec repository to setup pre-commit hooks. ``black`` will now be run +before you commit, reformatting any changed files. You can format without +committing via ``pre-commit run`` or skip these checks with ``git commit +--no-verify``. diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/RECORD b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..9afca34810c31218652469db522c8183649871cc --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/RECORD @@ -0,0 +1,104 @@ +fsspec-2024.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fsspec-2024.3.1.dist-info/LICENSE,sha256=LcNUls5TpzB5FcAIqESq1T53K0mzTN0ARFBnaRQH7JQ,1513 +fsspec-2024.3.1.dist-info/METADATA,sha256=Wv4QVGqB4lYfHfgP-Cfby1Nce57WYXXAhH0f6Ju5FUM,6786 +fsspec-2024.3.1.dist-info/RECORD,, +fsspec-2024.3.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +fsspec-2024.3.1.dist-info/top_level.txt,sha256=blt2pDrQDwN3Gklcw13CSPLQRd6aaOgJ8AxqrW395MI,7 +fsspec/__init__.py,sha256=C8G5rqeNAes0NHYFhdxcw-ZMg4yDipqzSNc4NIR7uoc,2010 +fsspec/__pycache__/__init__.cpython-310.pyc,, +fsspec/__pycache__/_version.cpython-310.pyc,, +fsspec/__pycache__/archive.cpython-310.pyc,, +fsspec/__pycache__/asyn.cpython-310.pyc,, +fsspec/__pycache__/caching.cpython-310.pyc,, +fsspec/__pycache__/callbacks.cpython-310.pyc,, +fsspec/__pycache__/compression.cpython-310.pyc,, +fsspec/__pycache__/config.cpython-310.pyc,, +fsspec/__pycache__/conftest.cpython-310.pyc,, +fsspec/__pycache__/core.cpython-310.pyc,, +fsspec/__pycache__/dircache.cpython-310.pyc,, +fsspec/__pycache__/exceptions.cpython-310.pyc,, +fsspec/__pycache__/fuse.cpython-310.pyc,, +fsspec/__pycache__/generic.cpython-310.pyc,, +fsspec/__pycache__/gui.cpython-310.pyc,, +fsspec/__pycache__/mapping.cpython-310.pyc,, +fsspec/__pycache__/parquet.cpython-310.pyc,, +fsspec/__pycache__/registry.cpython-310.pyc,, +fsspec/__pycache__/spec.cpython-310.pyc,, +fsspec/__pycache__/transaction.cpython-310.pyc,, +fsspec/__pycache__/utils.cpython-310.pyc,, +fsspec/_version.py,sha256=wHq_BWmvVWba6IeFL8lWxmbw1fXQhCR4NJHH8b77Nxs,500 +fsspec/archive.py,sha256=S__DzfZj-urAN3tp2W6jJ6YDiXG1fAl7FjvWUN73qIE,2386 +fsspec/asyn.py,sha256=AOd2SXH2YPCaQL5jA6IegYevdMFkAnGD7Seh9DC2gSE,36404 +fsspec/caching.py,sha256=TrZqKo3drK9Afujg7grZRiLNcmgUr84rnvMcojzURnI,28819 +fsspec/callbacks.py,sha256=BDIwLzK6rr_0V5ch557fSzsivCElpdqhXr5dZ9Te-EE,9210 +fsspec/compression.py,sha256=Yyd8FXw2rwWRtVoRVah_yguv-J7BUcBo4yDu6Qt52a0,4859 +fsspec/config.py,sha256=LF4Zmu1vhJW7Je9Q-cwkRc3xP7Rhyy7Xnwj26Z6sv2g,4279 +fsspec/conftest.py,sha256=fVfx-NLrH_OZS1TIpYNoPzM7efEcMoL62reHOdYeFCA,1245 +fsspec/core.py,sha256=kkwJ7IR3-i1C9SAZ_oGrPpbM5hqpBZ2OLkuHU5a1sYE,22471 +fsspec/dircache.py,sha256=YzogWJrhEastHU7vWz-cJiJ7sdtLXFXhEpInGKd4EcM,2717 +fsspec/exceptions.py,sha256=xcS7LiRrQ748kvOB9mrUR14kpjNztrHgEkZWi9M-VaI,330 +fsspec/fuse.py,sha256=66amOa6wdIbS0DMhhfAPUoOB37HPorfXD1izV0prmTY,10145 +fsspec/generic.py,sha256=jIA7wBwtUzJhTth78PTzWbOBjGom2e4IjmQ_KBSlHPg,13575 +fsspec/gui.py,sha256=XKoXZpUhRE7jOhRCJH4-jRbKhVu56aS8h9tecvPD3nc,13932 +fsspec/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fsspec/implementations/__pycache__/__init__.cpython-310.pyc,, +fsspec/implementations/__pycache__/arrow.cpython-310.pyc,, 
+fsspec/implementations/__pycache__/cache_mapper.cpython-310.pyc,, +fsspec/implementations/__pycache__/cache_metadata.cpython-310.pyc,, +fsspec/implementations/__pycache__/cached.cpython-310.pyc,, +fsspec/implementations/__pycache__/dask.cpython-310.pyc,, +fsspec/implementations/__pycache__/data.cpython-310.pyc,, +fsspec/implementations/__pycache__/dbfs.cpython-310.pyc,, +fsspec/implementations/__pycache__/dirfs.cpython-310.pyc,, +fsspec/implementations/__pycache__/ftp.cpython-310.pyc,, +fsspec/implementations/__pycache__/git.cpython-310.pyc,, +fsspec/implementations/__pycache__/github.cpython-310.pyc,, +fsspec/implementations/__pycache__/http.cpython-310.pyc,, +fsspec/implementations/__pycache__/jupyter.cpython-310.pyc,, +fsspec/implementations/__pycache__/libarchive.cpython-310.pyc,, +fsspec/implementations/__pycache__/local.cpython-310.pyc,, +fsspec/implementations/__pycache__/memory.cpython-310.pyc,, +fsspec/implementations/__pycache__/reference.cpython-310.pyc,, +fsspec/implementations/__pycache__/sftp.cpython-310.pyc,, +fsspec/implementations/__pycache__/smb.cpython-310.pyc,, +fsspec/implementations/__pycache__/tar.cpython-310.pyc,, +fsspec/implementations/__pycache__/webhdfs.cpython-310.pyc,, +fsspec/implementations/__pycache__/zip.cpython-310.pyc,, +fsspec/implementations/arrow.py,sha256=_7TLuV6ZzNlpmUU_v6ud56u2wadzsKmY5qugPBxgMEs,8649 +fsspec/implementations/cache_mapper.py,sha256=iHgBA6gjzDJ7_mBboHFzpLTf55HP3UEwUOZ43xyUK4M,2429 +fsspec/implementations/cache_metadata.py,sha256=ZvyA7Y3KK-5Ct4E5pELzD6mH_5T03XqaKVT96qYDADU,8576 +fsspec/implementations/cached.py,sha256=CuxQXQ6f-MRnLvsRzvauEhpmXEgicZZCfViKjIu1kn4,33029 +fsspec/implementations/dask.py,sha256=CXZbJzIVOhKV8ILcxuy3bTvcacCueAbyQxmvAkbPkrk,4466 +fsspec/implementations/data.py,sha256=LDLczxRh8h7x39Zjrd-GgzdQHr78yYxDlrv2C9Uxb5E,1658 +fsspec/implementations/dbfs.py,sha256=cix9OYUveuSOx5UO5uRUwNUkYqjzyY0fkKnca1kTgZ0,15014 +fsspec/implementations/dirfs.py,sha256=inDIRSDPhI1_ud1MMBFrpZQ11VIAMJ_dZQtbE4V08Ng,11384 +fsspec/implementations/ftp.py,sha256=rp6cTog8xqjDPlKdSLKcsyP7K593_ByMabxGbNSEpTo,11655 +fsspec/implementations/git.py,sha256=vKGI-Vd5q4H2RrvhebkPc9NwlfkZ980OUGhebeCw-M0,4034 +fsspec/implementations/github.py,sha256=0kIiKkeAaROuHgdWBHVQFrzJ2ZfoDgymCehL_kJXHYA,7565 +fsspec/implementations/http.py,sha256=yr6t0OhLwZx_pvNQ05detAehcQjRw0Pg6XdwWv81jxk,29601 +fsspec/implementations/jupyter.py,sha256=B2uj7OEm7yIk-vRSsO37_ND0t0EBvn4B-Su43ibN4Pg,3811 +fsspec/implementations/libarchive.py,sha256=5_I2DiLXwQ1JC8x-K7jXu-tBwhO9dj7tFLnb0bTnVMQ,7102 +fsspec/implementations/local.py,sha256=XLsBoG4lf92w5ZddmbHXcStThSYxHgeoJEd2Mp5Uo0Y,14327 +fsspec/implementations/memory.py,sha256=tlaGCekgZ2Z_9n8B6hsSpo6_V89PwjugkOmD445QrqE,9778 +fsspec/implementations/reference.py,sha256=3dPi55riD_cROCafpeoUm2Xbb1vpXpyQijl09f5jTsE,43871 +fsspec/implementations/sftp.py,sha256=fMY9XZcmpjszQ2tCqO_TPaJesaeD_Dv7ptYzgUPGoO0,5631 +fsspec/implementations/smb.py,sha256=_hR5MKwDUCi3u5zEzvnenIWRIGPMhuUryWRYvNSp0WE,10804 +fsspec/implementations/tar.py,sha256=dam78Tp_CozybNqCY2JYgGBS3Uc9FuJUAT9oB0lolOs,4111 +fsspec/implementations/webhdfs.py,sha256=wqVfno7z0TY1HepaIvKTUUcl_bi5NkV6qWsST8t_s7Y,16745 +fsspec/implementations/zip.py,sha256=vc1fNz-yO8uWQ9bQUqBFYpTcgsfZQq9vDwwg4Aufs9Y,4417 +fsspec/mapping.py,sha256=hSsiRo-dgAOj6oHf67bF3i11U4xREglXToHGUX4GhRY,8261 +fsspec/parquet.py,sha256=qVxDhwc960SGOt5etcYAJxCr-7HQKP01687KpDR02Gw,19463 +fsspec/registry.py,sha256=VjnfGUg39lvlCDxOoGSeYm8UL12wZC5yQ5n8-W92eD0,11173 
+fsspec/spec.py,sha256=3t96RgizRN_slIuHXnuR0bXjVUfBS1TfuDrEua4oQvE,66277 +fsspec/tests/abstract/__init__.py,sha256=i1wcFixV6QhOwdoB24c8oXjzobISNqiKVz9kl2DvAY8,10028 +fsspec/tests/abstract/__pycache__/__init__.cpython-310.pyc,, +fsspec/tests/abstract/__pycache__/common.cpython-310.pyc,, +fsspec/tests/abstract/__pycache__/copy.cpython-310.pyc,, +fsspec/tests/abstract/__pycache__/get.cpython-310.pyc,, +fsspec/tests/abstract/__pycache__/put.cpython-310.pyc,, +fsspec/tests/abstract/common.py,sha256=1GQwNo5AONzAnzZj0fWgn8NJPLXALehbsuGxS3FzWVU,4973 +fsspec/tests/abstract/copy.py,sha256=gU5-d97U3RSde35Vp4RxPY4rWwL744HiSrJ8IBOp9-8,19967 +fsspec/tests/abstract/get.py,sha256=vNR4HztvTR7Cj56AMo7_tx7TeYz1Jgr_2Wb8Lv-UiBY,20755 +fsspec/tests/abstract/put.py,sha256=7aih17OKB_IZZh1Mkq1eBDIjobhtMQmI8x-Pw-S_aZk,21201 +fsspec/transaction.py,sha256=xliRG6U2Zf3khG4xcw9WiB-yAoqJSHEGK_VjHOdtgo0,2398 +fsspec/utils.py,sha256=kYBiiw_0AEFJPCYwYlbZ7QxiGwuXdaaxrK6KM3mcFak,23052 diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/WHEEL b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..bab98d675883cc7567a79df485cd7b4f015e376f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/top_level.txt b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..968fea66e533ba30593c7fbfe750c36fae2f3cfe --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec-2024.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +fsspec diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec/caching.py b/llmeval-env/lib/python3.10/site-packages/fsspec/caching.py new file mode 100644 index 0000000000000000000000000000000000000000..b1a51f1e2184f09d2ac7fdb8ce2e3596775ccce3 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec/caching.py @@ -0,0 +1,881 @@ +from __future__ import annotations + +import collections +import functools +import logging +import math +import os +import threading +import warnings +from concurrent.futures import Future, ThreadPoolExecutor +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Generic, + NamedTuple, + OrderedDict, + TypeVar, +) + +if TYPE_CHECKING: + import mmap + + from typing_extensions import ParamSpec + + P = ParamSpec("P") +else: + P = TypeVar("P") + +T = TypeVar("T") + + +logger = logging.getLogger("fsspec") + +Fetcher = Callable[[int, int], bytes] # Maps (start, end) to bytes + + +class BaseCache: + """Pass-though cache: doesn't keep anything, calls every time + + Acts as base class for other cachers + + Parameters + ---------- + blocksize: int + How far to read ahead in numbers of bytes + fetcher: func + Function of the form f(start, end) which gets bytes from remote as + specified + size: int + How big this file is + """ + + name: ClassVar[str] = "none" + + def __init__(self, blocksize: int, fetcher: Fetcher, size: int) -> None: + self.blocksize = blocksize + self.fetcher = fetcher + self.size = size + + def _fetch(self, start: int | None, stop: int | None) -> bytes: + if start is None: + start = 0 + if stop is None: + stop = self.size + if start >= self.size or start >= stop: + return b"" + return self.fetcher(start, stop) + + 
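The `BaseCache` class above fixes the contract that every cacher in this module follows: it is built from a `blocksize`, a `fetcher(start, end) -> bytes` callable, and the total file `size`, and its `_fetch` clamps `None` bounds to `0`/`size` and returns `b""` for reads at or past the end. A minimal sketch of that contract, not part of the diff, using a hypothetical in-memory `backing` buffer to stand in for remote data:

```python
from fsspec.caching import BaseCache

backing = bytes(range(256))                      # stand-in for "remote" file contents

def fetcher(start: int, end: int) -> bytes:
    # maps (start, end) to bytes, exactly as the Fetcher type alias describes
    return backing[start:end]

cache = BaseCache(blocksize=32, fetcher=fetcher, size=len(backing))

assert cache._fetch(0, 16) == backing[:16]       # pass-through: every call reaches the fetcher
assert cache._fetch(None, None) == backing       # None bounds default to 0 and size
assert cache._fetch(300, 310) == b""             # reads starting past EOF short-circuit to b""
```

The subclasses that follow (`MMapCache`, `ReadAheadCache`, `BlockCache`, and so on) keep the same three leading constructor arguments and differ only in the caching policy layered on top.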
+class MMapCache(BaseCache): + """memory-mapped sparse file cache + + Opens temporary file, which is filled blocks-wise when data is requested. + Ensure there is enough disc space in the temporary location. + + This cache method might only work on posix + """ + + name = "mmap" + + def __init__( + self, + blocksize: int, + fetcher: Fetcher, + size: int, + location: str | None = None, + blocks: set[int] | None = None, + ) -> None: + super().__init__(blocksize, fetcher, size) + self.blocks = set() if blocks is None else blocks + self.location = location + self.cache = self._makefile() + + def _makefile(self) -> mmap.mmap | bytearray: + import mmap + import tempfile + + if self.size == 0: + return bytearray() + + # posix version + if self.location is None or not os.path.exists(self.location): + if self.location is None: + fd = tempfile.TemporaryFile() + self.blocks = set() + else: + fd = open(self.location, "wb+") + fd.seek(self.size - 1) + fd.write(b"1") + fd.flush() + else: + fd = open(self.location, "r+b") + + return mmap.mmap(fd.fileno(), self.size) + + def _fetch(self, start: int | None, end: int | None) -> bytes: + logger.debug(f"MMap cache fetching {start}-{end}") + if start is None: + start = 0 + if end is None: + end = self.size + if start >= self.size or start >= end: + return b"" + start_block = start // self.blocksize + end_block = end // self.blocksize + need = [i for i in range(start_block, end_block + 1) if i not in self.blocks] + while need: + # TODO: not a for loop so we can consolidate blocks later to + # make fewer fetch calls; this could be parallel + i = need.pop(0) + sstart = i * self.blocksize + send = min(sstart + self.blocksize, self.size) + logger.debug(f"MMap get block #{i} ({sstart}-{send}") + self.cache[sstart:send] = self.fetcher(sstart, send) + self.blocks.add(i) + + return self.cache[start:end] + + def __getstate__(self) -> dict[str, Any]: + state = self.__dict__.copy() + # Remove the unpicklable entries. + del state["cache"] + return state + + def __setstate__(self, state: dict[str, Any]) -> None: + # Restore instance attributes + self.__dict__.update(state) + self.cache = self._makefile() + + +class ReadAheadCache(BaseCache): + """Cache which reads only when we get beyond a block of data + + This is a much simpler version of BytesCache, and does not attempt to + fill holes in the cache or keep fragments alive. It is best suited to + many small reads in a sequential order (e.g., reading lines from a file). 
+ """ + + name = "readahead" + + def __init__(self, blocksize: int, fetcher: Fetcher, size: int) -> None: + super().__init__(blocksize, fetcher, size) + self.cache = b"" + self.start = 0 + self.end = 0 + + def _fetch(self, start: int | None, end: int | None) -> bytes: + if start is None: + start = 0 + if end is None or end > self.size: + end = self.size + if start >= self.size or start >= end: + return b"" + l = end - start + if start >= self.start and end <= self.end: + # cache hit + return self.cache[start - self.start : end - self.start] + elif self.start <= start < self.end: + # partial hit + part = self.cache[start - self.start :] + l -= len(part) + start = self.end + else: + # miss + part = b"" + end = min(self.size, end + self.blocksize) + self.cache = self.fetcher(start, end) # new block replaces old + self.start = start + self.end = self.start + len(self.cache) + return part + self.cache[:l] + + +class FirstChunkCache(BaseCache): + """Caches the first block of a file only + + This may be useful for file types where the metadata is stored in the header, + but is randomly accessed. + """ + + name = "first" + + def __init__(self, blocksize: int, fetcher: Fetcher, size: int) -> None: + super().__init__(blocksize, fetcher, size) + self.cache: bytes | None = None + + def _fetch(self, start: int | None, end: int | None) -> bytes: + start = start or 0 + end = end or self.size + if start < self.blocksize: + if self.cache is None: + if end > self.blocksize: + data = self.fetcher(0, end) + self.cache = data[: self.blocksize] + return data[start:] + self.cache = self.fetcher(0, self.blocksize) + part = self.cache[start:end] + if end > self.blocksize: + part += self.fetcher(self.blocksize, end) + return part + else: + return self.fetcher(start, end) + + +class BlockCache(BaseCache): + """ + Cache holding memory as a set of blocks. + + Requests are only ever made ``blocksize`` at a time, and are + stored in an LRU cache. The least recently accessed block is + discarded when more than ``maxblocks`` are stored. + + Parameters + ---------- + blocksize : int + The number of bytes to store in each block. + Requests are only ever made for ``blocksize``, so this + should balance the overhead of making a request against + the granularity of the blocks. + fetcher : Callable + size : int + The total size of the file being cached. + maxblocks : int + The maximum number of blocks to cache for. The maximum memory + use for this cache is then ``blocksize * maxblocks``. + """ + + name = "blockcache" + + def __init__( + self, blocksize: int, fetcher: Fetcher, size: int, maxblocks: int = 32 + ) -> None: + super().__init__(blocksize, fetcher, size) + self.nblocks = math.ceil(size / blocksize) + self.maxblocks = maxblocks + self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block) + + def __repr__(self) -> str: + return ( + f"" + ) + + def cache_info(self): + """ + The statistics on the block cache. + + Returns + ------- + NamedTuple + Returned directly from the LRU Cache used internally. 
+ """ + return self._fetch_block_cached.cache_info() + + def __getstate__(self) -> dict[str, Any]: + state = self.__dict__ + del state["_fetch_block_cached"] + return state + + def __setstate__(self, state: dict[str, Any]) -> None: + self.__dict__.update(state) + self._fetch_block_cached = functools.lru_cache(state["maxblocks"])( + self._fetch_block + ) + + def _fetch(self, start: int | None, end: int | None) -> bytes: + if start is None: + start = 0 + if end is None: + end = self.size + if start >= self.size or start >= end: + return b"" + + # byte position -> block numbers + start_block_number = start // self.blocksize + end_block_number = end // self.blocksize + + # these are cached, so safe to do multiple calls for the same start and end. + for block_number in range(start_block_number, end_block_number + 1): + self._fetch_block_cached(block_number) + + return self._read_cache( + start, + end, + start_block_number=start_block_number, + end_block_number=end_block_number, + ) + + def _fetch_block(self, block_number: int) -> bytes: + """ + Fetch the block of data for `block_number`. + """ + if block_number > self.nblocks: + raise ValueError( + f"'block_number={block_number}' is greater than " + f"the number of blocks ({self.nblocks})" + ) + + start = block_number * self.blocksize + end = start + self.blocksize + logger.info("BlockCache fetching block %d", block_number) + block_contents = super()._fetch(start, end) + return block_contents + + def _read_cache( + self, start: int, end: int, start_block_number: int, end_block_number: int + ) -> bytes: + """ + Read from our block cache. + + Parameters + ---------- + start, end : int + The start and end byte positions. + start_block_number, end_block_number : int + The start and end block numbers. + """ + start_pos = start % self.blocksize + end_pos = end % self.blocksize + + if start_block_number == end_block_number: + block: bytes = self._fetch_block_cached(start_block_number) + return block[start_pos:end_pos] + + else: + # read from the initial + out = [self._fetch_block_cached(start_block_number)[start_pos:]] + + # intermediate blocks + # Note: it'd be nice to combine these into one big request. However + # that doesn't play nicely with our LRU cache. + out.extend( + map( + self._fetch_block_cached, + range(start_block_number + 1, end_block_number), + ) + ) + + # final block + out.append(self._fetch_block_cached(end_block_number)[:end_pos]) + + return b"".join(out) + + +class BytesCache(BaseCache): + """Cache which holds data in a in-memory bytes object + + Implements read-ahead by the block size, for semi-random reads progressing + through the file. + + Parameters + ---------- + trim: bool + As we read more data, whether to discard the start of the buffer when + we are more than a blocksize ahead of it. + """ + + name: ClassVar[str] = "bytes" + + def __init__( + self, blocksize: int, fetcher: Fetcher, size: int, trim: bool = True + ) -> None: + super().__init__(blocksize, fetcher, size) + self.cache = b"" + self.start: int | None = None + self.end: int | None = None + self.trim = trim + + def _fetch(self, start: int | None, end: int | None) -> bytes: + # TODO: only set start/end after fetch, in case it fails? + # is this where retry logic might go? 
+ if start is None: + start = 0 + if end is None: + end = self.size + if start >= self.size or start >= end: + return b"" + if ( + self.start is not None + and start >= self.start + and self.end is not None + and end < self.end + ): + # cache hit: we have all the required data + offset = start - self.start + return self.cache[offset : offset + end - start] + + if self.blocksize: + bend = min(self.size, end + self.blocksize) + else: + bend = end + + if bend == start or start > self.size: + return b"" + + if (self.start is None or start < self.start) and ( + self.end is None or end > self.end + ): + # First read, or extending both before and after + self.cache = self.fetcher(start, bend) + self.start = start + else: + assert self.start is not None + assert self.end is not None + + if start < self.start: + if self.end is None or self.end - end > self.blocksize: + self.cache = self.fetcher(start, bend) + self.start = start + else: + new = self.fetcher(start, self.start) + self.start = start + self.cache = new + self.cache + elif self.end is not None and bend > self.end: + if self.end > self.size: + pass + elif end - self.end > self.blocksize: + self.cache = self.fetcher(start, bend) + self.start = start + else: + new = self.fetcher(self.end, bend) + self.cache = self.cache + new + + self.end = self.start + len(self.cache) + offset = start - self.start + out = self.cache[offset : offset + end - start] + if self.trim: + num = (self.end - self.start) // (self.blocksize + 1) + if num > 1: + self.start += self.blocksize * num + self.cache = self.cache[self.blocksize * num :] + return out + + def __len__(self) -> int: + return len(self.cache) + + +class AllBytes(BaseCache): + """Cache entire contents of the file""" + + name: ClassVar[str] = "all" + + def __init__( + self, + blocksize: int | None = None, + fetcher: Fetcher | None = None, + size: int | None = None, + data: bytes | None = None, + ) -> None: + super().__init__(blocksize, fetcher, size) # type: ignore[arg-type] + if data is None: + data = self.fetcher(0, self.size) + self.data = data + + def _fetch(self, start: int | None, stop: int | None) -> bytes: + return self.data[start:stop] + + +class KnownPartsOfAFile(BaseCache): + """ + Cache holding known file parts. + + Parameters + ---------- + blocksize: int + How far to read ahead in numbers of bytes + fetcher: func + Function of the form f(start, end) which gets bytes from remote as + specified + size: int + How big this file is + data: dict + A dictionary mapping explicit `(start, stop)` file-offset tuples + with known bytes. + strict: bool, default True + Whether to fetch reads that go beyond a known byte-range boundary. + If `False`, any read that ends outside a known part will be zero + padded. Note that zero padding will not be used for reads that + begin outside a known byte-range. 
+ """ + + name: ClassVar[str] = "parts" + + def __init__( + self, + blocksize: int, + fetcher: Fetcher, + size: int, + data: dict[tuple[int, int], bytes] = {}, + strict: bool = True, + **_: Any, + ): + super().__init__(blocksize, fetcher, size) + self.strict = strict + + # simple consolidation of contiguous blocks + if data: + old_offsets = sorted(data.keys()) + offsets = [old_offsets[0]] + blocks = [data.pop(old_offsets[0])] + for start, stop in old_offsets[1:]: + start0, stop0 = offsets[-1] + if start == stop0: + offsets[-1] = (start0, stop) + blocks[-1] += data.pop((start, stop)) + else: + offsets.append((start, stop)) + blocks.append(data.pop((start, stop))) + + self.data = dict(zip(offsets, blocks)) + else: + self.data = data + + def _fetch(self, start: int | None, stop: int | None) -> bytes: + if start is None: + start = 0 + if stop is None: + stop = self.size + + out = b"" + for (loc0, loc1), data in self.data.items(): + # If self.strict=False, use zero-padded data + # for reads beyond the end of a "known" buffer + if loc0 <= start < loc1: + off = start - loc0 + out = data[off : off + stop - start] + if not self.strict or loc0 <= stop <= loc1: + # The request is within a known range, or + # it begins within a known range, and we + # are allowed to pad reads beyond the + # buffer with zero + out += b"\x00" * (stop - start - len(out)) + return out + else: + # The request ends outside a known range, + # and we are being "strict" about reads + # beyond the buffer + start = loc1 + break + + # We only get here if there is a request outside the + # known parts of the file. In an ideal world, this + # should never happen + if self.fetcher is None: + # We cannot fetch the data, so raise an error + raise ValueError(f"Read is outside the known file parts: {(start, stop)}. ") + # We can fetch the data, but should warn the user + # that this may be slow + warnings.warn( + f"Read is outside the known file parts: {(start, stop)}. " + f"IO/caching performance may be poor!" 
+ ) + logger.debug(f"KnownPartsOfAFile cache fetching {start}-{stop}") + return out + super()._fetch(start, stop) + + +class UpdatableLRU(Generic[P, T]): + """ + Custom implementation of LRU cache that allows updating keys + + Used by BackgroudBlockCache + """ + + class CacheInfo(NamedTuple): + hits: int + misses: int + maxsize: int + currsize: int + + def __init__(self, func: Callable[P, T], max_size: int = 128) -> None: + self._cache: OrderedDict[Any, T] = collections.OrderedDict() + self._func = func + self._max_size = max_size + self._hits = 0 + self._misses = 0 + self._lock = threading.Lock() + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + if kwargs: + raise TypeError(f"Got unexpected keyword argument {kwargs.keys()}") + with self._lock: + if args in self._cache: + self._cache.move_to_end(args) + self._hits += 1 + return self._cache[args] + + result = self._func(*args, **kwargs) + + with self._lock: + self._cache[args] = result + self._misses += 1 + if len(self._cache) > self._max_size: + self._cache.popitem(last=False) + + return result + + def is_key_cached(self, *args: Any) -> bool: + with self._lock: + return args in self._cache + + def add_key(self, result: T, *args: Any) -> None: + with self._lock: + self._cache[args] = result + if len(self._cache) > self._max_size: + self._cache.popitem(last=False) + + def cache_info(self) -> UpdatableLRU.CacheInfo: + with self._lock: + return self.CacheInfo( + maxsize=self._max_size, + currsize=len(self._cache), + hits=self._hits, + misses=self._misses, + ) + + +class BackgroundBlockCache(BaseCache): + """ + Cache holding memory as a set of blocks with pre-loading of + the next block in the background. + + Requests are only ever made ``blocksize`` at a time, and are + stored in an LRU cache. The least recently accessed block is + discarded when more than ``maxblocks`` are stored. If the + next block is not in cache, it is loaded in a separate thread + in non-blocking way. + + Parameters + ---------- + blocksize : int + The number of bytes to store in each block. + Requests are only ever made for ``blocksize``, so this + should balance the overhead of making a request against + the granularity of the blocks. + fetcher : Callable + size : int + The total size of the file being cached. + maxblocks : int + The maximum number of blocks to cache for. The maximum memory + use for this cache is then ``blocksize * maxblocks``. + """ + + name: ClassVar[str] = "background" + + def __init__( + self, blocksize: int, fetcher: Fetcher, size: int, maxblocks: int = 32 + ) -> None: + super().__init__(blocksize, fetcher, size) + self.nblocks = math.ceil(size / blocksize) + self.maxblocks = maxblocks + self._fetch_block_cached = UpdatableLRU(self._fetch_block, maxblocks) + + self._thread_executor = ThreadPoolExecutor(max_workers=1) + self._fetch_future_block_number: int | None = None + self._fetch_future: Future[bytes] | None = None + self._fetch_future_lock = threading.Lock() + + def __repr__(self) -> str: + return ( + f"" + ) + + def cache_info(self) -> UpdatableLRU.CacheInfo: + """ + The statistics on the block cache. + + Returns + ------- + NamedTuple + Returned directly from the LRU Cache used internally. 
+ """ + return self._fetch_block_cached.cache_info() + + def __getstate__(self) -> dict[str, Any]: + state = self.__dict__ + del state["_fetch_block_cached"] + del state["_thread_executor"] + del state["_fetch_future_block_number"] + del state["_fetch_future"] + del state["_fetch_future_lock"] + return state + + def __setstate__(self, state) -> None: + self.__dict__.update(state) + self._fetch_block_cached = UpdatableLRU(self._fetch_block, state["maxblocks"]) + self._thread_executor = ThreadPoolExecutor(max_workers=1) + self._fetch_future_block_number = None + self._fetch_future = None + self._fetch_future_lock = threading.Lock() + + def _fetch(self, start: int | None, end: int | None) -> bytes: + if start is None: + start = 0 + if end is None: + end = self.size + if start >= self.size or start >= end: + return b"" + + # byte position -> block numbers + start_block_number = start // self.blocksize + end_block_number = end // self.blocksize + + fetch_future_block_number = None + fetch_future = None + with self._fetch_future_lock: + # Background thread is running. Check we we can or must join it. + if self._fetch_future is not None: + assert self._fetch_future_block_number is not None + if self._fetch_future.done(): + logger.info("BlockCache joined background fetch without waiting.") + self._fetch_block_cached.add_key( + self._fetch_future.result(), self._fetch_future_block_number + ) + # Cleanup the fetch variables. Done with fetching the block. + self._fetch_future_block_number = None + self._fetch_future = None + else: + # Must join if we need the block for the current fetch + must_join = bool( + start_block_number + <= self._fetch_future_block_number + <= end_block_number + ) + if must_join: + # Copy to the local variables to release lock + # before waiting for result + fetch_future_block_number = self._fetch_future_block_number + fetch_future = self._fetch_future + + # Cleanup the fetch variables. Have a local copy. + self._fetch_future_block_number = None + self._fetch_future = None + + # Need to wait for the future for the current read + if fetch_future is not None: + logger.info("BlockCache waiting for background fetch.") + # Wait until result and put it in cache + self._fetch_block_cached.add_key( + fetch_future.result(), fetch_future_block_number + ) + + # these are cached, so safe to do multiple calls for the same start and end. + for block_number in range(start_block_number, end_block_number + 1): + self._fetch_block_cached(block_number) + + # fetch next block in the background if nothing is running in the background, + # the block is within file and it is not already cached + end_block_plus_1 = end_block_number + 1 + with self._fetch_future_lock: + if ( + self._fetch_future is None + and end_block_plus_1 <= self.nblocks + and not self._fetch_block_cached.is_key_cached(end_block_plus_1) + ): + self._fetch_future_block_number = end_block_plus_1 + self._fetch_future = self._thread_executor.submit( + self._fetch_block, end_block_plus_1, "async" + ) + + return self._read_cache( + start, + end, + start_block_number=start_block_number, + end_block_number=end_block_number, + ) + + def _fetch_block(self, block_number: int, log_info: str = "sync") -> bytes: + """ + Fetch the block of data for `block_number`. 
+ """ + if block_number > self.nblocks: + raise ValueError( + f"'block_number={block_number}' is greater than " + f"the number of blocks ({self.nblocks})" + ) + + start = block_number * self.blocksize + end = start + self.blocksize + logger.info("BlockCache fetching block (%s) %d", log_info, block_number) + block_contents = super()._fetch(start, end) + return block_contents + + def _read_cache( + self, start: int, end: int, start_block_number: int, end_block_number: int + ) -> bytes: + """ + Read from our block cache. + + Parameters + ---------- + start, end : int + The start and end byte positions. + start_block_number, end_block_number : int + The start and end block numbers. + """ + start_pos = start % self.blocksize + end_pos = end % self.blocksize + + if start_block_number == end_block_number: + block = self._fetch_block_cached(start_block_number) + return block[start_pos:end_pos] + + else: + # read from the initial + out = [self._fetch_block_cached(start_block_number)[start_pos:]] + + # intermediate blocks + # Note: it'd be nice to combine these into one big request. However + # that doesn't play nicely with our LRU cache. + out.extend( + map( + self._fetch_block_cached, + range(start_block_number + 1, end_block_number), + ) + ) + + # final block + out.append(self._fetch_block_cached(end_block_number)[:end_pos]) + + return b"".join(out) + + +caches: dict[str | None, type[BaseCache]] = { + # one custom case + None: BaseCache, +} + + +def register_cache(cls: type[BaseCache], clobber: bool = False) -> None: + """'Register' cache implementation. + + Parameters + ---------- + clobber: bool, optional + If set to True (default is False) - allow to overwrite existing + entry. + + Raises + ------ + ValueError + """ + name = cls.name + if not clobber and name in caches: + raise ValueError(f"Cache with name {name!r} is already known: {caches[name]}") + caches[name] = cls + + +for c in ( + BaseCache, + MMapCache, + BytesCache, + ReadAheadCache, + BlockCache, + FirstChunkCache, + AllBytes, + KnownPartsOfAFile, + BackgroundBlockCache, +): + register_cache(c) diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec/dircache.py b/llmeval-env/lib/python3.10/site-packages/fsspec/dircache.py new file mode 100644 index 0000000000000000000000000000000000000000..eca19566b135e5a7a4f6e7407d56411ec58bfe44 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec/dircache.py @@ -0,0 +1,98 @@ +import time +from collections.abc import MutableMapping +from functools import lru_cache + + +class DirCache(MutableMapping): + """ + Caching of directory listings, in a structure like:: + + {"path0": [ + {"name": "path0/file0", + "size": 123, + "type": "file", + ... + }, + {"name": "path0/file1", + }, + ... + ], + "path1": [...] + } + + Parameters to this class control listing expiry or indeed turn + caching off + """ + + def __init__( + self, + use_listings_cache=True, + listings_expiry_time=None, + max_paths=None, + **kwargs, + ): + """ + + Parameters + ---------- + use_listings_cache: bool + If False, this cache never returns items, but always reports KeyError, + and setting items has no effect + listings_expiry_time: int or float (optional) + Time in seconds that a listing is considered valid. If None, + listings do not expire. + max_paths: int (optional) + The number of most recent listings that are considered valid; 'recent' + refers to when the entry was set. 
+ """ + self._cache = {} + self._times = {} + if max_paths: + self._q = lru_cache(max_paths + 1)(lambda key: self._cache.pop(key, None)) + self.use_listings_cache = use_listings_cache + self.listings_expiry_time = listings_expiry_time + self.max_paths = max_paths + + def __getitem__(self, item): + if self.listings_expiry_time is not None: + if self._times.get(item, 0) - time.time() < -self.listings_expiry_time: + del self._cache[item] + if self.max_paths: + self._q(item) + return self._cache[item] # maybe raises KeyError + + def clear(self): + self._cache.clear() + + def __len__(self): + return len(self._cache) + + def __contains__(self, item): + try: + self[item] + return True + except KeyError: + return False + + def __setitem__(self, key, value): + if not self.use_listings_cache: + return + if self.max_paths: + self._q(key) + self._cache[key] = value + if self.listings_expiry_time is not None: + self._times[key] = time.time() + + def __delitem__(self, key): + del self._cache[key] + + def __iter__(self): + entries = list(self._cache) + + return (k for k in entries if k in self) + + def __reduce__(self): + return ( + DirCache, + (self.use_listings_cache, self.listings_expiry_time, self.max_paths), + ) diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec/generic.py b/llmeval-env/lib/python3.10/site-packages/fsspec/generic.py new file mode 100644 index 0000000000000000000000000000000000000000..4fb58ac5b4035c9af01273c115e6409a19b26488 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec/generic.py @@ -0,0 +1,408 @@ +from __future__ import annotations + +import inspect +import logging +import os +import shutil +import uuid +from typing import Optional + +from .asyn import AsyncFileSystem, _run_coros_in_chunks, sync_wrapper +from .callbacks import DEFAULT_CALLBACK +from .core import filesystem, get_filesystem_class, split_protocol, url_to_fs + +_generic_fs = {} +logger = logging.getLogger("fsspec.generic") + + +def set_generic_fs(protocol, **storage_options): + _generic_fs[protocol] = filesystem(protocol, **storage_options) + + +default_method = "default" + + +def _resolve_fs(url, method=None, protocol=None, storage_options=None): + """Pick instance of backend FS""" + method = method or default_method + protocol = protocol or split_protocol(url)[0] + storage_options = storage_options or {} + if method == "default": + return filesystem(protocol) + if method == "generic": + return _generic_fs[protocol] + if method == "current": + cls = get_filesystem_class(protocol) + return cls.current() + if method == "options": + fs, _ = url_to_fs(url, **storage_options.get(protocol, {})) + return fs + raise ValueError(f"Unknown FS resolution method: {method}") + + +def rsync( + source, + destination, + delete_missing=False, + source_field="size", + dest_field="size", + update_cond="different", + inst_kwargs=None, + fs=None, + **kwargs, +): + """Sync files between two directory trees + + (experimental) + + Parameters + ---------- + source: str + Root of the directory tree to take files from. This must be a directory, but + do not include any terminating "/" character + destination: str + Root path to copy into. The contents of this location should be + identical to the contents of ``source`` when done. This will be made a + directory, and the terminal "/" should not be included. + delete_missing: bool + If there are paths in the destination that don't exist in the + source and this is True, delete them. Otherwise, leave them alone. 
+ source_field: str | callable + If ``update_field`` is "different", this is the key in the info + of source files to consider for difference. Maybe a function of the + info dict. + dest_field: str | callable + If ``update_field`` is "different", this is the key in the info + of destination files to consider for difference. May be a function of + the info dict. + update_cond: "different"|"always"|"never" + If "always", every file is copied, regardless of whether it exists in + the destination. If "never", files that exist in the destination are + not copied again. If "different" (default), only copy if the info + fields given by ``source_field`` and ``dest_field`` (usually "size") + are different. Other comparisons may be added in the future. + inst_kwargs: dict|None + If ``fs`` is None, use this set of keyword arguments to make a + GenericFileSystem instance + fs: GenericFileSystem|None + Instance to use if explicitly given. The instance defines how to + to make downstream file system instances from paths. + + Returns + ------- + dict of the copy operations that were performed, {source: destination} + """ + fs = fs or GenericFileSystem(**(inst_kwargs or {})) + source = fs._strip_protocol(source) + destination = fs._strip_protocol(destination) + allfiles = fs.find(source, withdirs=True, detail=True) + if not fs.isdir(source): + raise ValueError("Can only rsync on a directory") + otherfiles = fs.find(destination, withdirs=True, detail=True) + dirs = [ + a + for a, v in allfiles.items() + if v["type"] == "directory" and a.replace(source, destination) not in otherfiles + ] + logger.debug(f"{len(dirs)} directories to create") + if dirs: + fs.make_many_dirs( + [dirn.replace(source, destination) for dirn in dirs], exist_ok=True + ) + allfiles = {a: v for a, v in allfiles.items() if v["type"] == "file"} + logger.debug(f"{len(allfiles)} files to consider for copy") + to_delete = [ + o + for o, v in otherfiles.items() + if o.replace(destination, source) not in allfiles and v["type"] == "file" + ] + for k, v in allfiles.copy().items(): + otherfile = k.replace(source, destination) + if otherfile in otherfiles: + if update_cond == "always": + allfiles[k] = otherfile + elif update_cond == "different": + inf1 = source_field(v) if callable(source_field) else v[source_field] + v2 = otherfiles[otherfile] + inf2 = dest_field(v2) if callable(dest_field) else v2[dest_field] + if inf1 != inf2: + # details mismatch, make copy + allfiles[k] = otherfile + else: + # details match, don't copy + allfiles.pop(k) + else: + # file not in target yet + allfiles[k] = otherfile + logger.debug(f"{len(allfiles)} files to copy") + if allfiles: + source_files, target_files = zip(*allfiles.items()) + fs.cp(source_files, target_files, **kwargs) + logger.debug(f"{len(to_delete)} files to delete") + if delete_missing: + fs.rm(to_delete) + return allfiles + + +class GenericFileSystem(AsyncFileSystem): + """Wrapper over all other FS types + + + + This implementation is a single unified interface to be able to run FS operations + over generic URLs, and dispatch to the specific implementations using the URL + protocol prefix. + + Note: instances of this FS are always async, even if you never use it with any async + backend. + """ + + protocol = "generic" # there is no real reason to ever use a protocol with this FS + + def __init__(self, default_method="default", **kwargs): + """ + + Parameters + ---------- + default_method: str (optional) + Defines how to configure backend FS instances. 
Options are: + - "default": instantiate like FSClass(), with no + extra arguments; this is the default instance of that FS, and can be + configured via the config system + - "generic": takes instances from the `_generic_fs` dict in this module, + which you must populate before use. Keys are by protocol + - "current": takes the most recently instantiated version of each FS + """ + self.method = default_method + super().__init__(**kwargs) + + def _parent(self, path): + fs = _resolve_fs(path, self.method) + return fs.unstrip_protocol(fs._parent(path)) + + def _strip_protocol(self, path): + # normalization only + fs = _resolve_fs(path, self.method) + return fs.unstrip_protocol(fs._strip_protocol(path)) + + async def _find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs): + fs = _resolve_fs(path, self.method) + if fs.async_impl: + out = await fs._find( + path, maxdepth=maxdepth, withdirs=withdirs, detail=True, **kwargs + ) + else: + out = fs.find( + path, maxdepth=maxdepth, withdirs=withdirs, detail=True, **kwargs + ) + result = {} + for k, v in out.items(): + name = fs.unstrip_protocol(k) + v["name"] = name + result[name] = v + if detail: + return result + return list(result) + + async def _info(self, url, **kwargs): + fs = _resolve_fs(url, self.method) + if fs.async_impl: + out = await fs._info(url, **kwargs) + else: + out = fs.info(url, **kwargs) + out["name"] = fs.unstrip_protocol(out["name"]) + return out + + async def _ls( + self, + url, + detail=True, + **kwargs, + ): + fs = _resolve_fs(url, self.method) + if fs.async_impl: + out = await fs._ls(url, detail=True, **kwargs) + else: + out = fs.ls(url, detail=True, **kwargs) + for o in out: + o["name"] = fs.unstrip_protocol(o["name"]) + if detail: + return out + else: + return [o["name"] for o in out] + + async def _cat_file( + self, + url, + **kwargs, + ): + fs = _resolve_fs(url, self.method) + if fs.async_impl: + return await fs._cat_file(url, **kwargs) + else: + return fs.cat_file(url, **kwargs) + + async def _pipe_file( + self, + path, + value, + **kwargs, + ): + fs = _resolve_fs(path, self.method) + if fs.async_impl: + return await fs._pipe_file(path, value, **kwargs) + else: + return fs.pipe_file(path, value, **kwargs) + + async def _rm(self, url, **kwargs): + urls = url + if isinstance(urls, str): + urls = [urls] + fs = _resolve_fs(urls[0], self.method) + if fs.async_impl: + await fs._rm(urls, **kwargs) + else: + fs.rm(url, **kwargs) + + async def _makedirs(self, path, exist_ok=False): + logger.debug("Make dir %s", path) + fs = _resolve_fs(path, self.method) + if fs.async_impl: + await fs._makedirs(path, exist_ok=exist_ok) + else: + fs.makedirs(path, exist_ok=exist_ok) + + def rsync(self, source, destination, **kwargs): + """Sync files between two directory trees + + See `func:rsync` for more details. 
+ """ + rsync(source, destination, fs=self, **kwargs) + + async def _cp_file( + self, + url, + url2, + blocksize=2**20, + callback=DEFAULT_CALLBACK, + **kwargs, + ): + fs = _resolve_fs(url, self.method) + fs2 = _resolve_fs(url2, self.method) + if fs is fs2: + # pure remote + if fs.async_impl: + return await fs._cp_file(url, url2, **kwargs) + else: + return fs.cp_file(url, url2, **kwargs) + kw = {"blocksize": 0, "cache_type": "none"} + try: + f1 = ( + await fs.open_async(url, "rb") + if hasattr(fs, "open_async") + else fs.open(url, "rb", **kw) + ) + callback.set_size(await maybe_await(f1.size)) + f2 = ( + await fs2.open_async(url2, "wb") + if hasattr(fs2, "open_async") + else fs2.open(url2, "wb", **kw) + ) + while f1.size is None or f2.tell() < f1.size: + data = await maybe_await(f1.read(blocksize)) + if f1.size is None and not data: + break + await maybe_await(f2.write(data)) + callback.absolute_update(f2.tell()) + finally: + try: + await maybe_await(f2.close()) + await maybe_await(f1.close()) + except NameError: + # fail while opening f1 or f2 + pass + + async def _make_many_dirs(self, urls, exist_ok=True): + fs = _resolve_fs(urls[0], self.method) + if fs.async_impl: + coros = [fs._makedirs(u, exist_ok=exist_ok) for u in urls] + await _run_coros_in_chunks(coros) + else: + for u in urls: + fs.makedirs(u, exist_ok=exist_ok) + + make_many_dirs = sync_wrapper(_make_many_dirs) + + async def _copy( + self, + path1: list[str], + path2: list[str], + recursive: bool = False, + on_error: str = "ignore", + maxdepth: Optional[int] = None, + batch_size: Optional[int] = None, + tempdir: Optional[str] = None, + **kwargs, + ): + if recursive: + raise NotImplementedError + fs = _resolve_fs(path1[0], self.method) + fs2 = _resolve_fs(path2[0], self.method) + # not expanding paths atm., assume call is from rsync() + if fs is fs2: + # pure remote + if fs.async_impl: + return await fs._copy(path1, path2, **kwargs) + else: + return fs.copy(path1, path2, **kwargs) + await copy_file_op( + fs, path1, fs2, path2, tempdir, batch_size, on_error=on_error + ) + + +async def copy_file_op( + fs1, url1, fs2, url2, tempdir=None, batch_size=20, on_error="ignore" +): + import tempfile + + tempdir = tempdir or tempfile.mkdtemp() + try: + coros = [ + _copy_file_op( + fs1, + u1, + fs2, + u2, + os.path.join(tempdir, uuid.uuid4().hex), + on_error=on_error, + ) + for u1, u2 in zip(url1, url2) + ] + await _run_coros_in_chunks(coros, batch_size=batch_size) + finally: + shutil.rmtree(tempdir) + + +async def _copy_file_op(fs1, url1, fs2, url2, local, on_error="ignore"): + ex = () if on_error == "raise" else Exception + logger.debug("Copy %s -> %s", url1, url2) + try: + if fs1.async_impl: + await fs1._get_file(url1, local) + else: + fs1.get_file(url1, local) + if fs2.async_impl: + await fs2._put_file(local, url2) + else: + fs2.put_file(local, url2) + os.unlink(local) + logger.debug("Copy %s -> %s; done", url1, url2) + except ex as e: + logger.debug("ignoring cp exception for %s: %s", url1, e) + + +async def maybe_await(cor): + if inspect.iscoroutine(cor): + return await cor + else: + return cor diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec/mapping.py b/llmeval-env/lib/python3.10/site-packages/fsspec/mapping.py new file mode 100644 index 0000000000000000000000000000000000000000..93ebd1df3a127ab1ad7d0d218cdb4fe0217f44bd --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec/mapping.py @@ -0,0 +1,251 @@ +import array +import logging +import posixpath +import warnings +from collections.abc import MutableMapping 
+from functools import cached_property + +from fsspec.core import url_to_fs + +logger = logging.getLogger("fsspec.mapping") + + +class FSMap(MutableMapping): + """Wrap a FileSystem instance as a mutable wrapping. + + The keys of the mapping become files under the given root, and the + values (which must be bytes) the contents of those files. + + Parameters + ---------- + root: string + prefix for all the files + fs: FileSystem instance + check: bool (=True) + performs a touch at the location, to check for write access. + + Examples + -------- + >>> fs = FileSystem(**parameters) # doctest: +SKIP + >>> d = FSMap('my-data/path/', fs) # doctest: +SKIP + or, more likely + >>> d = fs.get_mapper('my-data/path/') + + >>> d['loc1'] = b'Hello World' # doctest: +SKIP + >>> list(d.keys()) # doctest: +SKIP + ['loc1'] + >>> d['loc1'] # doctest: +SKIP + b'Hello World' + """ + + def __init__(self, root, fs, check=False, create=False, missing_exceptions=None): + self.fs = fs + self.root = fs._strip_protocol(root) + self._root_key_to_str = fs._strip_protocol(posixpath.join(root, "x"))[:-1] + if missing_exceptions is None: + missing_exceptions = ( + FileNotFoundError, + IsADirectoryError, + NotADirectoryError, + ) + self.missing_exceptions = missing_exceptions + self.check = check + self.create = create + if create: + if not self.fs.exists(root): + self.fs.mkdir(root) + if check: + if not self.fs.exists(root): + raise ValueError( + f"Path {root} does not exist. Create " + f" with the ``create=True`` keyword" + ) + self.fs.touch(root + "/a") + self.fs.rm(root + "/a") + + @cached_property + def dirfs(self): + """dirfs instance that can be used with the same keys as the mapper""" + from .implementations.dirfs import DirFileSystem + + return DirFileSystem(path=self._root_key_to_str, fs=self.fs) + + def clear(self): + """Remove all keys below root - empties out mapping""" + logger.info("Clear mapping at %s", self.root) + try: + self.fs.rm(self.root, True) + self.fs.mkdir(self.root) + except: # noqa: E722 + pass + + def getitems(self, keys, on_error="raise"): + """Fetch multiple items from the store + + If the backend is async-able, this might proceed concurrently + + Parameters + ---------- + keys: list(str) + They keys to be fetched + on_error : "raise", "omit", "return" + If raise, an underlying exception will be raised (converted to KeyError + if the type is in self.missing_exceptions); if omit, keys with exception + will simply not be included in the output; if "return", all keys are + included in the output, but the value will be bytes or an exception + instance. 
+ + Returns + ------- + dict(key, bytes|exception) + """ + keys2 = [self._key_to_str(k) for k in keys] + oe = on_error if on_error == "raise" else "return" + try: + out = self.fs.cat(keys2, on_error=oe) + if isinstance(out, bytes): + out = {keys2[0]: out} + except self.missing_exceptions as e: + raise KeyError from e + out = { + k: (KeyError() if isinstance(v, self.missing_exceptions) else v) + for k, v in out.items() + } + return { + key: out[k2] + for key, k2 in zip(keys, keys2) + if on_error == "return" or not isinstance(out[k2], BaseException) + } + + def setitems(self, values_dict): + """Set the values of multiple items in the store + + Parameters + ---------- + values_dict: dict(str, bytes) + """ + values = {self._key_to_str(k): maybe_convert(v) for k, v in values_dict.items()} + self.fs.pipe(values) + + def delitems(self, keys): + """Remove multiple keys from the store""" + self.fs.rm([self._key_to_str(k) for k in keys]) + + def _key_to_str(self, key): + """Generate full path for the key""" + if not isinstance(key, str): + # raise TypeError("key must be of type `str`, got `{type(key).__name__}`" + warnings.warn( + "from fsspec 2023.5 onward FSMap non-str keys will raise TypeError", + DeprecationWarning, + ) + if isinstance(key, list): + key = tuple(key) + key = str(key) + return f"{self._root_key_to_str}{key}".rstrip("/") + + def _str_to_key(self, s): + """Strip path of to leave key name""" + return s[len(self.root) :].lstrip("/") + + def __getitem__(self, key, default=None): + """Retrieve data""" + k = self._key_to_str(key) + try: + result = self.fs.cat(k) + except self.missing_exceptions: + if default is not None: + return default + raise KeyError(key) + return result + + def pop(self, key, default=None): + """Pop data""" + result = self.__getitem__(key, default) + try: + del self[key] + except KeyError: + pass + return result + + def __setitem__(self, key, value): + """Store value in key""" + key = self._key_to_str(key) + self.fs.mkdirs(self.fs._parent(key), exist_ok=True) + self.fs.pipe_file(key, maybe_convert(value)) + + def __iter__(self): + return (self._str_to_key(x) for x in self.fs.find(self.root)) + + def __len__(self): + return len(self.fs.find(self.root)) + + def __delitem__(self, key): + """Remove key""" + try: + self.fs.rm(self._key_to_str(key)) + except: # noqa: E722 + raise KeyError + + def __contains__(self, key): + """Does key exist in mapping?""" + path = self._key_to_str(key) + return self.fs.isfile(path) + + def __reduce__(self): + return FSMap, (self.root, self.fs, False, False, self.missing_exceptions) + + +def maybe_convert(value): + if isinstance(value, array.array) or hasattr(value, "__array__"): + # bytes-like things + if hasattr(value, "dtype") and value.dtype.kind in "Mm": + # The buffer interface doesn't support datetime64/timdelta64 numpy + # arrays + value = value.view("int64") + value = bytes(memoryview(value)) + return value + + +def get_mapper( + url="", + check=False, + create=False, + missing_exceptions=None, + alternate_root=None, + **kwargs, +): + """Create key-value interface for given URL and options + + The URL will be of the form "protocol://location" and point to the root + of the mapper required. All keys will be file-names below this location, + and their values the contents of each key. + + Also accepts compound URLs like zip::s3://bucket/file.zip , see ``fsspec.open``. 
+ + Parameters + ---------- + url: str + Root URL of mapping + check: bool + Whether to attempt to read from the location before instantiation, to + check that the mapping does exist + create: bool + Whether to make the directory corresponding to the root before + instantiating + missing_exceptions: None or tuple + If given, these exception types will be regarded as missing keys and + return KeyError when trying to read data. By default, you get + (FileNotFoundError, IsADirectoryError, NotADirectoryError) + alternate_root: None or str + In cases of complex URLs, the parser may fail to pick the correct part + for the mapper root, so this arg can override + + Returns + ------- + ``FSMap`` instance, the dict-like key-value store. + """ + # Removing protocol here - could defer to each open() on the backend + fs, urlpath = url_to_fs(url, **kwargs) + root = alternate_root if alternate_root is not None else urlpath + return FSMap(root, fs, check, create, missing_exceptions=missing_exceptions) diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec/registry.py b/llmeval-env/lib/python3.10/site-packages/fsspec/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..8a36e0948332e324a41487efc757bc1b0acc50c6 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec/registry.py @@ -0,0 +1,301 @@ +from __future__ import annotations + +import importlib +import types +import warnings + +__all__ = ["registry", "get_filesystem_class", "default"] + +# internal, mutable +_registry: dict[str, type] = {} + +# external, immutable +registry = types.MappingProxyType(_registry) +default = "file" + + +def register_implementation(name, cls, clobber=False, errtxt=None): + """Add implementation class to the registry + + Parameters + ---------- + name: str + Protocol name to associate with the class + cls: class or str + if a class: fsspec-compliant implementation class (normally inherits from + ``fsspec.AbstractFileSystem``, gets added straight to the registry. If a + str, the full path to an implementation class like package.module.class, + which gets added to known_implementations, + so the import is deferred until the filesystem is actually used. + clobber: bool (optional) + Whether to overwrite a protocol with the same name; if False, will raise + instead. + errtxt: str (optional) + If given, then a failure to import the given class will result in this + text being given. + """ + if isinstance(cls, str): + if name in known_implementations and clobber is False: + if cls != known_implementations[name]["class"]: + raise ValueError( + f"Name ({name}) already in the known_implementations and clobber " + f"is False" + ) + else: + known_implementations[name] = { + "class": cls, + "err": errtxt or f"{cls} import failed for protocol {name}", + } + + else: + if name in registry and clobber is False: + if _registry[name] is not cls: + raise ValueError( + f"Name ({name}) already in the registry and clobber is False" + ) + else: + _registry[name] = cls + + +# protocols mapped to the class which implements them. 
This dict can be +# updated with register_implementation +known_implementations = { + "abfs": { + "class": "adlfs.AzureBlobFileSystem", + "err": "Install adlfs to access Azure Datalake Gen2 and Azure Blob Storage", + }, + "adl": { + "class": "adlfs.AzureDatalakeFileSystem", + "err": "Install adlfs to access Azure Datalake Gen1", + }, + "arrow_hdfs": { + "class": "fsspec.implementations.arrow.HadoopFileSystem", + "err": "pyarrow and local java libraries required for HDFS", + }, + "asynclocal": { + "class": "morefs.asyn_local.AsyncLocalFileSystem", + "err": "Install 'morefs[asynclocalfs]' to use AsyncLocalFileSystem", + }, + "az": { + "class": "adlfs.AzureBlobFileSystem", + "err": "Install adlfs to access Azure Datalake Gen2 and Azure Blob Storage", + }, + "blockcache": {"class": "fsspec.implementations.cached.CachingFileSystem"}, + "box": { + "class": "boxfs.BoxFileSystem", + "err": "Please install boxfs to access BoxFileSystem", + }, + "cached": {"class": "fsspec.implementations.cached.CachingFileSystem"}, + "dask": { + "class": "fsspec.implementations.dask.DaskWorkerFileSystem", + "err": "Install dask distributed to access worker file system", + }, + "data": {"class": "fsspec.implementations.data.DataFileSystem"}, + "dbfs": { + "class": "fsspec.implementations.dbfs.DatabricksFileSystem", + "err": "Install the requests package to use the DatabricksFileSystem", + }, + "dir": {"class": "fsspec.implementations.dirfs.DirFileSystem"}, + "dropbox": { + "class": "dropboxdrivefs.DropboxDriveFileSystem", + "err": ( + 'DropboxFileSystem requires "dropboxdrivefs","requests" and "' + '"dropbox" to be installed' + ), + }, + "dvc": { + "class": "dvc.api.DVCFileSystem", + "err": "Install dvc to access DVCFileSystem", + }, + "file": {"class": "fsspec.implementations.local.LocalFileSystem"}, + "filecache": {"class": "fsspec.implementations.cached.WholeFileCacheFileSystem"}, + "ftp": {"class": "fsspec.implementations.ftp.FTPFileSystem"}, + "gcs": { + "class": "gcsfs.GCSFileSystem", + "err": "Please install gcsfs to access Google Storage", + }, + "gdrive": { + "class": "gdrivefs.GoogleDriveFileSystem", + "err": "Please install gdrivefs for access to Google Drive", + }, + "generic": {"class": "fsspec.generic.GenericFileSystem"}, + "git": { + "class": "fsspec.implementations.git.GitFileSystem", + "err": "Install pygit2 to browse local git repos", + }, + "github": { + "class": "fsspec.implementations.github.GithubFileSystem", + "err": "Install the requests package to use the github FS", + }, + "gs": { + "class": "gcsfs.GCSFileSystem", + "err": "Please install gcsfs to access Google Storage", + }, + "hdfs": { + "class": "fsspec.implementations.arrow.HadoopFileSystem", + "err": "pyarrow and local java libraries required for HDFS", + }, + "hf": { + "class": "huggingface_hub.HfFileSystem", + "err": "Install huggingface_hub to access HfFileSystem", + }, + "http": { + "class": "fsspec.implementations.http.HTTPFileSystem", + "err": 'HTTPFileSystem requires "requests" and "aiohttp" to be installed', + }, + "https": { + "class": "fsspec.implementations.http.HTTPFileSystem", + "err": 'HTTPFileSystem requires "requests" and "aiohttp" to be installed', + }, + "jlab": { + "class": "fsspec.implementations.jupyter.JupyterFileSystem", + "err": "Jupyter FS requires requests to be installed", + }, + "jupyter": { + "class": "fsspec.implementations.jupyter.JupyterFileSystem", + "err": "Jupyter FS requires requests to be installed", + }, + "lakefs": { + "class": "lakefs_spec.LakeFSFileSystem", + "err": "Please install lakefs-spec 
to access LakeFSFileSystem", + }, + "libarchive": { + "class": "fsspec.implementations.libarchive.LibArchiveFileSystem", + "err": "LibArchive requires to be installed", + }, + "local": {"class": "fsspec.implementations.local.LocalFileSystem"}, + "memory": {"class": "fsspec.implementations.memory.MemoryFileSystem"}, + "oci": { + "class": "ocifs.OCIFileSystem", + "err": "Install ocifs to access OCI Object Storage", + }, + "ocilake": { + "class": "ocifs.OCIFileSystem", + "err": "Install ocifs to access OCI Data Lake", + }, + "oss": { + "class": "ossfs.OSSFileSystem", + "err": "Install ossfs to access Alibaba Object Storage System", + }, + "reference": {"class": "fsspec.implementations.reference.ReferenceFileSystem"}, + "root": { + "class": "fsspec_xrootd.XRootDFileSystem", + "err": ( + "Install fsspec-xrootd to access xrootd storage system. " + "Note: 'root' is the protocol name for xrootd storage systems, " + "not referring to root directories" + ), + }, + "s3": {"class": "s3fs.S3FileSystem", "err": "Install s3fs to access S3"}, + "s3a": {"class": "s3fs.S3FileSystem", "err": "Install s3fs to access S3"}, + "sftp": { + "class": "fsspec.implementations.sftp.SFTPFileSystem", + "err": 'SFTPFileSystem requires "paramiko" to be installed', + }, + "simplecache": {"class": "fsspec.implementations.cached.SimpleCacheFileSystem"}, + "smb": { + "class": "fsspec.implementations.smb.SMBFileSystem", + "err": 'SMB requires "smbprotocol" or "smbprotocol[kerberos]" installed', + }, + "ssh": { + "class": "fsspec.implementations.sftp.SFTPFileSystem", + "err": 'SFTPFileSystem requires "paramiko" to be installed', + }, + "tar": {"class": "fsspec.implementations.tar.TarFileSystem"}, + "wandb": {"class": "wandbfs.WandbFS", "err": "Install wandbfs to access wandb"}, + "webdav": { + "class": "webdav4.fsspec.WebdavFileSystem", + "err": "Install webdav4 to access WebDAV", + }, + "webhdfs": { + "class": "fsspec.implementations.webhdfs.WebHDFS", + "err": 'webHDFS access requires "requests" to be installed', + }, + "zip": {"class": "fsspec.implementations.zip.ZipFileSystem"}, +} + + +def get_filesystem_class(protocol): + """Fetch named protocol implementation from the registry + + The dict ``known_implementations`` maps protocol names to the locations + of classes implementing the corresponding file-system. When used for the + first time, appropriate imports will happen and the class will be placed in + the registry. All subsequent calls will fetch directly from the registry. + + Some protocol implementations require additional dependencies, and so the + import may fail. In this case, the string in the "err" field of the + ``known_implementations`` will be given as the error message. + """ + if not protocol: + protocol = default + + if protocol not in registry: + if protocol not in known_implementations: + raise ValueError(f"Protocol not known: {protocol}") + bit = known_implementations[protocol] + try: + register_implementation(protocol, _import_class(bit["class"])) + except ImportError as e: + raise ImportError(bit["err"]) from e + cls = registry[protocol] + if getattr(cls, "protocol", None) in ("abstract", None): + cls.protocol = protocol + + return cls + + +s3_msg = """Your installed version of s3fs is very old and known to cause +severe performance issues, see also https://github.com/dask/dask/issues/10276 + +To fix, you should specify a lower version bound on s3fs, or +update the current installation. 
+""" + + +def _import_class(cls, minv=None): + """Take a string FQP and return the imported class or identifier + + cls is of the form "package.module.klass" or "package.module:subobject.klass" + """ + if ":" in cls: + mod, name = cls.rsplit(":", 1) + s3 = mod == "s3fs" + mod = importlib.import_module(mod) + if s3 and mod.__version__.split(".") < ["0", "5"]: + warnings.warn(s3_msg) + for part in name.split("."): + mod = getattr(mod, part) + return mod + else: + mod, name = cls.rsplit(".", 1) + s3 = mod == "s3fs" + mod = importlib.import_module(mod) + if s3 and mod.__version__.split(".") < ["0", "5"]: + warnings.warn(s3_msg) + return getattr(mod, name) + + +def filesystem(protocol, **storage_options): + """Instantiate filesystems for given protocol and arguments + + ``storage_options`` are specific to the protocol being chosen, and are + passed directly to the class. + """ + if protocol == "arrow_hdfs": + warnings.warn( + "The 'arrow_hdfs' protocol has been deprecated and will be " + "removed in the future. Specify it as 'hdfs'.", + DeprecationWarning, + ) + + cls = get_filesystem_class(protocol) + return cls(**storage_options) + + +def available_protocols(): + """Return a list of the implemented protocols. + + Note that any given protocol may require extra packages to be importable. + """ + return list(known_implementations) diff --git a/llmeval-env/lib/python3.10/site-packages/fsspec/utils.py b/llmeval-env/lib/python3.10/site-packages/fsspec/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ba3f80be36d4aa4f2f56df10549c1dff6b72e43b --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/fsspec/utils.py @@ -0,0 +1,742 @@ +from __future__ import annotations + +import contextlib +import logging +import math +import os +import pathlib +import re +import sys +import tempfile +from functools import partial +from hashlib import md5 +from importlib.metadata import version +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Iterable, + Iterator, + Sequence, + TypeVar, +) +from urllib.parse import urlsplit + +if TYPE_CHECKING: + from typing_extensions import TypeGuard + + from fsspec.spec import AbstractFileSystem + + +DEFAULT_BLOCK_SIZE = 5 * 2**20 + +T = TypeVar("T") + + +def infer_storage_options( + urlpath: str, inherit_storage_options: dict[str, Any] | None = None +) -> dict[str, Any]: + """Infer storage options from URL path and merge it with existing storage + options. + + Parameters + ---------- + urlpath: str or unicode + Either local absolute file path or URL (hdfs://namenode:8020/file.csv) + inherit_storage_options: dict (optional) + Its contents will get merged with the inferred information from the + given path + + Returns + ------- + Storage options dict. + + Examples + -------- + >>> infer_storage_options('/mnt/datasets/test.csv') # doctest: +SKIP + {"protocol": "file", "path", "/mnt/datasets/test.csv"} + >>> infer_storage_options( + ... 'hdfs://username:pwd@node:123/mnt/datasets/test.csv?q=1', + ... inherit_storage_options={'extra': 'value'}, + ... 
) # doctest: +SKIP + {"protocol": "hdfs", "username": "username", "password": "pwd", + "host": "node", "port": 123, "path": "/mnt/datasets/test.csv", + "url_query": "q=1", "extra": "value"} + """ + # Handle Windows paths including disk name in this special case + if ( + re.match(r"^[a-zA-Z]:[\\/]", urlpath) + or re.match(r"^[a-zA-Z0-9]+://", urlpath) is None + ): + return {"protocol": "file", "path": urlpath} + + parsed_path = urlsplit(urlpath) + protocol = parsed_path.scheme or "file" + if parsed_path.fragment: + path = "#".join([parsed_path.path, parsed_path.fragment]) + else: + path = parsed_path.path + if protocol == "file": + # Special case parsing file protocol URL on Windows according to: + # https://msdn.microsoft.com/en-us/library/jj710207.aspx + windows_path = re.match(r"^/([a-zA-Z])[:|]([\\/].*)$", path) + if windows_path: + path = "%s:%s" % windows_path.groups() + + if protocol in ["http", "https"]: + # for HTTP, we don't want to parse, as requests will anyway + return {"protocol": protocol, "path": urlpath} + + options: dict[str, Any] = {"protocol": protocol, "path": path} + + if parsed_path.netloc: + # Parse `hostname` from netloc manually because `parsed_path.hostname` + # lowercases the hostname which is not always desirable (e.g. in S3): + # https://github.com/dask/dask/issues/1417 + options["host"] = parsed_path.netloc.rsplit("@", 1)[-1].rsplit(":", 1)[0] + + if protocol in ("s3", "s3a", "gcs", "gs"): + options["path"] = options["host"] + options["path"] + else: + options["host"] = options["host"] + if parsed_path.port: + options["port"] = parsed_path.port + if parsed_path.username: + options["username"] = parsed_path.username + if parsed_path.password: + options["password"] = parsed_path.password + + if parsed_path.query: + options["url_query"] = parsed_path.query + if parsed_path.fragment: + options["url_fragment"] = parsed_path.fragment + + if inherit_storage_options: + update_storage_options(options, inherit_storage_options) + + return options + + +def update_storage_options( + options: dict[str, Any], inherited: dict[str, Any] | None = None +) -> None: + if not inherited: + inherited = {} + collisions = set(options) & set(inherited) + if collisions: + for collision in collisions: + if options.get(collision) != inherited.get(collision): + raise KeyError( + f"Collision between inferred and specified storage " + f"option:\n{collision}" + ) + options.update(inherited) + + +# Compression extensions registered via fsspec.compression.register_compression +compressions: dict[str, str] = {} + + +def infer_compression(filename: str) -> str | None: + """Infer compression, if available, from filename. + + Infer a named compression type, if registered and available, from filename + extension. This includes builtin (gz, bz2, zip) compressions, as well as + optional compressions. See fsspec.compression.register_compression. 
+ """ + extension = os.path.splitext(filename)[-1].strip(".").lower() + if extension in compressions: + return compressions[extension] + return None + + +def build_name_function(max_int: float) -> Callable[[int], str]: + """Returns a function that receives a single integer + and returns it as a string padded by enough zero characters + to align with maximum possible integer + + >>> name_f = build_name_function(57) + + >>> name_f(7) + '07' + >>> name_f(31) + '31' + >>> build_name_function(1000)(42) + '0042' + >>> build_name_function(999)(42) + '042' + >>> build_name_function(0)(0) + '0' + """ + # handle corner cases max_int is 0 or exact power of 10 + max_int += 1e-8 + + pad_length = int(math.ceil(math.log10(max_int))) + + def name_function(i: int) -> str: + return str(i).zfill(pad_length) + + return name_function + + +def seek_delimiter(file: IO[bytes], delimiter: bytes, blocksize: int) -> bool: + r"""Seek current file to file start, file end, or byte after delimiter seq. + + Seeks file to next chunk delimiter, where chunks are defined on file start, + a delimiting sequence, and file end. Use file.tell() to see location afterwards. + Note that file start is a valid split, so must be at offset > 0 to seek for + delimiter. + + Parameters + ---------- + file: a file + delimiter: bytes + a delimiter like ``b'\n'`` or message sentinel, matching file .read() type + blocksize: int + Number of bytes to read from the file at once. + + + Returns + ------- + Returns True if a delimiter was found, False if at file start or end. + + """ + + if file.tell() == 0: + # beginning-of-file, return without seek + return False + + # Interface is for binary IO, with delimiter as bytes, but initialize last + # with result of file.read to preserve compatibility with text IO. + last: bytes | None = None + while True: + current = file.read(blocksize) + if not current: + # end-of-file without delimiter + return False + full = last + current if last else current + try: + if delimiter in full: + i = full.index(delimiter) + file.seek(file.tell() - (len(full) - i) + len(delimiter)) + return True + elif len(current) < blocksize: + # end-of-file without delimiter + return False + except (OSError, ValueError): + pass + last = full[-len(delimiter) :] + + +def read_block( + f: IO[bytes], + offset: int, + length: int | None, + delimiter: bytes | None = None, + split_before: bool = False, +) -> bytes: + """Read a block of bytes from a file + + Parameters + ---------- + f: File + Open file + offset: int + Byte offset to start read + length: int + Number of bytes to read, read through end of file if None + delimiter: bytes (optional) + Ensure reading starts and stops at delimiter bytestring + split_before: bool (optional) + Start/stop read *before* delimiter bytestring. + + + If using the ``delimiter=`` keyword argument we ensure that the read + starts and stops at delimiter boundaries that follow the locations + ``offset`` and ``offset + length``. If ``offset`` is zero then we + start at zero, regardless of delimiter. The bytestring returned WILL + include the terminating delimiter string. 
+ + Examples + -------- + + >>> from io import BytesIO # doctest: +SKIP + >>> f = BytesIO(b'Alice, 100\\nBob, 200\\nCharlie, 300') # doctest: +SKIP + >>> read_block(f, 0, 13) # doctest: +SKIP + b'Alice, 100\\nBo' + + >>> read_block(f, 0, 13, delimiter=b'\\n') # doctest: +SKIP + b'Alice, 100\\nBob, 200\\n' + + >>> read_block(f, 10, 10, delimiter=b'\\n') # doctest: +SKIP + b'Bob, 200\\nCharlie, 300' + """ + if delimiter: + f.seek(offset) + found_start_delim = seek_delimiter(f, delimiter, 2**16) + if length is None: + return f.read() + start = f.tell() + length -= start - offset + + f.seek(start + length) + found_end_delim = seek_delimiter(f, delimiter, 2**16) + end = f.tell() + + # Adjust split location to before delimiter if seek found the + # delimiter sequence, not start or end of file. + if found_start_delim and split_before: + start -= len(delimiter) + + if found_end_delim and split_before: + end -= len(delimiter) + + offset = start + length = end - start + + f.seek(offset) + + # TODO: allow length to be None and read to the end of the file? + assert length is not None + b = f.read(length) + return b + + +def tokenize(*args: Any, **kwargs: Any) -> str: + """Deterministic token + + (modified from dask.base) + + >>> tokenize([1, 2, '3']) + '9d71491b50023b06fc76928e6eddb952' + + >>> tokenize('Hello') == tokenize('Hello') + True + """ + if kwargs: + args += (kwargs,) + try: + h = md5(str(args).encode()) + except ValueError: + # FIPS systems: https://github.com/fsspec/filesystem_spec/issues/380 + h = md5(str(args).encode(), usedforsecurity=False) + return h.hexdigest() + + +def stringify_path(filepath: str | os.PathLike[str] | pathlib.Path) -> str: + """Attempt to convert a path-like object to a string. + + Parameters + ---------- + filepath: object to be converted + + Returns + ------- + filepath_str: maybe a string version of the object + + Notes + ----- + Objects supporting the fspath protocol are coerced according to its + __fspath__ method. + + For backwards compatibility with older Python version, pathlib.Path + objects are specially coerced. + + Any other object is passed through unchanged, which includes bytes, + strings, buffers, or anything else that's not even path-like. + """ + if isinstance(filepath, str): + return filepath + elif hasattr(filepath, "__fspath__"): + return filepath.__fspath__() + elif isinstance(filepath, pathlib.Path): + return str(filepath) + elif hasattr(filepath, "path"): + return filepath.path + else: + return filepath # type: ignore[return-value] + + +def make_instance( + cls: Callable[..., T], args: Sequence[Any], kwargs: dict[str, Any] +) -> T: + inst = cls(*args, **kwargs) + inst._determine_worker() # type: ignore[attr-defined] + return inst + + +def common_prefix(paths: Iterable[str]) -> str: + """For a list of paths, find the shortest prefix common to all""" + parts = [p.split("/") for p in paths] + lmax = min(len(p) for p in parts) + end = 0 + for i in range(lmax): + end = all(p[i] == parts[0][i] for p in parts) + if not end: + break + i += end + return "/".join(parts[0][:i]) + + +def other_paths( + paths: list[str], + path2: str | list[str], + exists: bool = False, + flatten: bool = False, +) -> list[str]: + """In bulk file operations, construct a new file tree from a list of files + + Parameters + ---------- + paths: list of str + The input file tree + path2: str or list of str + Root to construct the new list in. If this is already a list of str, we just + assert it has the right number of elements. 
+ exists: bool (optional) + For a str destination, it is already exists (and is a dir), files should + end up inside. + flatten: bool (optional) + Whether to flatten the input directory tree structure so that the output files + are in the same directory. + + Returns + ------- + list of str + """ + + if isinstance(path2, str): + path2 = path2.rstrip("/") + + if flatten: + path2 = ["/".join((path2, p.split("/")[-1])) for p in paths] + else: + cp = common_prefix(paths) + if exists: + cp = cp.rsplit("/", 1)[0] + if not cp and all(not s.startswith("/") for s in paths): + path2 = ["/".join([path2, p]) for p in paths] + else: + path2 = [p.replace(cp, path2, 1) for p in paths] + else: + assert len(paths) == len(path2) + return path2 + + +def is_exception(obj: Any) -> bool: + return isinstance(obj, BaseException) + + +def isfilelike(f: Any) -> TypeGuard[IO[bytes]]: + for attr in ["read", "close", "tell"]: + if not hasattr(f, attr): + return False + return True + + +def get_protocol(url: str) -> str: + url = stringify_path(url) + parts = re.split(r"(\:\:|\://)", url, 1) + if len(parts) > 1: + return parts[0] + return "file" + + +def can_be_local(path: str) -> bool: + """Can the given URL be used with open_local?""" + from fsspec import get_filesystem_class + + try: + return getattr(get_filesystem_class(get_protocol(path)), "local_file", False) + except (ValueError, ImportError): + # not in registry or import failed + return False + + +def get_package_version_without_import(name: str) -> str | None: + """For given package name, try to find the version without importing it + + Import and package.__version__ is still the backup here, so an import + *might* happen. + + Returns either the version string, or None if the package + or the version was not readily found. 
+ """ + if name in sys.modules: + mod = sys.modules[name] + if hasattr(mod, "__version__"): + return mod.__version__ + try: + return version(name) + except: # noqa: E722 + pass + try: + import importlib + + mod = importlib.import_module(name) + return mod.__version__ + except (ImportError, AttributeError): + return None + + +def setup_logging( + logger: logging.Logger | None = None, + logger_name: str | None = None, + level: str = "DEBUG", + clear: bool = True, +) -> logging.Logger: + if logger is None and logger_name is None: + raise ValueError("Provide either logger object or logger name") + logger = logger or logging.getLogger(logger_name) + handle = logging.StreamHandler() + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(funcName)s -- %(message)s" + ) + handle.setFormatter(formatter) + if clear: + logger.handlers.clear() + logger.addHandler(handle) + logger.setLevel(level) + return logger + + +def _unstrip_protocol(name: str, fs: AbstractFileSystem) -> str: + return fs.unstrip_protocol(name) + + +def mirror_from( + origin_name: str, methods: Iterable[str] +) -> Callable[[type[T]], type[T]]: + """Mirror attributes and methods from the given + origin_name attribute of the instance to the + decorated class""" + + def origin_getter(method: str, self: Any) -> Any: + origin = getattr(self, origin_name) + return getattr(origin, method) + + def wrapper(cls: type[T]) -> type[T]: + for method in methods: + wrapped_method = partial(origin_getter, method) + setattr(cls, method, property(wrapped_method)) + return cls + + return wrapper + + +@contextlib.contextmanager +def nullcontext(obj: T) -> Iterator[T]: + yield obj + + +def merge_offset_ranges( + paths: list[str], + starts: list[int] | int, + ends: list[int] | int, + max_gap: int = 0, + max_block: int | None = None, + sort: bool = True, +) -> tuple[list[str], list[int], list[int]]: + """Merge adjacent byte-offset ranges when the inter-range + gap is <= `max_gap`, and when the merged byte range does not + exceed `max_block` (if specified). By default, this function + will re-order the input paths and byte ranges to ensure sorted + order. If the user can guarantee that the inputs are already + sorted, passing `sort=False` will skip the re-ordering. + """ + # Check input + if not isinstance(paths, list): + raise TypeError + if not isinstance(starts, list): + starts = [starts] * len(paths) + if not isinstance(ends, list): + ends = [ends] * len(paths) + if len(starts) != len(paths) or len(ends) != len(paths): + raise ValueError + + # Early Return + if len(starts) <= 1: + return paths, starts, ends + + starts = [s or 0 for s in starts] + # Sort by paths and then ranges if `sort=True` + if sort: + paths, starts, ends = ( + list(v) + for v in zip( + *sorted( + zip(paths, starts, ends), + ) + ) + ) + + if paths: + # Loop through the coupled `paths`, `starts`, and + # `ends`, and merge adjacent blocks when appropriate + new_paths = paths[:1] + new_starts = starts[:1] + new_ends = ends[:1] + for i in range(1, len(paths)): + if paths[i] == paths[i - 1] and new_ends[-1] is None: + continue + elif ( + paths[i] != paths[i - 1] + or ((starts[i] - new_ends[-1]) > max_gap) + or (max_block is not None and (ends[i] - new_starts[-1]) > max_block) + ): + # Cannot merge with previous block. 
+ # Add new `paths`, `starts`, and `ends` elements + new_paths.append(paths[i]) + new_starts.append(starts[i]) + new_ends.append(ends[i]) + else: + # Merge with previous block by updating the + # last element of `ends` + new_ends[-1] = ends[i] + return new_paths, new_starts, new_ends + + # `paths` is empty. Just return input lists + return paths, starts, ends + + +def file_size(filelike: IO[bytes]) -> int: + """Find length of any open read-mode file-like""" + pos = filelike.tell() + try: + return filelike.seek(0, 2) + finally: + filelike.seek(pos) + + +@contextlib.contextmanager +def atomic_write(path: str, mode: str = "wb"): + """ + A context manager that opens a temporary file next to `path` and, on exit, + replaces `path` with the temporary file, thereby updating `path` + atomically. + """ + fd, fn = tempfile.mkstemp( + dir=os.path.dirname(path), prefix=os.path.basename(path) + "-" + ) + try: + with open(fd, mode) as fp: + yield fp + except BaseException: + with contextlib.suppress(FileNotFoundError): + os.unlink(fn) + raise + else: + os.replace(fn, path) + + +def _translate(pat, STAR, QUESTION_MARK): + # Copied from: https://github.com/python/cpython/pull/106703. + res: list[str] = [] + add = res.append + i, n = 0, len(pat) + while i < n: + c = pat[i] + i = i + 1 + if c == "*": + # compress consecutive `*` into one + if (not res) or res[-1] is not STAR: + add(STAR) + elif c == "?": + add(QUESTION_MARK) + elif c == "[": + j = i + if j < n and pat[j] == "!": + j = j + 1 + if j < n and pat[j] == "]": + j = j + 1 + while j < n and pat[j] != "]": + j = j + 1 + if j >= n: + add("\\[") + else: + stuff = pat[i:j] + if "-" not in stuff: + stuff = stuff.replace("\\", r"\\") + else: + chunks = [] + k = i + 2 if pat[i] == "!" else i + 1 + while True: + k = pat.find("-", k, j) + if k < 0: + break + chunks.append(pat[i:k]) + i = k + 1 + k = k + 3 + chunk = pat[i:j] + if chunk: + chunks.append(chunk) + else: + chunks[-1] += "-" + # Remove empty ranges -- invalid in RE. + for k in range(len(chunks) - 1, 0, -1): + if chunks[k - 1][-1] > chunks[k][0]: + chunks[k - 1] = chunks[k - 1][:-1] + chunks[k][1:] + del chunks[k] + # Escape backslashes and hyphens for set difference (--). + # Hyphens that create ranges shouldn't be escaped. + stuff = "-".join( + s.replace("\\", r"\\").replace("-", r"\-") for s in chunks + ) + # Escape set operations (&&, ~~ and ||). + stuff = re.sub(r"([&~|])", r"\\\1", stuff) + i = j + 1 + if not stuff: + # Empty range: never match. + add("(?!)") + elif stuff == "!": + # Negated empty range: match any character. + add(".") + else: + if stuff[0] == "!": + stuff = "^" + stuff[1:] + elif stuff[0] in ("^", "["): + stuff = "\\" + stuff + add(f"[{stuff}]") + else: + add(re.escape(c)) + assert i == n + return res + + +def glob_translate(pat): + # Copied from: https://github.com/python/cpython/pull/106703. + # The keyword parameters' values are fixed to: + # recursive=True, include_hidden=True, seps=None + """Translate a pathname with shell wildcards to a regular expression.""" + if os.path.altsep: + seps = os.path.sep + os.path.altsep + else: + seps = os.path.sep + escaped_seps = "".join(map(re.escape, seps)) + any_sep = f"[{escaped_seps}]" if len(seps) > 1 else escaped_seps + not_sep = f"[^{escaped_seps}]" + one_last_segment = f"{not_sep}+" + one_segment = f"{one_last_segment}{any_sep}" + any_segments = f"(?:.+{any_sep})?" 
+ any_last_segments = ".*" + results = [] + parts = re.split(any_sep, pat) + last_part_idx = len(parts) - 1 + for idx, part in enumerate(parts): + if part == "*": + results.append(one_segment if idx < last_part_idx else one_last_segment) + continue + if part == "**": + results.append(any_segments if idx < last_part_idx else any_last_segments) + continue + elif "**" in part: + raise ValueError( + "Invalid pattern: '**' can only be an entire path component" + ) + if part: + results.extend(_translate(part, f"{not_sep}*", not_sep)) + if idx < last_part_idx: + results.append(any_sep) + res = "".join(results) + return rf"(?s:{res})\Z" diff --git a/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/INSTALLER b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/LICENSE b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/METADATA b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..afffde5804de48c4ffcc5d4e96022cae69f75c91 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/METADATA @@ -0,0 +1,300 @@ +Metadata-Version: 2.1 +Name: huggingface-hub +Version: 0.23.0 +Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub +Home-page: https://github.com/huggingface/huggingface_hub +Author: Hugging Face, Inc. 
+Author-email: julien@huggingface.co +License: Apache +Keywords: model-hub machine-learning models natural-language-processing deep-learning pytorch pretrained-models +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.8.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: filelock +Requires-Dist: fsspec >=2023.5.0 +Requires-Dist: packaging >=20.9 +Requires-Dist: pyyaml >=5.1 +Requires-Dist: requests +Requires-Dist: tqdm >=4.42.1 +Requires-Dist: typing-extensions >=3.7.4.3 +Provides-Extra: all +Requires-Dist: InquirerPy ==0.3.4 ; extra == 'all' +Requires-Dist: aiohttp ; extra == 'all' +Requires-Dist: minijinja >=1.0 ; extra == 'all' +Requires-Dist: jedi ; extra == 'all' +Requires-Dist: Jinja2 ; extra == 'all' +Requires-Dist: pytest ; extra == 'all' +Requires-Dist: pytest-cov ; extra == 'all' +Requires-Dist: pytest-env ; extra == 'all' +Requires-Dist: pytest-xdist ; extra == 'all' +Requires-Dist: pytest-vcr ; extra == 'all' +Requires-Dist: pytest-asyncio ; extra == 'all' +Requires-Dist: pytest-rerunfailures ; extra == 'all' +Requires-Dist: urllib3 <2.0 ; extra == 'all' +Requires-Dist: soundfile ; extra == 'all' +Requires-Dist: Pillow ; extra == 'all' +Requires-Dist: gradio ; extra == 'all' +Requires-Dist: numpy ; extra == 'all' +Requires-Dist: fastapi ; extra == 'all' +Requires-Dist: ruff >=0.3.0 ; extra == 'all' +Requires-Dist: mypy ==1.5.1 ; extra == 'all' +Requires-Dist: typing-extensions >=4.8.0 ; extra == 'all' +Requires-Dist: types-PyYAML ; extra == 'all' +Requires-Dist: types-requests ; extra == 'all' +Requires-Dist: types-simplejson ; extra == 'all' +Requires-Dist: types-toml ; extra == 'all' +Requires-Dist: types-tqdm ; extra == 'all' +Requires-Dist: types-urllib3 ; extra == 'all' +Provides-Extra: cli +Requires-Dist: InquirerPy ==0.3.4 ; extra == 'cli' +Provides-Extra: dev +Requires-Dist: InquirerPy ==0.3.4 ; extra == 'dev' +Requires-Dist: aiohttp ; extra == 'dev' +Requires-Dist: minijinja >=1.0 ; extra == 'dev' +Requires-Dist: jedi ; extra == 'dev' +Requires-Dist: Jinja2 ; extra == 'dev' +Requires-Dist: pytest ; extra == 'dev' +Requires-Dist: pytest-cov ; extra == 'dev' +Requires-Dist: pytest-env ; extra == 'dev' +Requires-Dist: pytest-xdist ; extra == 'dev' +Requires-Dist: pytest-vcr ; extra == 'dev' +Requires-Dist: pytest-asyncio ; extra == 'dev' +Requires-Dist: pytest-rerunfailures ; extra == 'dev' +Requires-Dist: urllib3 <2.0 ; extra == 'dev' +Requires-Dist: soundfile ; extra == 'dev' +Requires-Dist: Pillow ; extra == 'dev' +Requires-Dist: gradio ; extra == 'dev' +Requires-Dist: numpy ; extra == 'dev' +Requires-Dist: fastapi ; extra == 'dev' +Requires-Dist: ruff >=0.3.0 ; extra == 'dev' +Requires-Dist: mypy ==1.5.1 ; extra == 'dev' +Requires-Dist: typing-extensions >=4.8.0 ; extra == 'dev' +Requires-Dist: types-PyYAML ; extra == 'dev' +Requires-Dist: types-requests ; extra == 'dev' +Requires-Dist: types-simplejson ; extra == 'dev' 
+Requires-Dist: types-toml ; extra == 'dev' +Requires-Dist: types-tqdm ; extra == 'dev' +Requires-Dist: types-urllib3 ; extra == 'dev' +Provides-Extra: fastai +Requires-Dist: toml ; extra == 'fastai' +Requires-Dist: fastai >=2.4 ; extra == 'fastai' +Requires-Dist: fastcore >=1.3.27 ; extra == 'fastai' +Provides-Extra: hf_transfer +Requires-Dist: hf-transfer >=0.1.4 ; extra == 'hf_transfer' +Provides-Extra: inference +Requires-Dist: aiohttp ; extra == 'inference' +Requires-Dist: minijinja >=1.0 ; extra == 'inference' +Provides-Extra: quality +Requires-Dist: ruff >=0.3.0 ; extra == 'quality' +Requires-Dist: mypy ==1.5.1 ; extra == 'quality' +Provides-Extra: tensorflow +Requires-Dist: tensorflow ; extra == 'tensorflow' +Requires-Dist: pydot ; extra == 'tensorflow' +Requires-Dist: graphviz ; extra == 'tensorflow' +Provides-Extra: tensorflow-testing +Requires-Dist: tensorflow ; extra == 'tensorflow-testing' +Requires-Dist: keras <3.0 ; extra == 'tensorflow-testing' +Provides-Extra: testing +Requires-Dist: InquirerPy ==0.3.4 ; extra == 'testing' +Requires-Dist: aiohttp ; extra == 'testing' +Requires-Dist: minijinja >=1.0 ; extra == 'testing' +Requires-Dist: jedi ; extra == 'testing' +Requires-Dist: Jinja2 ; extra == 'testing' +Requires-Dist: pytest ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-env ; extra == 'testing' +Requires-Dist: pytest-xdist ; extra == 'testing' +Requires-Dist: pytest-vcr ; extra == 'testing' +Requires-Dist: pytest-asyncio ; extra == 'testing' +Requires-Dist: pytest-rerunfailures ; extra == 'testing' +Requires-Dist: urllib3 <2.0 ; extra == 'testing' +Requires-Dist: soundfile ; extra == 'testing' +Requires-Dist: Pillow ; extra == 'testing' +Requires-Dist: gradio ; extra == 'testing' +Requires-Dist: numpy ; extra == 'testing' +Requires-Dist: fastapi ; extra == 'testing' +Provides-Extra: torch +Requires-Dist: torch ; extra == 'torch' +Requires-Dist: safetensors ; extra == 'torch' +Provides-Extra: typing +Requires-Dist: typing-extensions >=4.8.0 ; extra == 'typing' +Requires-Dist: types-PyYAML ; extra == 'typing' +Requires-Dist: types-requests ; extra == 'typing' +Requires-Dist: types-simplejson ; extra == 'typing' +Requires-Dist: types-toml ; extra == 'typing' +Requires-Dist: types-tqdm ; extra == 'typing' +Requires-Dist: types-urllib3 ; extra == 'typing' + +

+[huggingface_hub library logo]
+
+The official Python client for the Huggingface Hub.
+
+[Badges: Documentation | GitHub release | PyPi version | downloads | Code coverage]
+
+English | Deutsch | हिंदी | 한국어 | 中文(简体)
+
+ +--- + +**Documentation**: https://hf.co/docs/huggingface_hub + +**Source Code**: https://github.com/huggingface/huggingface_hub + +--- + +## Welcome to the huggingface_hub library + +The `huggingface_hub` library allows you to interact with the [Hugging Face Hub](https://huggingface.co/), a platform democratizing open-source Machine Learning for creators and collaborators. Discover pre-trained models and datasets for your projects or play with the thousands of machine learning apps hosted on the Hub. You can also create and share your own models, datasets and demos with the community. The `huggingface_hub` library provides a simple way to do all these things with Python. + +## Key features + +- [Download files](https://huggingface.co/docs/huggingface_hub/en/guides/download) from the Hub. +- [Upload files](https://huggingface.co/docs/huggingface_hub/en/guides/upload) to the Hub. +- [Manage your repositories](https://huggingface.co/docs/huggingface_hub/en/guides/repository). +- [Run Inference](https://huggingface.co/docs/huggingface_hub/en/guides/inference) on deployed models. +- [Search](https://huggingface.co/docs/huggingface_hub/en/guides/search) for models, datasets and Spaces. +- [Share Model Cards](https://huggingface.co/docs/huggingface_hub/en/guides/model-cards) to document your models. +- [Engage with the community](https://huggingface.co/docs/huggingface_hub/en/guides/community) through PRs and comments. + +## Installation + +Install the `huggingface_hub` package with [pip](https://pypi.org/project/huggingface-hub/): + +```bash +pip install huggingface_hub +``` + +If you prefer, you can also install it with [conda](https://huggingface.co/docs/huggingface_hub/en/installation#install-with-conda). + +In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. For example, if you want have a complete experience for Inference, run: + +```bash +pip install huggingface_hub[inference] +``` + +To learn more installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation). + +## Quick start + +### Download files + +Download a single file + +```py +from huggingface_hub import hf_hub_download + +hf_hub_download(repo_id="tiiuae/falcon-7b-instruct", filename="config.json") +``` + +Or an entire repository + +```py +from huggingface_hub import snapshot_download + +snapshot_download("stabilityai/stable-diffusion-2-1") +``` + +Files will be downloaded in a local cache folder. More details in [this guide](https://huggingface.co/docs/huggingface_hub/en/guides/manage-cache). + +### Login + +The Hugging Face Hub uses tokens to authenticate applications (see [docs](https://huggingface.co/docs/hub/security-tokens)). 
+To log in on your machine from the terminal, run the following CLI command:
+
+```bash
+huggingface-cli login
+# or using an environment variable
+huggingface-cli login --token $HUGGINGFACE_TOKEN
+```
+
+### Create a repository
+
+```py
+from huggingface_hub import create_repo
+
+create_repo(repo_id="super-cool-model")
+```
+
+### Upload files
+
+Upload a single file:
+
+```py
+from huggingface_hub import upload_file
+
+upload_file(
+    path_or_fileobj="/home/lysandre/dummy-test/README.md",
+    path_in_repo="README.md",
+    repo_id="lysandre/test-model",
+)
+```
+
+Or an entire folder:
+
+```py
+from huggingface_hub import upload_folder
+
+upload_folder(
+    folder_path="/path/to/local/space",
+    repo_id="username/my-cool-space",
+    repo_type="space",
+)
+```
+
+For more details, check out the [upload guide](https://huggingface.co/docs/huggingface_hub/en/guides/upload).
+
+## Integrating with the Hub
+
+We're partnering with cool open source ML libraries to provide free model hosting and versioning. You can find the existing integrations [here](https://huggingface.co/docs/hub/libraries).
+
+The advantages are:
+
+- Free model or dataset hosting for libraries and their users.
+- Built-in file versioning, even with very large files, thanks to a git-based approach.
+- Serverless inference API for all models publicly available.
+- In-browser widgets to play with the uploaded models.
+- Anyone can upload a new model for your library; they just need to add the corresponding tag for the model to be discoverable.
+- Fast downloads! We use Cloudfront (a CDN) to geo-replicate downloads so they're blazing fast from anywhere on the globe.
+- Usage stats and more features to come.
+
+If you would like to integrate your library, feel free to open an issue to begin the discussion. We wrote a [step-by-step guide](https://huggingface.co/docs/hub/adding-a-library) with ❤️ showing how to do this integration.
+
+## Contributions (feature requests, bugs, etc.) are super welcome 💙💚💛💜🧡❤️
+
+Everyone is welcome to contribute, and we value everybody's contribution. Code is not the only way to help the community.
+Answering questions, helping others, reaching out and improving the documentation are immensely valuable to the community.
+We wrote a [contribution guide](https://github.com/huggingface/huggingface_hub/blob/main/CONTRIBUTING.md) to summarize
+how to get started to contribute to this repository.
+ + diff --git a/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/RECORD b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..66a436f877659b3288279ee5a2e0ada9c70947e7 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/RECORD @@ -0,0 +1,224 @@ +../../../bin/huggingface-cli,sha256=JlIXlvToHPKBYEDy0_2MR77fOIewyVoAtdLiaIwsonE,273 +huggingface_hub-0.23.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +huggingface_hub-0.23.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 +huggingface_hub-0.23.0.dist-info/METADATA,sha256=_OWllpyp_iC9C7uiCKcZfHiPBLna6BYgSY3FCx7zr-A,12994 +huggingface_hub-0.23.0.dist-info/RECORD,, +huggingface_hub-0.23.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +huggingface_hub-0.23.0.dist-info/entry_points.txt,sha256=Y3Z2L02rBG7va_iE6RPXolIgwOdwUFONyRN3kXMxZ0g,131 +huggingface_hub-0.23.0.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16 +huggingface_hub/__init__.py,sha256=xXb4llMkyN0SvoFdcDDPqiHEiI3C-sZsSHQYLo3NK04,32692 +huggingface_hub/__pycache__/__init__.cpython-310.pyc,, +huggingface_hub/__pycache__/_commit_api.cpython-310.pyc,, +huggingface_hub/__pycache__/_commit_scheduler.cpython-310.pyc,, +huggingface_hub/__pycache__/_inference_endpoints.cpython-310.pyc,, +huggingface_hub/__pycache__/_local_folder.cpython-310.pyc,, +huggingface_hub/__pycache__/_login.cpython-310.pyc,, +huggingface_hub/__pycache__/_multi_commits.cpython-310.pyc,, +huggingface_hub/__pycache__/_snapshot_download.cpython-310.pyc,, +huggingface_hub/__pycache__/_space_api.cpython-310.pyc,, +huggingface_hub/__pycache__/_tensorboard_logger.cpython-310.pyc,, +huggingface_hub/__pycache__/_webhooks_payload.cpython-310.pyc,, +huggingface_hub/__pycache__/_webhooks_server.cpython-310.pyc,, +huggingface_hub/__pycache__/community.cpython-310.pyc,, +huggingface_hub/__pycache__/constants.cpython-310.pyc,, +huggingface_hub/__pycache__/errors.cpython-310.pyc,, +huggingface_hub/__pycache__/fastai_utils.cpython-310.pyc,, +huggingface_hub/__pycache__/file_download.cpython-310.pyc,, +huggingface_hub/__pycache__/hf_api.cpython-310.pyc,, +huggingface_hub/__pycache__/hf_file_system.cpython-310.pyc,, +huggingface_hub/__pycache__/hub_mixin.cpython-310.pyc,, +huggingface_hub/__pycache__/inference_api.cpython-310.pyc,, +huggingface_hub/__pycache__/keras_mixin.cpython-310.pyc,, +huggingface_hub/__pycache__/lfs.cpython-310.pyc,, +huggingface_hub/__pycache__/repocard.cpython-310.pyc,, +huggingface_hub/__pycache__/repocard_data.cpython-310.pyc,, +huggingface_hub/__pycache__/repository.cpython-310.pyc,, +huggingface_hub/_commit_api.py,sha256=Z1sQnJx1xWfspsX6vS8eGTmr-9QujIoItjbnJVVyyCQ,29299 +huggingface_hub/_commit_scheduler.py,sha256=nlJS_vnLb8i92NLrRwJX8Mg9QZ7f3kfLbLlQuEd5YjU,13647 +huggingface_hub/_inference_endpoints.py,sha256=rBx6xgnSJq0JtntF1_zphj7NsCmduICqgZfmvscdE_w,15667 +huggingface_hub/_local_folder.py,sha256=ajjI3vRgV9kGrx2ZPeTnDm8lfGN1eyMshn5gxM_7Q38,8441 +huggingface_hub/_login.py,sha256=E-3hbns3Jo0mjnyPWQVz9c0xPEXuQ-KQhZCQ9R1BE7o,15478 +huggingface_hub/_multi_commits.py,sha256=mFmCP_5hNsruEgDF6kOVyaFkpnbSdNxPWfGUlFbl5O8,12535 +huggingface_hub/_snapshot_download.py,sha256=pN7CEl8X_JJRdrFDeBk0nYecVM7rvULJty9vuxrHnMU,14039 +huggingface_hub/_space_api.py,sha256=Mae_lqTRyTWyszI5mlObJ2fn9slPxkFPcFTEVADoNQM,5255 
+huggingface_hub/_tensorboard_logger.py,sha256=x_56MOZiU2-9QQ1XHOWem39ySRLe29hkalxy2nRaRL4,7470 +huggingface_hub/_webhooks_payload.py,sha256=Xm3KaK7tCOGBlXkuZvbym6zjHXrT1XCrbUFWuXiBmNY,3617 +huggingface_hub/_webhooks_server.py,sha256=9RQ4AS5JVssJhM66FzlyOSQhKwrKG-dV_x6SA8GeOQw,15497 +huggingface_hub/commands/__init__.py,sha256=AkbM2a-iGh0Vq_xAWhK3mu3uZ44km8-X5uWjKcvcrUQ,928 +huggingface_hub/commands/__pycache__/__init__.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/_cli_utils.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/delete_cache.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/download.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/env.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/huggingface_cli.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/lfs.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/scan_cache.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/tag.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/upload.cpython-310.pyc,, +huggingface_hub/commands/__pycache__/user.cpython-310.pyc,, +huggingface_hub/commands/_cli_utils.py,sha256=qRdl9opi3yJxIVNCnrmte-jFWmYbjVqd8gBlin8NNzY,1971 +huggingface_hub/commands/delete_cache.py,sha256=Rb1BtIltJPnQ-th7tcK_L4mFqfk785t3KXV77xXKBP4,16131 +huggingface_hub/commands/download.py,sha256=s0dSqUTWG26Q5F2rEFAr_jY2xW4yOvDbSM20vYCjD3I,7880 +huggingface_hub/commands/env.py,sha256=yYl4DSS14V8t244nAi0t77Izx5LIdgS_dy6xiV5VQME,1226 +huggingface_hub/commands/huggingface_cli.py,sha256=-MkVPxIKIhP1aTFcExz7krEEDdaVpG9cV7P70ZBJh-U,2030 +huggingface_hub/commands/lfs.py,sha256=6E769AoRxUDiIOapn1_QvTbNtdUnUiouu2F4Gopp4do,7318 +huggingface_hub/commands/scan_cache.py,sha256=4o_jQsZloicRa-P8gncUBncVyWswpSF9T6KGlNrGodk,5183 +huggingface_hub/commands/tag.py,sha256=gCoR8G95lhHBzyVytTxT7MnqTmjKYtStDnHXcysOJwg,6287 +huggingface_hub/commands/upload.py,sha256=Mr69qO60otqCVw0sVSBPykUTkL9HO-pkCyulSD2mROM,13622 +huggingface_hub/commands/user.py,sha256=QApZJOCQEHADhjunM3hlQ72uqHsearCiCE4SdpzGdcc,6893 +huggingface_hub/community.py,sha256=SBaOfI-3atCzRbO0gDS8BYxctbdvD4G0X6D0GfY8Fgc,12203 +huggingface_hub/constants.py,sha256=_xLHaNnAcA9KnENaABbsee3UctmaViE8AQ6njk17ni4,7591 +huggingface_hub/errors.py,sha256=IM0lNbExLzaYEs0HrrPvY4-kyj6DiP2Szu7Jy9slHOE,2083 +huggingface_hub/fastai_utils.py,sha256=5I7zAfgHJU_mZnxnf9wgWTHrCRu_EAV8VTangDVfE_o,16676 +huggingface_hub/file_download.py,sha256=n5ovYqh1-xe3ptRHuS-EXn6X_-3ZVI7C-pQrHD45DtA,82236 +huggingface_hub/hf_api.py,sha256=hyMkURhYXalCNG4Qqx3PhN7Ucru8m18ZidEok_T2504,375216 +huggingface_hub/hf_file_system.py,sha256=r7NGKIIF0o2GaFn_qZzvoCGi6Vyhc3BH8wcFGFztyCw,37425 +huggingface_hub/hub_mixin.py,sha256=ktwuDqSXFU2q2_xj676R-zag_tB3QEiMMVFueJ3YD9g,34644 +huggingface_hub/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +huggingface_hub/inference/__pycache__/__init__.cpython-310.pyc,, +huggingface_hub/inference/__pycache__/_client.cpython-310.pyc,, +huggingface_hub/inference/__pycache__/_common.cpython-310.pyc,, +huggingface_hub/inference/__pycache__/_templating.cpython-310.pyc,, +huggingface_hub/inference/__pycache__/_types.cpython-310.pyc,, +huggingface_hub/inference/_client.py,sha256=NveAWL3hx8dwse0t_0U3dlRJoEtZ1G12TxZxvWimMF0,117568 +huggingface_hub/inference/_common.py,sha256=L4b0A_raoWAUfl7d2vn6-rLfUcHcG5kjn_wUYIkx4uY,16362 +huggingface_hub/inference/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +huggingface_hub/inference/_generated/__pycache__/__init__.cpython-310.pyc,, 
+huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-310.pyc,, +huggingface_hub/inference/_generated/_async_client.py,sha256=Yva-stGgFAFH0vFF7o9JE3GbX14bGNz0AhQStfZDB8U,120700 +huggingface_hub/inference/_generated/types/__init__.py,sha256=Ro2qZb2STQz8V3bfElXY4DvmkxKuBaPjzY5BgH-1khI,5110 +huggingface_hub/inference/_generated/types/__pycache__/__init__.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/audio_classification.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/automatic_speech_recognition.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/base.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/depth_estimation.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/document_question_answering.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/image_to_text.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/question_answering.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/summarization.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/text2text_generation.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/text_generation.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/text_to_audio.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/text_to_image.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/translation.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/video_classification.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/visual_question_answering.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/zero_shot_classification.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-310.pyc,, +huggingface_hub/inference/_generated/types/audio_classification.py,sha256=wk4kUTLQZoXWLpiUOpKRHRRE-JYqqJlzGVe62VACR-0,1347 +huggingface_hub/inference/_generated/types/audio_to_audio.py,sha256=n7GeCepzt254yoSLsdjrI1j4fzYgjWzxoaKE5gZJc48,881 +huggingface_hub/inference/_generated/types/automatic_speech_recognition.py,sha256=-7UHu5QTGwSrJFnrbMgzeUFpJQOGyTmfK_QHgtnx6j8,5352 
+huggingface_hub/inference/_generated/types/base.py,sha256=Cq4gUVtwwLmWyiIIq4NSL8kRk0EWk9QWWHc5Vup2LVg,6213 +huggingface_hub/inference/_generated/types/chat_completion.py,sha256=cPe_VAs-bfsUELY0fZtMnId6fdVoJnAcRmJItD5Otck,8185 +huggingface_hub/inference/_generated/types/depth_estimation.py,sha256=lmLmd8S313ZMCG94RblwquL0UN_0hJmXAhWUqSIrtwc,898 +huggingface_hub/inference/_generated/types/document_question_answering.py,sha256=_hBzK4Pu9X_zXsgOO4JNSloIKuVfE5m7eGwEw5YTfZ4,3264 +huggingface_hub/inference/_generated/types/feature_extraction.py,sha256=KerTrRR5YR02X0qBDzrtK8953amCGf_adSUbfWOozD4,664 +huggingface_hub/inference/_generated/types/fill_mask.py,sha256=JcYIbTDXc4f7k2FNY3fCWtJ9ke3HUZFz2pDOOrDuxOs,1714 +huggingface_hub/inference/_generated/types/image_classification.py,sha256=W1QVfc0j7t6qbxjICUQDwygRx43yPPGZKyStogHkHqg,1359 +huggingface_hub/inference/_generated/types/image_segmentation.py,sha256=nVQc5Qhv37qqmTn_M8xegpNgk14ozKelsGIYC8hba_0,1803 +huggingface_hub/inference/_generated/types/image_to_image.py,sha256=MbubS9pD8bFP9LoI4QoQxJwpUGeNXi5iSEk8Ymhbh0M,1797 +huggingface_hub/inference/_generated/types/image_to_text.py,sha256=mloLf-LO7oR_2HbPY1-XMM18BfjMODytRaxTXYkIXoY,4827 +huggingface_hub/inference/_generated/types/object_detection.py,sha256=F8ly6GSE8dupsekPVf6G5nI8teZAIP4iXw6u3zm1JiE,1569 +huggingface_hub/inference/_generated/types/question_answering.py,sha256=xLDy5oA-k9JPncSU6NqPAPb8rWPerfTbU857G3e7JZ0,2884 +huggingface_hub/inference/_generated/types/sentence_similarity.py,sha256=edH-TWfnZ4J0zJD-zqfcRMLwOV0dTt1g5Y0caYnVuPc,1018 +huggingface_hub/inference/_generated/types/summarization.py,sha256=RWCXh7yftI_JWvLsr7JiDpQPexq1klYP158tUICUcbM,1574 +huggingface_hub/inference/_generated/types/table_question_answering.py,sha256=PuVZlR6dI6FEUK7pjMSVMtzkDgrcxdKjfcnDbVmPdSs,1569 +huggingface_hub/inference/_generated/types/text2text_generation.py,sha256=SZYfdhyraG5vZ2Jzm1C8k9w9IYLxMtm5UUu1tU2oOQk,1604 +huggingface_hub/inference/_generated/types/text_classification.py,sha256=vC7B1sBzZ4gdLjE2i2Y7w5cpdaFwQKK1dlWqW0asjIk,1347 +huggingface_hub/inference/_generated/types/text_generation.py,sha256=wR2DrDazFmeqIchkHXPUv17d4zWUmiUSPBdUFCDqJNY,4284 +huggingface_hub/inference/_generated/types/text_to_audio.py,sha256=cgvECsiwsycgP9Tfs_GU1CJfo9AngVn6x9s4fHCP-g4,4819 +huggingface_hub/inference/_generated/types/text_to_image.py,sha256=oBGeJ-S9WfsMxVQlvEOll9yaCyMXZ277wsYFD8bt87U,1931 +huggingface_hub/inference/_generated/types/token_classification.py,sha256=7oL8AZOTWtf2bYD2T3236GDNMtUl7FtydaB6We7wbfw,1890 +huggingface_hub/inference/_generated/types/translation.py,sha256=MruCx6yhzQGlxSdBRXCVoEhRzRSa5Ks4bjZ1PDrlTeQ,1562 +huggingface_hub/inference/_generated/types/video_classification.py,sha256=BI2_PP-pxLT6w9TuX6QCZz4BsG-ZukTXnW6fWMchI5M,1579 +huggingface_hub/inference/_generated/types/visual_question_answering.py,sha256=0PHNnjwxxHvG3SjOz7O7DckbBeGYDsRmlagG11qIkkM,1667 +huggingface_hub/inference/_generated/types/zero_shot_classification.py,sha256=u6jfFCqDv9XqeAN5E9_Xf7jqMZgqTRFF_S9PtWbiBUk,1963 +huggingface_hub/inference/_generated/types/zero_shot_image_classification.py,sha256=qVH6Ms0FjF8TraGy4BYiS8lmvGq9xiIDdXqGFynLHMA,1689 +huggingface_hub/inference/_generated/types/zero_shot_object_detection.py,sha256=PU4OOlQ2aAOosW2JlG2Z27MEQpmE6BxcygH_ns3w1KQ,1662 +huggingface_hub/inference/_templating.py,sha256=LCy-U_25R-l5dhcEHsyRwiOrgvKQHXkdSmynWCfsPjI,3991 +huggingface_hub/inference/_types.py,sha256=C73l5-RO8P1UMBHF8OAO9CRUq7Xdv33pcADoJsGMPSU,1782 
+huggingface_hub/inference_api.py,sha256=UXOKu_Ez2I3hDsjguqCcCrj03WFDndehpngYiIAucdg,8331 +huggingface_hub/keras_mixin.py,sha256=2DF-hNGdxJCxqvcw46id-ExH_865ZAXsJd2vmpAuWHQ,19484 +huggingface_hub/lfs.py,sha256=GNmKV_SURcGxMa3p_OyF8ttoq7fZhHjgpyxYzP4VTqU,19690 +huggingface_hub/repocard.py,sha256=oUrGim27nCHkevPDZDbUp68uKTxB8xbdoyeqv24pexc,34605 +huggingface_hub/repocard_data.py,sha256=1hIkI8xp0EmW2aR3LtHMrjIMk_W-KJxHslMjpNMwVPg,31911 +huggingface_hub/repository.py,sha256=87QxXPTK9PCztFW69oD4RZsNMLL9yxoQDdn-F81wSdM,54548 +huggingface_hub/serialization/__init__.py,sha256=W74TaCtYnMfpvGEQr1SS-OBmqPUFnM9AeWT9hTJCG9Y,910 +huggingface_hub/serialization/__pycache__/__init__.cpython-310.pyc,, +huggingface_hub/serialization/__pycache__/_base.cpython-310.pyc,, +huggingface_hub/serialization/__pycache__/_numpy.cpython-310.pyc,, +huggingface_hub/serialization/__pycache__/_tensorflow.cpython-310.pyc,, +huggingface_hub/serialization/__pycache__/_torch.cpython-310.pyc,, +huggingface_hub/serialization/_base.py,sha256=AgO-16i-vyosbERnLSCFYgaXbVqQDM7xfIne8gsWrLQ,7133 +huggingface_hub/serialization/_numpy.py,sha256=idULJp1js6L6E8o-MiGVqNa4lBfXS2cfAmqivnpsaYs,2671 +huggingface_hub/serialization/_tensorflow.py,sha256=Rf4kw1NYxEaoUXB8aLtQLHrTjgobaEAJdzO0w0kbP58,3559 +huggingface_hub/serialization/_torch.py,sha256=xYR6e_G9laMTroWLiQRABSuloTQuuRSQNyYHdT_rmXU,7687 +huggingface_hub/templates/datasetcard_template.md,sha256=W-EMqR6wndbrnZorkVv56URWPG49l7MATGeI015kTvs,5503 +huggingface_hub/templates/modelcard_template.md,sha256=4AqArS3cqdtbit5Bo-DhjcnDFR-pza5hErLLTPM4Yuc,6870 +huggingface_hub/utils/__init__.py,sha256=44yhxTtWsuMGrZcALK-3UuVazGBtc94z9nZwLmLnu8w,3589 +huggingface_hub/utils/__pycache__/__init__.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_cache_assets.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_cache_manager.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_chunk_utils.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_datetime.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_deprecation.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_errors.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_experimental.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_fixes.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_git_credential.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_headers.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_hf_folder.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_http.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_pagination.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_paths.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_runtime.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_safetensors.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_subprocess.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_telemetry.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_token.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_typing.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/_validators.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/endpoint_helpers.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/logging.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/sha.cpython-310.pyc,, +huggingface_hub/utils/__pycache__/tqdm.cpython-310.pyc,, +huggingface_hub/utils/_cache_assets.py,sha256=kai77HPQMfYpROouMBQCr_gdBCaeTm996Sqj0dExbNg,5728 
+huggingface_hub/utils/_cache_manager.py,sha256=Fs1XVP1UGzUTogMfMfEi_MfpURzHyW__djX0s2oLmrY,29307 +huggingface_hub/utils/_chunk_utils.py,sha256=kRCaj5228_vKcyLWspd8Xq01f17Jz6ds5Sr9ed5d_RU,2130 +huggingface_hub/utils/_datetime.py,sha256=DHnktKm1taeOe2XCBgNU4pVck5d70qu8FJ7nACD6C3k,2554 +huggingface_hub/utils/_deprecation.py,sha256=HZhRGGUX_QMKBBBwHHlffLtmCSK01TOpeXHefZbPfwI,4872 +huggingface_hub/utils/_errors.py,sha256=N5nUkCCaj8393wntazeTcKNrwDZfsDVHVMxxreHPfaE,15141 +huggingface_hub/utils/_experimental.py,sha256=crCPH6k6-11wwH2GZuZzZzZbjUotay49ywV1SSJhMHM,2395 +huggingface_hub/utils/_fixes.py,sha256=wJ0FGewO6_zZFo65crJWcth9zODZz4TdyeDxkGNSeB0,2898 +huggingface_hub/utils/_git_credential.py,sha256=SDdsiREr1TcAR2Ze2TB0E5cYzVJgvDZrs60od9lAsMc,4596 +huggingface_hub/utils/_headers.py,sha256=05sDPAi7-Fs3Z4YLbrTJTAbIT7yjSX9DEqotd6gHqhQ,9593 +huggingface_hub/utils/_hf_folder.py,sha256=gWH-TT9h_6X_CyrtLTtKNEawf9kKlCHraFiOu09BuLk,3613 +huggingface_hub/utils/_http.py,sha256=-Vuphx-pX9dvVBUf-AS2dECjO0HJBscXzith_FKOgO4,13458 +huggingface_hub/utils/_pagination.py,sha256=hzLFLd8i_DKkPRVYzOx2CxLt5lcocEiAxDJriQUjAjY,1841 +huggingface_hub/utils/_paths.py,sha256=bs6PlgsVdAINC9bAKivVOcOod1lIun0YgJbQ3VpmpPE,4646 +huggingface_hub/utils/_runtime.py,sha256=QooW0cgJ349PX8x46KBluN01KMMvUm0ZQ9SsmidBH74,11041 +huggingface_hub/utils/_safetensors.py,sha256=GW3nyv7xQcuwObKYeYoT9VhURVzG1DZTbKBKho8Bbos,4458 +huggingface_hub/utils/_subprocess.py,sha256=34ETD8JvLzm16NRZHciaCLXdE9aRyxuDdOA5gdNvMJ8,4617 +huggingface_hub/utils/_telemetry.py,sha256=jHAdgWNcL9nVvMT3ec3i78O-cwL09GnlifuokzpQjMI,4641 +huggingface_hub/utils/_token.py,sha256=cxBZaafW2IsJ2dKWd55v7056zycW1ewp_nPk8dNcSO4,5476 +huggingface_hub/utils/_typing.py,sha256=pXh7GtVtSBD_Fvvthex9BRTAJZ6bWScUOw06oJS0Lek,2025 +huggingface_hub/utils/_validators.py,sha256=dDsVG31iooTYrIyi5Vwr1DukL0fEmJwu3ceVNduhsuE,9204 +huggingface_hub/utils/endpoint_helpers.py,sha256=n_VguR_L2Vl6Mi_4PFO2iAd5xaPeQRiD8KRBpzs4nMw,9536 +huggingface_hub/utils/insecure_hashlib.py,sha256=OjxlvtSQHpbLp9PWSrXBDJ0wHjxCBU-SQJgucEEXDbU,1058 +huggingface_hub/utils/logging.py,sha256=Cp03s0uEl3kDM9XHQW9a8GAoExODQ-e7kEtgMt-_To8,4728 +huggingface_hub/utils/sha.py,sha256=QLlIwPCyz46MmUc_4L8xl87KfYoBks9kPgsMZ5JCz-o,902 +huggingface_hub/utils/tqdm.py,sha256=x35PqUA8bBBztPrqhv87Y_TGl5CdlfBs4pe6k1YyDJ8,9390 diff --git a/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/WHEEL b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..bab98d675883cc7567a79df485cd7b4f015e376f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/entry_points.txt b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..eb3dafd90f19de60b3e520aeaf8132402980214d --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/entry_points.txt @@ -0,0 +1,6 @@ +[console_scripts] +huggingface-cli = huggingface_hub.commands.huggingface_cli:main + +[fsspec.specs] +hf=huggingface_hub.HfFileSystem + diff --git a/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/top_level.txt 
b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b964ccca3c1b6766042b3fe3b2707ba25372924 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/huggingface_hub-0.23.0.dist-info/top_level.txt @@ -0,0 +1 @@ +huggingface_hub diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/__pycache__/__init__.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/lxml/includes/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cd0df73a50e145eeff33d4a91d2653710c090606 Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/lxml/includes/__pycache__/__init__.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/__init__.py b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/__pycache__/__init__.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9aa839cf58d77909df7e8f3864a000d70b959c1b Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/__pycache__/__init__.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/libcharset.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/libcharset.h new file mode 100644 index 0000000000000000000000000000000000000000..fcf22748101279e454fd2fefe01908fd8545bce2 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/libcharset.h @@ -0,0 +1,45 @@ +/* Copyright (C) 2003 Free Software Foundation, Inc. + This file is part of the GNU CHARSET Library. + + The GNU CHARSET Library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public License as + published by the Free Software Foundation; either version 2 of the + License, or (at your option) any later version. + + The GNU CHARSET Library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public License + along with the GNU CHARSET Library; see the file COPYING.LIB. If not, + see . */ + +#ifndef _LIBCHARSET_H +#define _LIBCHARSET_H + +#include + + +#ifdef __cplusplus +extern "C" { +#endif + + +/* Support for relocatable packages. */ + +/* Sets the original and the current installation prefix of the package. + Relocation simply replaces a pathname starting with the original prefix + by the corresponding pathname with the current prefix instead. Both + prefixes should be directory names without trailing slash (i.e. use "" + instead of "/"). 
*/ +extern void libcharset_set_relocation_prefix (const char *orig_prefix, + const char *curr_prefix); + + +#ifdef __cplusplus +} +#endif + + +#endif /* _LIBCHARSET_H */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/localcharset.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/localcharset.h new file mode 100644 index 0000000000000000000000000000000000000000..34ce0adde9bb793f1c1cd5f81b5cc3d2eff08ab1 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/localcharset.h @@ -0,0 +1,137 @@ +/* Determine a canonical name for the current locale's character encoding. + Copyright (C) 2000-2003, 2009-2019 Free Software Foundation, Inc. + This file is part of the GNU CHARSET Library. + + This program is free software; you can redistribute it and/or modify it + under the terms of the GNU Lesser General Public License as published + by the Free Software Foundation; either version 2, or (at your option) + any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public License + along with this program; if not, see . */ + +#ifndef _LOCALCHARSET_H +#define _LOCALCHARSET_H + + +#ifdef __cplusplus +extern "C" { +#endif + + +/* Determine the current locale's character encoding, and canonicalize it + into one of the canonical names listed below. + The result must not be freed; it is statically allocated. The result + becomes invalid when setlocale() is used to change the global locale, or + when the value of one of the environment variables LC_ALL, LC_CTYPE, LANG + is changed; threads in multithreaded programs should not do this. + If the canonical name cannot be determined, the result is a non-canonical + name. */ +extern const char * locale_charset (void); + +/* About GNU canonical names for character encodings: + + Every canonical name must be supported by GNU libiconv. Support by GNU libc + is also desirable. + + The name is case insensitive. Usually an upper case MIME charset name is + preferred. + + The current list of these GNU canonical names is: + + name MIME? 
used by which systems + (darwin = Mac OS X, windows = native Windows) + + ASCII, ANSI_X3.4-1968 glibc solaris freebsd netbsd darwin minix cygwin + ISO-8859-1 Y glibc aix hpux irix osf solaris freebsd netbsd openbsd darwin cygwin zos + ISO-8859-2 Y glibc aix hpux irix osf solaris freebsd netbsd openbsd darwin cygwin zos + ISO-8859-3 Y glibc solaris cygwin + ISO-8859-4 Y hpux osf solaris freebsd netbsd openbsd darwin + ISO-8859-5 Y glibc aix hpux irix osf solaris freebsd netbsd openbsd darwin cygwin zos + ISO-8859-6 Y glibc aix hpux solaris cygwin + ISO-8859-7 Y glibc aix hpux irix osf solaris freebsd netbsd openbsd darwin cygwin zos + ISO-8859-8 Y glibc aix hpux osf solaris cygwin zos + ISO-8859-9 Y glibc aix hpux irix osf solaris freebsd darwin cygwin zos + ISO-8859-13 glibc hpux solaris freebsd netbsd openbsd darwin cygwin + ISO-8859-14 glibc cygwin + ISO-8859-15 glibc aix irix osf solaris freebsd netbsd openbsd darwin cygwin + KOI8-R Y glibc hpux solaris freebsd netbsd openbsd darwin + KOI8-U Y glibc freebsd netbsd openbsd darwin cygwin + KOI8-T glibc + CP437 dos + CP775 dos + CP850 aix osf dos + CP852 dos + CP855 dos + CP856 aix + CP857 dos + CP861 dos + CP862 dos + CP864 dos + CP865 dos + CP866 freebsd netbsd openbsd darwin dos + CP869 dos + CP874 windows dos + CP922 aix + CP932 aix cygwin windows dos + CP943 aix zos + CP949 osf darwin windows dos + CP950 windows dos + CP1046 aix + CP1124 aix + CP1125 dos + CP1129 aix + CP1131 freebsd darwin + CP1250 windows + CP1251 glibc hpux solaris freebsd netbsd openbsd darwin cygwin windows + CP1252 aix windows + CP1253 windows + CP1254 windows + CP1255 glibc windows + CP1256 windows + CP1257 windows + GB2312 Y glibc aix hpux irix solaris freebsd netbsd darwin cygwin zos + EUC-JP Y glibc aix hpux irix osf solaris freebsd netbsd darwin cygwin + EUC-KR Y glibc aix hpux irix osf solaris freebsd netbsd darwin cygwin zos + EUC-TW glibc aix hpux irix osf solaris netbsd + BIG5 Y glibc aix hpux osf solaris freebsd netbsd darwin cygwin zos + BIG5-HKSCS glibc hpux solaris netbsd darwin + GBK glibc aix osf solaris freebsd darwin cygwin windows dos + GB18030 glibc hpux solaris freebsd netbsd darwin + SHIFT_JIS Y hpux osf solaris freebsd netbsd darwin + JOHAB glibc solaris windows + TIS-620 glibc aix hpux osf solaris cygwin zos + VISCII Y glibc + TCVN5712-1 glibc + ARMSCII-8 glibc freebsd netbsd darwin + GEORGIAN-PS glibc cygwin + PT154 glibc netbsd cygwin + HP-ROMAN8 hpux + HP-ARABIC8 hpux + HP-GREEK8 hpux + HP-HEBREW8 hpux + HP-TURKISH8 hpux + HP-KANA8 hpux + DEC-KANJI osf + DEC-HANYU osf + UTF-8 Y glibc aix hpux osf solaris netbsd darwin cygwin zos + + Note: Names which are not marked as being a MIME name should not be used in + Internet protocols for information interchange (mail, news, etc.). + + Note: ASCII and ANSI_X3.4-1968 are synonymous canonical names. Applications + must understand both names and treat them as equivalent. 
+ */ + + +#ifdef __cplusplus +} +#endif + + +#endif /* _LOCALCHARSET_H */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/zconf.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/zconf.h new file mode 100644 index 0000000000000000000000000000000000000000..ede3c82e3eb129528194a2045c808b418fb20296 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/zconf.h @@ -0,0 +1,543 @@ +/* zconf.h -- configuration of the zlib compression library + * Copyright (C) 1995-2024 Jean-loup Gailly, Mark Adler + * For conditions of distribution and use, see copyright notice in zlib.h + */ + +/* @(#) $Id$ */ + +#ifndef ZCONF_H +#define ZCONF_H + +/* + * If you *really* need a unique prefix for all types and library functions, + * compile with -DZ_PREFIX. The "standard" zlib should be compiled without it. + * Even better than compiling with -DZ_PREFIX would be to use configure to set + * this permanently in zconf.h using "./configure --zprefix". + */ +#ifdef Z_PREFIX /* may be set to #if 1 by ./configure */ +# define Z_PREFIX_SET + +/* all linked symbols and init macros */ +# define _dist_code z__dist_code +# define _length_code z__length_code +# define _tr_align z__tr_align +# define _tr_flush_bits z__tr_flush_bits +# define _tr_flush_block z__tr_flush_block +# define _tr_init z__tr_init +# define _tr_stored_block z__tr_stored_block +# define _tr_tally z__tr_tally +# define adler32 z_adler32 +# define adler32_combine z_adler32_combine +# define adler32_combine64 z_adler32_combine64 +# define adler32_z z_adler32_z +# ifndef Z_SOLO +# define compress z_compress +# define compress2 z_compress2 +# define compressBound z_compressBound +# endif +# define crc32 z_crc32 +# define crc32_combine z_crc32_combine +# define crc32_combine64 z_crc32_combine64 +# define crc32_combine_gen z_crc32_combine_gen +# define crc32_combine_gen64 z_crc32_combine_gen64 +# define crc32_combine_op z_crc32_combine_op +# define crc32_z z_crc32_z +# define deflate z_deflate +# define deflateBound z_deflateBound +# define deflateCopy z_deflateCopy +# define deflateEnd z_deflateEnd +# define deflateGetDictionary z_deflateGetDictionary +# define deflateInit z_deflateInit +# define deflateInit2 z_deflateInit2 +# define deflateInit2_ z_deflateInit2_ +# define deflateInit_ z_deflateInit_ +# define deflateParams z_deflateParams +# define deflatePending z_deflatePending +# define deflatePrime z_deflatePrime +# define deflateReset z_deflateReset +# define deflateResetKeep z_deflateResetKeep +# define deflateSetDictionary z_deflateSetDictionary +# define deflateSetHeader z_deflateSetHeader +# define deflateTune z_deflateTune +# define deflate_copyright z_deflate_copyright +# define get_crc_table z_get_crc_table +# ifndef Z_SOLO +# define gz_error z_gz_error +# define gz_intmax z_gz_intmax +# define gz_strwinerror z_gz_strwinerror +# define gzbuffer z_gzbuffer +# define gzclearerr z_gzclearerr +# define gzclose z_gzclose +# define gzclose_r z_gzclose_r +# define gzclose_w z_gzclose_w +# define gzdirect z_gzdirect +# define gzdopen z_gzdopen +# define gzeof z_gzeof +# define gzerror z_gzerror +# define gzflush z_gzflush +# define gzfread z_gzfread +# define gzfwrite z_gzfwrite +# define gzgetc z_gzgetc +# define gzgetc_ z_gzgetc_ +# define gzgets z_gzgets +# define gzoffset z_gzoffset +# define gzoffset64 z_gzoffset64 +# define gzopen z_gzopen +# define gzopen64 z_gzopen64 +# ifdef _WIN32 +# define gzopen_w z_gzopen_w +# endif +# define gzprintf z_gzprintf +# define gzputc z_gzputc 
+# define gzputs z_gzputs +# define gzread z_gzread +# define gzrewind z_gzrewind +# define gzseek z_gzseek +# define gzseek64 z_gzseek64 +# define gzsetparams z_gzsetparams +# define gztell z_gztell +# define gztell64 z_gztell64 +# define gzungetc z_gzungetc +# define gzvprintf z_gzvprintf +# define gzwrite z_gzwrite +# endif +# define inflate z_inflate +# define inflateBack z_inflateBack +# define inflateBackEnd z_inflateBackEnd +# define inflateBackInit z_inflateBackInit +# define inflateBackInit_ z_inflateBackInit_ +# define inflateCodesUsed z_inflateCodesUsed +# define inflateCopy z_inflateCopy +# define inflateEnd z_inflateEnd +# define inflateGetDictionary z_inflateGetDictionary +# define inflateGetHeader z_inflateGetHeader +# define inflateInit z_inflateInit +# define inflateInit2 z_inflateInit2 +# define inflateInit2_ z_inflateInit2_ +# define inflateInit_ z_inflateInit_ +# define inflateMark z_inflateMark +# define inflatePrime z_inflatePrime +# define inflateReset z_inflateReset +# define inflateReset2 z_inflateReset2 +# define inflateResetKeep z_inflateResetKeep +# define inflateSetDictionary z_inflateSetDictionary +# define inflateSync z_inflateSync +# define inflateSyncPoint z_inflateSyncPoint +# define inflateUndermine z_inflateUndermine +# define inflateValidate z_inflateValidate +# define inflate_copyright z_inflate_copyright +# define inflate_fast z_inflate_fast +# define inflate_table z_inflate_table +# ifndef Z_SOLO +# define uncompress z_uncompress +# define uncompress2 z_uncompress2 +# endif +# define zError z_zError +# ifndef Z_SOLO +# define zcalloc z_zcalloc +# define zcfree z_zcfree +# endif +# define zlibCompileFlags z_zlibCompileFlags +# define zlibVersion z_zlibVersion + +/* all zlib typedefs in zlib.h and zconf.h */ +# define Byte z_Byte +# define Bytef z_Bytef +# define alloc_func z_alloc_func +# define charf z_charf +# define free_func z_free_func +# ifndef Z_SOLO +# define gzFile z_gzFile +# endif +# define gz_header z_gz_header +# define gz_headerp z_gz_headerp +# define in_func z_in_func +# define intf z_intf +# define out_func z_out_func +# define uInt z_uInt +# define uIntf z_uIntf +# define uLong z_uLong +# define uLongf z_uLongf +# define voidp z_voidp +# define voidpc z_voidpc +# define voidpf z_voidpf + +/* all zlib structs in zlib.h and zconf.h */ +# define gz_header_s z_gz_header_s +# define internal_state z_internal_state + +#endif + +#if defined(__MSDOS__) && !defined(MSDOS) +# define MSDOS +#endif +#if (defined(OS_2) || defined(__OS2__)) && !defined(OS2) +# define OS2 +#endif +#if defined(_WINDOWS) && !defined(WINDOWS) +# define WINDOWS +#endif +#if defined(_WIN32) || defined(_WIN32_WCE) || defined(__WIN32__) +# ifndef WIN32 +# define WIN32 +# endif +#endif +#if (defined(MSDOS) || defined(OS2) || defined(WINDOWS)) && !defined(WIN32) +# if !defined(__GNUC__) && !defined(__FLAT__) && !defined(__386__) +# ifndef SYS16BIT +# define SYS16BIT +# endif +# endif +#endif + +/* + * Compile with -DMAXSEG_64K if the alloc function cannot allocate more + * than 64k bytes at a time (needed on systems with 16-bit int). 
+ */ +#ifdef SYS16BIT +# define MAXSEG_64K +#endif +#ifdef MSDOS +# define UNALIGNED_OK +#endif + +#ifdef __STDC_VERSION__ +# ifndef STDC +# define STDC +# endif +# if __STDC_VERSION__ >= 199901L +# ifndef STDC99 +# define STDC99 +# endif +# endif +#endif +#if !defined(STDC) && (defined(__STDC__) || defined(__cplusplus)) +# define STDC +#endif +#if !defined(STDC) && (defined(__GNUC__) || defined(__BORLANDC__)) +# define STDC +#endif +#if !defined(STDC) && (defined(MSDOS) || defined(WINDOWS) || defined(WIN32)) +# define STDC +#endif +#if !defined(STDC) && (defined(OS2) || defined(__HOS_AIX__)) +# define STDC +#endif + +#if defined(__OS400__) && !defined(STDC) /* iSeries (formerly AS/400). */ +# define STDC +#endif + +#ifndef STDC +# ifndef const /* cannot use !defined(STDC) && !defined(const) on Mac */ +# define const /* note: need a more gentle solution here */ +# endif +#endif + +#if defined(ZLIB_CONST) && !defined(z_const) +# define z_const const +#else +# define z_const +#endif + +#ifdef Z_SOLO +# ifdef _WIN64 + typedef unsigned long long z_size_t; +# else + typedef unsigned long z_size_t; +# endif +#else +# define z_longlong long long +# if defined(NO_SIZE_T) + typedef unsigned NO_SIZE_T z_size_t; +# elif defined(STDC) +# include + typedef size_t z_size_t; +# else + typedef unsigned long z_size_t; +# endif +# undef z_longlong +#endif + +/* Maximum value for memLevel in deflateInit2 */ +#ifndef MAX_MEM_LEVEL +# ifdef MAXSEG_64K +# define MAX_MEM_LEVEL 8 +# else +# define MAX_MEM_LEVEL 9 +# endif +#endif + +/* Maximum value for windowBits in deflateInit2 and inflateInit2. + * WARNING: reducing MAX_WBITS makes minigzip unable to extract .gz files + * created by gzip. (Files created by minigzip can still be extracted by + * gzip.) + */ +#ifndef MAX_WBITS +# define MAX_WBITS 15 /* 32K LZ77 window */ +#endif + +/* The memory requirements for deflate are (in bytes): + (1 << (windowBits+2)) + (1 << (memLevel+9)) + that is: 128K for windowBits=15 + 128K for memLevel = 8 (default values) + plus a few kilobytes for small objects. For example, if you want to reduce + the default memory requirements from 256K to 128K, compile with + make CFLAGS="-O -DMAX_WBITS=14 -DMAX_MEM_LEVEL=7" + Of course this will generally degrade compression (there's no free lunch). + + The memory requirements for inflate are (in bytes) 1 << windowBits + that is, 32K for windowBits=15 (default value) plus about 7 kilobytes + for small objects. +*/ + + /* Type declarations */ + +#ifndef OF /* function prototypes */ +# ifdef STDC +# define OF(args) args +# else +# define OF(args) () +# endif +#endif + +/* The following definitions for FAR are needed only for MSDOS mixed + * model programming (small or medium model with some far allocations). + * This was tested only with MSC; for other MSDOS compilers you may have + * to define NO_MEMCPY in zutil.h. If you don't need the mixed model, + * just define FAR to be empty. + */ +#ifdef SYS16BIT +# if defined(M_I86SM) || defined(M_I86MM) + /* MSC small or medium model */ +# define SMALL_MEDIUM +# ifdef _MSC_VER +# define FAR _far +# else +# define FAR far +# endif +# endif +# if (defined(__SMALL__) || defined(__MEDIUM__)) + /* Turbo C small or medium model */ +# define SMALL_MEDIUM +# ifdef __BORLANDC__ +# define FAR _far +# else +# define FAR far +# endif +# endif +#endif + +#if defined(WINDOWS) || defined(WIN32) + /* If building or using zlib as a DLL, define ZLIB_DLL. + * This is not mandatory, but it offers a little performance increase. 
+ */ +# ifdef ZLIB_DLL +# if defined(WIN32) && (!defined(__BORLANDC__) || (__BORLANDC__ >= 0x500)) +# ifdef ZLIB_INTERNAL +# define ZEXTERN extern __declspec(dllexport) +# else +# define ZEXTERN extern __declspec(dllimport) +# endif +# endif +# endif /* ZLIB_DLL */ + /* If building or using zlib with the WINAPI/WINAPIV calling convention, + * define ZLIB_WINAPI. + * Caution: the standard ZLIB1.DLL is NOT compiled using ZLIB_WINAPI. + */ +# ifdef ZLIB_WINAPI +# ifdef FAR +# undef FAR +# endif +# ifndef WIN32_LEAN_AND_MEAN +# define WIN32_LEAN_AND_MEAN +# endif +# include + /* No need for _export, use ZLIB.DEF instead. */ + /* For complete Windows compatibility, use WINAPI, not __stdcall. */ +# define ZEXPORT WINAPI +# ifdef WIN32 +# define ZEXPORTVA WINAPIV +# else +# define ZEXPORTVA FAR CDECL +# endif +# endif +#endif + +#if defined (__BEOS__) +# ifdef ZLIB_DLL +# ifdef ZLIB_INTERNAL +# define ZEXPORT __declspec(dllexport) +# define ZEXPORTVA __declspec(dllexport) +# else +# define ZEXPORT __declspec(dllimport) +# define ZEXPORTVA __declspec(dllimport) +# endif +# endif +#endif + +#ifndef ZEXTERN +# define ZEXTERN extern +#endif +#ifndef ZEXPORT +# define ZEXPORT +#endif +#ifndef ZEXPORTVA +# define ZEXPORTVA +#endif + +#ifndef FAR +# define FAR +#endif + +#if !defined(__MACTYPES__) +typedef unsigned char Byte; /* 8 bits */ +#endif +typedef unsigned int uInt; /* 16 bits or more */ +typedef unsigned long uLong; /* 32 bits or more */ + +#ifdef SMALL_MEDIUM + /* Borland C/C++ and some old MSC versions ignore FAR inside typedef */ +# define Bytef Byte FAR +#else + typedef Byte FAR Bytef; +#endif +typedef char FAR charf; +typedef int FAR intf; +typedef uInt FAR uIntf; +typedef uLong FAR uLongf; + +#ifdef STDC + typedef void const *voidpc; + typedef void FAR *voidpf; + typedef void *voidp; +#else + typedef Byte const *voidpc; + typedef Byte FAR *voidpf; + typedef Byte *voidp; +#endif + +#if !defined(Z_U4) && !defined(Z_SOLO) && defined(STDC) +# include +# if (UINT_MAX == 0xffffffffUL) +# define Z_U4 unsigned +# elif (ULONG_MAX == 0xffffffffUL) +# define Z_U4 unsigned long +# elif (USHRT_MAX == 0xffffffffUL) +# define Z_U4 unsigned short +# endif +#endif + +#ifdef Z_U4 + typedef Z_U4 z_crc_t; +#else + typedef unsigned long z_crc_t; +#endif + +#if 1 /* was set to #if 1 by ./configure */ +# define Z_HAVE_UNISTD_H +#endif + +#if 1 /* was set to #if 1 by ./configure */ +# define Z_HAVE_STDARG_H +#endif + +#ifdef STDC +# ifndef Z_SOLO +# include /* for off_t */ +# endif +#endif + +#if defined(STDC) || defined(Z_HAVE_STDARG_H) +# ifndef Z_SOLO +# include /* for va_list */ +# endif +#endif + +#ifdef _WIN32 +# ifndef Z_SOLO +# include /* for wchar_t */ +# endif +#endif + +/* a little trick to accommodate both "#define _LARGEFILE64_SOURCE" and + * "#define _LARGEFILE64_SOURCE 1" as requesting 64-bit operations, (even + * though the former does not conform to the LFS document), but considering + * both "#undef _LARGEFILE64_SOURCE" and "#define _LARGEFILE64_SOURCE 0" as + * equivalently requesting no 64-bit operations + */ +#if defined(_LARGEFILE64_SOURCE) && -_LARGEFILE64_SOURCE - -1 == 1 +# undef _LARGEFILE64_SOURCE +#endif + +#ifndef Z_HAVE_UNISTD_H +# ifdef __WATCOMC__ +# define Z_HAVE_UNISTD_H +# endif +#endif +#ifndef Z_HAVE_UNISTD_H +# if defined(_LARGEFILE64_SOURCE) && !defined(_WIN32) +# define Z_HAVE_UNISTD_H +# endif +#endif +#ifndef Z_SOLO +# if defined(Z_HAVE_UNISTD_H) +# include /* for SEEK_*, off_t, and _LFS64_LARGEFILE */ +# ifdef VMS +# include /* for off_t */ +# endif +# ifndef z_off_t +# 
define z_off_t off_t +# endif +# endif +#endif + +#if defined(_LFS64_LARGEFILE) && _LFS64_LARGEFILE-0 +# define Z_LFS64 +#endif + +#if defined(_LARGEFILE64_SOURCE) && defined(Z_LFS64) +# define Z_LARGE64 +#endif + +#if defined(_FILE_OFFSET_BITS) && _FILE_OFFSET_BITS-0 == 64 && defined(Z_LFS64) +# define Z_WANT64 +#endif + +#if !defined(SEEK_SET) && !defined(Z_SOLO) +# define SEEK_SET 0 /* Seek from beginning of file. */ +# define SEEK_CUR 1 /* Seek from current position. */ +# define SEEK_END 2 /* Set file pointer to EOF plus "offset" */ +#endif + +#ifndef z_off_t +# define z_off_t long +#endif + +#if !defined(_WIN32) && defined(Z_LARGE64) +# define z_off64_t off64_t +#else +# if defined(_WIN32) && !defined(__GNUC__) +# define z_off64_t __int64 +# else +# define z_off64_t z_off_t +# endif +#endif + +/* MVS linker does not support external names larger than 8 bytes */ +#if defined(__MVS__) + #pragma map(deflateInit_,"DEIN") + #pragma map(deflateInit2_,"DEIN2") + #pragma map(deflateEnd,"DEEND") + #pragma map(deflateBound,"DEBND") + #pragma map(inflateInit_,"ININ") + #pragma map(inflateInit2_,"ININ2") + #pragma map(inflateEnd,"INEND") + #pragma map(inflateSync,"INSY") + #pragma map(inflateSetDictionary,"INSEDI") + #pragma map(compressBound,"CMBND") + #pragma map(inflate_table,"INTABL") + #pragma map(inflate_fast,"INFA") + #pragma map(inflate_copyright,"INCOPY") +#endif + +#endif /* ZCONF_H */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/zlib.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/zlib.h new file mode 100644 index 0000000000000000000000000000000000000000..8d4b932eaf6a0fbb8133b3ab49ba5ef587059fa0 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/extlibs/zlib.h @@ -0,0 +1,1938 @@ +/* zlib.h -- interface of the 'zlib' general purpose compression library + version 1.3.1, January 22nd, 2024 + + Copyright (C) 1995-2024 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + + + The data format used by the zlib library is described by RFCs (Request for + Comments) 1950 to 1952 in the files http://tools.ietf.org/html/rfc1950 + (zlib format), rfc1951 (deflate format) and rfc1952 (gzip format). +*/ + +#ifndef ZLIB_H +#define ZLIB_H + +#include "zconf.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define ZLIB_VERSION "1.3.1" +#define ZLIB_VERNUM 0x1310 +#define ZLIB_VER_MAJOR 1 +#define ZLIB_VER_MINOR 3 +#define ZLIB_VER_REVISION 1 +#define ZLIB_VER_SUBREVISION 0 + +/* + The 'zlib' compression library provides in-memory compression and + decompression functions, including integrity checks of the uncompressed data. 
+ This version of the library supports only one compression method (deflation) + but other algorithms will be added later and will have the same stream + interface. + + Compression can be done in a single step if the buffers are large enough, + or can be done by repeated calls of the compression function. In the latter + case, the application must provide more input and/or consume the output + (providing more output space) before each call. + + The compressed data format used by default by the in-memory functions is + the zlib format, which is a zlib wrapper documented in RFC 1950, wrapped + around a deflate stream, which is itself documented in RFC 1951. + + The library also supports reading and writing files in gzip (.gz) format + with an interface similar to that of stdio using the functions that start + with "gz". The gzip format is different from the zlib format. gzip is a + gzip wrapper, documented in RFC 1952, wrapped around a deflate stream. + + This library can optionally read and write gzip and raw deflate streams in + memory as well. + + The zlib format was designed to be compact and fast for use in memory + and on communications channels. The gzip format was designed for single- + file compression on file systems, has a larger header than zlib to maintain + directory information, and uses a different, slower check method than zlib. + + The library does not install any signal handler. The decoder checks + the consistency of the compressed data, so the library should never crash + even in the case of corrupted input. +*/ + +typedef voidpf (*alloc_func)(voidpf opaque, uInt items, uInt size); +typedef void (*free_func)(voidpf opaque, voidpf address); + +struct internal_state; + +typedef struct z_stream_s { + z_const Bytef *next_in; /* next input byte */ + uInt avail_in; /* number of bytes available at next_in */ + uLong total_in; /* total number of input bytes read so far */ + + Bytef *next_out; /* next output byte will go here */ + uInt avail_out; /* remaining free space at next_out */ + uLong total_out; /* total number of bytes output so far */ + + z_const char *msg; /* last error message, NULL if no error */ + struct internal_state FAR *state; /* not visible by applications */ + + alloc_func zalloc; /* used to allocate the internal state */ + free_func zfree; /* used to free the internal state */ + voidpf opaque; /* private data object passed to zalloc and zfree */ + + int data_type; /* best guess about the data type: binary or text + for deflate, or the decoding state for inflate */ + uLong adler; /* Adler-32 or CRC-32 value of the uncompressed data */ + uLong reserved; /* reserved for future use */ +} z_stream; + +typedef z_stream FAR *z_streamp; + +/* + gzip header information passed to and from zlib routines. See RFC 1952 + for more details on the meanings of these fields. 
+*/ +typedef struct gz_header_s { + int text; /* true if compressed data believed to be text */ + uLong time; /* modification time */ + int xflags; /* extra flags (not used when writing a gzip file) */ + int os; /* operating system */ + Bytef *extra; /* pointer to extra field or Z_NULL if none */ + uInt extra_len; /* extra field length (valid if extra != Z_NULL) */ + uInt extra_max; /* space at extra (only when reading header) */ + Bytef *name; /* pointer to zero-terminated file name or Z_NULL */ + uInt name_max; /* space at name (only when reading header) */ + Bytef *comment; /* pointer to zero-terminated comment or Z_NULL */ + uInt comm_max; /* space at comment (only when reading header) */ + int hcrc; /* true if there was or will be a header crc */ + int done; /* true when done reading gzip header (not used + when writing a gzip file) */ +} gz_header; + +typedef gz_header FAR *gz_headerp; + +/* + The application must update next_in and avail_in when avail_in has dropped + to zero. It must update next_out and avail_out when avail_out has dropped + to zero. The application must initialize zalloc, zfree and opaque before + calling the init function. All other fields are set by the compression + library and must not be updated by the application. + + The opaque value provided by the application will be passed as the first + parameter for calls of zalloc and zfree. This can be useful for custom + memory management. The compression library attaches no meaning to the + opaque value. + + zalloc must return Z_NULL if there is not enough memory for the object. + If zlib is used in a multi-threaded application, zalloc and zfree must be + thread safe. In that case, zlib is thread-safe. When zalloc and zfree are + Z_NULL on entry to the initialization function, they are set to internal + routines that use the standard library functions malloc() and free(). + + On 16-bit systems, the functions zalloc and zfree must be able to allocate + exactly 65536 bytes, but will not be required to allocate more than this if + the symbol MAXSEG_64K is defined (see zconf.h). WARNING: On MSDOS, pointers + returned by zalloc for objects of exactly 65536 bytes *must* have their + offset normalized to zero. The default allocation function provided by this + library ensures this (see zutil.c). To reduce memory requirements and avoid + any allocation of 64K objects, at the expense of compression ratio, compile + the library with -DMAX_WBITS=14 (see zconf.h). + + The fields total_in and total_out can be used for statistics or progress + reports. After compression, total_in holds the total size of the + uncompressed data and may be saved for use by the decompressor (particularly + if the decompressor wants to decompress everything in a single step). +*/ + + /* constants */ + +#define Z_NO_FLUSH 0 +#define Z_PARTIAL_FLUSH 1 +#define Z_SYNC_FLUSH 2 +#define Z_FULL_FLUSH 3 +#define Z_FINISH 4 +#define Z_BLOCK 5 +#define Z_TREES 6 +/* Allowed flush values; see deflate() and inflate() below for details */ + +#define Z_OK 0 +#define Z_STREAM_END 1 +#define Z_NEED_DICT 2 +#define Z_ERRNO (-1) +#define Z_STREAM_ERROR (-2) +#define Z_DATA_ERROR (-3) +#define Z_MEM_ERROR (-4) +#define Z_BUF_ERROR (-5) +#define Z_VERSION_ERROR (-6) +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ + +#define Z_NO_COMPRESSION 0 +#define Z_BEST_SPEED 1 +#define Z_BEST_COMPRESSION 9 +#define Z_DEFAULT_COMPRESSION (-1) +/* compression levels */ + +#define Z_FILTERED 1 +#define Z_HUFFMAN_ONLY 2 +#define Z_RLE 3 +#define Z_FIXED 4 +#define Z_DEFAULT_STRATEGY 0 +/* compression strategy; see deflateInit2() below for details */ + +#define Z_BINARY 0 +#define Z_TEXT 1 +#define Z_ASCII Z_TEXT /* for compatibility with 1.2.2 and earlier */ +#define Z_UNKNOWN 2 +/* Possible values of the data_type field for deflate() */ + +#define Z_DEFLATED 8 +/* The deflate compression method (the only one supported in this version) */ + +#define Z_NULL 0 /* for initializing zalloc, zfree, opaque */ + +#define zlib_version zlibVersion() +/* for compatibility with versions < 1.0.2 */ + + + /* basic functions */ + +ZEXTERN const char * ZEXPORT zlibVersion(void); +/* The application can compare zlibVersion and ZLIB_VERSION for consistency. + If the first character differs, the library code actually used is not + compatible with the zlib.h header file used by the application. This check + is automatically made by deflateInit and inflateInit. + */ + +/* +ZEXTERN int ZEXPORT deflateInit(z_streamp strm, int level); + + Initializes the internal stream state for compression. The fields + zalloc, zfree and opaque must be initialized before by the caller. If + zalloc and zfree are set to Z_NULL, deflateInit updates them to use default + allocation functions. total_in, total_out, adler, and msg are initialized. + + The compression level must be Z_DEFAULT_COMPRESSION, or between 0 and 9: + 1 gives best speed, 9 gives best compression, 0 gives no compression at all + (the input data is simply copied a block at a time). Z_DEFAULT_COMPRESSION + requests a default compromise between speed and compression (currently + equivalent to level 6). + + deflateInit returns Z_OK if success, Z_MEM_ERROR if there was not enough + memory, Z_STREAM_ERROR if level is not a valid compression level, or + Z_VERSION_ERROR if the zlib library version (zlib_version) is incompatible + with the version assumed by the caller (ZLIB_VERSION). msg is set to null + if there is no error message. deflateInit does not perform any compression: + this will be done by deflate(). +*/ + + +ZEXTERN int ZEXPORT deflate(z_streamp strm, int flush); +/* + deflate compresses as much data as possible, and stops when the input + buffer becomes empty or the output buffer becomes full. It may introduce + some output latency (reading input without producing any output) except when + forced to flush. + + The detailed semantics are as follows. deflate performs one or both of the + following actions: + + - Compress more input starting at next_in and update next_in and avail_in + accordingly. If not all input can be processed (because there is not + enough room in the output buffer), next_in and avail_in are updated and + processing will resume at this point for the next call of deflate(). + + - Generate more output starting at next_out and update next_out and avail_out + accordingly. This action is forced if the parameter flush is non zero. + Forcing flush frequently degrades the compression ratio, so this parameter + should be set only when necessary. Some output may be provided even if + flush is zero. + + Before the call of deflate(), the application should ensure that at least + one of the actions is possible, by providing more input and/or consuming more + output, and updating avail_in or avail_out accordingly; avail_out should + never be zero before the call. 
The application can consume the compressed + output when it wants, for example when the output buffer is full (avail_out + == 0), or after each call of deflate(). If deflate returns Z_OK and with + zero avail_out, it must be called again after making room in the output + buffer because there might be more output pending. See deflatePending(), + which can be used if desired to determine whether or not there is more output + in that case. + + Normally the parameter flush is set to Z_NO_FLUSH, which allows deflate to + decide how much data to accumulate before producing output, in order to + maximize compression. + + If the parameter flush is set to Z_SYNC_FLUSH, all pending output is + flushed to the output buffer and the output is aligned on a byte boundary, so + that the decompressor can get all input data available so far. (In + particular avail_in is zero after the call if enough output space has been + provided before the call.) Flushing may degrade compression for some + compression algorithms and so it should be used only when necessary. This + completes the current deflate block and follows it with an empty stored block + that is three bits plus filler bits to the next byte, followed by four bytes + (00 00 ff ff). + + If flush is set to Z_PARTIAL_FLUSH, all pending output is flushed to the + output buffer, but the output is not aligned to a byte boundary. All of the + input data so far will be available to the decompressor, as for Z_SYNC_FLUSH. + This completes the current deflate block and follows it with an empty fixed + codes block that is 10 bits long. This assures that enough bytes are output + in order for the decompressor to finish the block before the empty fixed + codes block. + + If flush is set to Z_BLOCK, a deflate block is completed and emitted, as + for Z_SYNC_FLUSH, but the output is not aligned on a byte boundary, and up to + seven bits of the current block are held to be written as the next byte after + the next deflate block is completed. In this case, the decompressor may not + be provided enough bits at this point in order to complete decompression of + the data provided so far to the compressor. It may need to wait for the next + block to be emitted. This is for advanced applications that need to control + the emission of deflate blocks. + + If flush is set to Z_FULL_FLUSH, all output is flushed as with + Z_SYNC_FLUSH, and the compression state is reset so that decompression can + restart from this point if previous compressed data has been damaged or if + random access is desired. Using Z_FULL_FLUSH too often can seriously degrade + compression. + + If deflate returns with avail_out == 0, this function must be called again + with the same value of the flush parameter and more output space (updated + avail_out), until the flush is complete (deflate returns with non-zero + avail_out). In the case of a Z_FULL_FLUSH or Z_SYNC_FLUSH, make sure that + avail_out is greater than six when the flush marker begins, in order to avoid + repeated flush markers upon calling deflate() again when avail_out == 0. + + If the parameter flush is set to Z_FINISH, pending input is processed, + pending output is flushed and deflate returns with Z_STREAM_END if there was + enough output space. If deflate returns with Z_OK or Z_BUF_ERROR, this + function must be called again with Z_FINISH and more output space (updated + avail_out) but no more input data, until it returns with Z_STREAM_END or an + error. 
After deflate has returned Z_STREAM_END, the only possible operations + on the stream are deflateReset or deflateEnd. + + Z_FINISH can be used in the first deflate call after deflateInit if all the + compression is to be done in a single step. In order to complete in one + call, avail_out must be at least the value returned by deflateBound (see + below). Then deflate is guaranteed to return Z_STREAM_END. If not enough + output space is provided, deflate will not return Z_STREAM_END, and it must + be called again as described above. + + deflate() sets strm->adler to the Adler-32 checksum of all input read + so far (that is, total_in bytes). If a gzip stream is being generated, then + strm->adler will be the CRC-32 checksum of the input read so far. (See + deflateInit2 below.) + + deflate() may update strm->data_type if it can make a good guess about + the input data type (Z_BINARY or Z_TEXT). If in doubt, the data is + considered binary. This field is only for information purposes and does not + affect the compression algorithm in any manner. + + deflate() returns Z_OK if some progress has been made (more input + processed or more output produced), Z_STREAM_END if all input has been + consumed and all output has been produced (only when flush is set to + Z_FINISH), Z_STREAM_ERROR if the stream state was inconsistent (for example + if next_in or next_out was Z_NULL or the state was inadvertently written over + by the application), or Z_BUF_ERROR if no progress is possible (for example + avail_in or avail_out was zero). Note that Z_BUF_ERROR is not fatal, and + deflate() can be called again with more input and more output space to + continue compressing. +*/ + + +ZEXTERN int ZEXPORT deflateEnd(z_streamp strm); +/* + All dynamically allocated data structures for this stream are freed. + This function discards any unprocessed input and does not flush any pending + output. + + deflateEnd returns Z_OK if success, Z_STREAM_ERROR if the + stream state was inconsistent, Z_DATA_ERROR if the stream was freed + prematurely (some input or output was discarded). In the error case, msg + may be set but then points to a static string (which must not be + deallocated). +*/ + + +/* +ZEXTERN int ZEXPORT inflateInit(z_streamp strm); + + Initializes the internal stream state for decompression. The fields + next_in, avail_in, zalloc, zfree and opaque must be initialized before by + the caller. In the current version of inflate, the provided input is not + read or consumed. The allocation of a sliding window will be deferred to + the first call of inflate (if the decompression does not complete on the + first call). If zalloc and zfree are set to Z_NULL, inflateInit updates + them to use default allocation functions. total_in, total_out, adler, and + msg are initialized. + + inflateInit returns Z_OK if success, Z_MEM_ERROR if there was not enough + memory, Z_VERSION_ERROR if the zlib library version is incompatible with the + version assumed by the caller, or Z_STREAM_ERROR if the parameters are + invalid, such as a null pointer to the structure. msg is set to null if + there is no error message. inflateInit does not perform any decompression. + Actual decompression will be done by inflate(). So next_in, and avail_in, + next_out, and avail_out are unused and unchanged. The current + implementation of inflateInit() does not process any header information -- + that is deferred until inflate() is called. 
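+
+    As a small sketch of the initialization just described (not a complete
+  program), the required fields can be set as follows before calling
+  inflateInit:
+
+      z_stream strm;
+      strm.zalloc = Z_NULL;      // use the default allocation functions
+      strm.zfree = Z_NULL;
+      strm.opaque = Z_NULL;
+      strm.next_in = Z_NULL;     // no input provided yet
+      strm.avail_in = 0;
+      if (inflateInit(&strm) != Z_OK) {
+          // initialization failed; strm.msg, if not NULL, describes why
+      }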
+*/ + + +ZEXTERN int ZEXPORT inflate(z_streamp strm, int flush); +/* + inflate decompresses as much data as possible, and stops when the input + buffer becomes empty or the output buffer becomes full. It may introduce + some output latency (reading input without producing any output) except when + forced to flush. + + The detailed semantics are as follows. inflate performs one or both of the + following actions: + + - Decompress more input starting at next_in and update next_in and avail_in + accordingly. If not all input can be processed (because there is not + enough room in the output buffer), then next_in and avail_in are updated + accordingly, and processing will resume at this point for the next call of + inflate(). + + - Generate more output starting at next_out and update next_out and avail_out + accordingly. inflate() provides as much output as possible, until there is + no more input data or no more space in the output buffer (see below about + the flush parameter). + + Before the call of inflate(), the application should ensure that at least + one of the actions is possible, by providing more input and/or consuming more + output, and updating the next_* and avail_* values accordingly. If the + caller of inflate() does not provide both available input and available + output space, it is possible that there will be no progress made. The + application can consume the uncompressed output when it wants, for example + when the output buffer is full (avail_out == 0), or after each call of + inflate(). If inflate returns Z_OK and with zero avail_out, it must be + called again after making room in the output buffer because there might be + more output pending. + + The flush parameter of inflate() can be Z_NO_FLUSH, Z_SYNC_FLUSH, Z_FINISH, + Z_BLOCK, or Z_TREES. Z_SYNC_FLUSH requests that inflate() flush as much + output as possible to the output buffer. Z_BLOCK requests that inflate() + stop if and when it gets to the next deflate block boundary. When decoding + the zlib or gzip format, this will cause inflate() to return immediately + after the header and before the first block. When doing a raw inflate, + inflate() will go ahead and process the first block, and will return when it + gets to the end of that block, or when it runs out of data. + + The Z_BLOCK option assists in appending to or combining deflate streams. + To assist in this, on return inflate() always sets strm->data_type to the + number of unused bits in the last byte taken from strm->next_in, plus 64 if + inflate() is currently decoding the last block in the deflate stream, plus + 128 if inflate() returned immediately after decoding an end-of-block code or + decoding the complete header up to just before the first byte of the deflate + stream. The end-of-block will not be indicated until all of the uncompressed + data from that block has been written to strm->next_out. The number of + unused bits may in general be greater than seven, except when bit 7 of + data_type is set, in which case the number of unused bits will be less than + eight. data_type is set as noted here every time inflate() returns for all + flush options, and so can be used to determine the amount of currently + consumed input in bits. + + The Z_TREES option behaves as Z_BLOCK does, but it also returns when the + end of each deflate block header is reached, before any actual data in that + block is decoded. This allows the caller to determine the length of the + deflate block header for later use in random access within a deflate block. 
+ 256 is added to the value of strm->data_type when inflate() returns + immediately after reaching the end of the deflate block header. + + inflate() should normally be called until it returns Z_STREAM_END or an + error. However if all decompression is to be performed in a single step (a + single call of inflate), the parameter flush should be set to Z_FINISH. In + this case all pending input is processed and all pending output is flushed; + avail_out must be large enough to hold all of the uncompressed data for the + operation to complete. (The size of the uncompressed data may have been + saved by the compressor for this purpose.) The use of Z_FINISH is not + required to perform an inflation in one step. However it may be used to + inform inflate that a faster approach can be used for the single inflate() + call. Z_FINISH also informs inflate to not maintain a sliding window if the + stream completes, which reduces inflate's memory footprint. If the stream + does not complete, either because not all of the stream is provided or not + enough output space is provided, then a sliding window will be allocated and + inflate() can be called again to continue the operation as if Z_NO_FLUSH had + been used. + + In this implementation, inflate() always flushes as much output as + possible to the output buffer, and always uses the faster approach on the + first call. So the effects of the flush parameter in this implementation are + on the return value of inflate() as noted below, when inflate() returns early + when Z_BLOCK or Z_TREES is used, and when inflate() avoids the allocation of + memory for a sliding window when Z_FINISH is used. + + If a preset dictionary is needed after this call (see inflateSetDictionary + below), inflate sets strm->adler to the Adler-32 checksum of the dictionary + chosen by the compressor and returns Z_NEED_DICT; otherwise it sets + strm->adler to the Adler-32 checksum of all output produced so far (that is, + total_out bytes) and returns Z_OK, Z_STREAM_END or an error code as described + below. At the end of the stream, inflate() checks that its computed Adler-32 + checksum is equal to that saved by the compressor and returns Z_STREAM_END + only if the checksum is correct. + + inflate() can decompress and check either zlib-wrapped or gzip-wrapped + deflate data. The header type is detected automatically, if requested when + initializing with inflateInit2(). Any information contained in the gzip + header is not retained unless inflateGetHeader() is used. When processing + gzip-wrapped deflate data, strm->adler32 is set to the CRC-32 of the output + produced so far. The CRC-32 is checked against the gzip trailer, as is the + uncompressed length, modulo 2^32. + + inflate() returns Z_OK if some progress has been made (more input processed + or more output produced), Z_STREAM_END if the end of the compressed data has + been reached and all uncompressed output has been produced, Z_NEED_DICT if a + preset dictionary is needed at this point, Z_DATA_ERROR if the input data was + corrupted (input stream not conforming to the zlib format or incorrect check + value, in which case strm->msg points to a string with a more specific + error), Z_STREAM_ERROR if the stream structure was inconsistent (for example + next_in or next_out was Z_NULL, or the state was inadvertently written over + by the application), Z_MEM_ERROR if there was not enough memory, Z_BUF_ERROR + if no progress was possible or if there was not enough room in the output + buffer when Z_FINISH is used. 
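+
+    For illustration only, a minimal decompression loop following the rules
+  above (CHUNK, fill_input() and write_output() are hypothetical helpers
+  supplied by the application):
+
+      z_stream strm;
+      unsigned char in[CHUNK], out[CHUNK];
+      int ret;
+
+      strm.zalloc = Z_NULL;
+      strm.zfree = Z_NULL;
+      strm.opaque = Z_NULL;
+      strm.next_in = Z_NULL;
+      strm.avail_in = 0;
+      ret = inflateInit(&strm);
+      do {
+          strm.avail_in = fill_input(in, CHUNK);
+          if (strm.avail_in == 0)
+              break;                         // premature end of input
+          strm.next_in = in;
+          do {
+              strm.next_out = out;
+              strm.avail_out = CHUNK;
+              ret = inflate(&strm, Z_NO_FLUSH);
+              if (ret == Z_NEED_DICT || ret == Z_DATA_ERROR || ret == Z_MEM_ERROR)
+                  break;                     // stream is unusable; clean up and stop
+              write_output(out, CHUNK - strm.avail_out);
+          } while (strm.avail_out == 0);
+      } while (ret != Z_STREAM_END && ret != Z_NEED_DICT &&
+               ret != Z_DATA_ERROR && ret != Z_MEM_ERROR);
+      (void)inflateEnd(&strm);
+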
Note that Z_BUF_ERROR is not fatal, and + inflate() can be called again with more input and more output space to + continue decompressing. If Z_DATA_ERROR is returned, the application may + then call inflateSync() to look for a good compression block if a partial + recovery of the data is to be attempted. +*/ + + +ZEXTERN int ZEXPORT inflateEnd(z_streamp strm); +/* + All dynamically allocated data structures for this stream are freed. + This function discards any unprocessed input and does not flush any pending + output. + + inflateEnd returns Z_OK if success, or Z_STREAM_ERROR if the stream state + was inconsistent. +*/ + + + /* Advanced functions */ + +/* + The following functions are needed only in some special applications. +*/ + +/* +ZEXTERN int ZEXPORT deflateInit2(z_streamp strm, + int level, + int method, + int windowBits, + int memLevel, + int strategy); + + This is another version of deflateInit with more compression options. The + fields zalloc, zfree and opaque must be initialized before by the caller. + + The method parameter is the compression method. It must be Z_DEFLATED in + this version of the library. + + The windowBits parameter is the base two logarithm of the window size + (the size of the history buffer). It should be in the range 8..15 for this + version of the library. Larger values of this parameter result in better + compression at the expense of memory usage. The default value is 15 if + deflateInit is used instead. + + For the current implementation of deflate(), a windowBits value of 8 (a + window size of 256 bytes) is not supported. As a result, a request for 8 + will result in 9 (a 512-byte window). In that case, providing 8 to + inflateInit2() will result in an error when the zlib header with 9 is + checked against the initialization of inflate(). The remedy is to not use 8 + with deflateInit2() with this initialization, or at least in that case use 9 + with inflateInit2(). + + windowBits can also be -8..-15 for raw deflate. In this case, -windowBits + determines the window size. deflate() will then generate raw deflate data + with no zlib header or trailer, and will not compute a check value. + + windowBits can also be greater than 15 for optional gzip encoding. Add + 16 to windowBits to write a simple gzip header and trailer around the + compressed data instead of a zlib wrapper. The gzip header will have no + file name, no extra data, no comment, no modification time (set to zero), no + header crc, and the operating system will be set to the appropriate value, + if the operating system was determined at compile time. If a gzip stream is + being written, strm->adler is a CRC-32 instead of an Adler-32. + + For raw deflate or gzip encoding, a request for a 256-byte window is + rejected as invalid, since only the zlib header provides a means of + transmitting the window size to the decompressor. + + The memLevel parameter specifies how much memory should be allocated + for the internal compression state. memLevel=1 uses minimum memory but is + slow and reduces compression ratio; memLevel=9 uses maximum memory for + optimal speed. The default value is 8. See zconf.h for total memory usage + as a function of windowBits and memLevel. + + The strategy parameter is used to tune the compression algorithm. Use the + value Z_DEFAULT_STRATEGY for normal data, Z_FILTERED for data produced by a + filter (or predictor), Z_HUFFMAN_ONLY to force Huffman encoding only (no + string match), or Z_RLE to limit match distances to one (run-length + encoding). 
Filtered data consists mostly of small values with a somewhat + random distribution. In this case, the compression algorithm is tuned to + compress them better. The effect of Z_FILTERED is to force more Huffman + coding and less string matching; it is somewhat intermediate between + Z_DEFAULT_STRATEGY and Z_HUFFMAN_ONLY. Z_RLE is designed to be almost as + fast as Z_HUFFMAN_ONLY, but give better compression for PNG image data. The + strategy parameter only affects the compression ratio but not the + correctness of the compressed output even if it is not set appropriately. + Z_FIXED prevents the use of dynamic Huffman codes, allowing for a simpler + decoder for special applications. + + deflateInit2 returns Z_OK if success, Z_MEM_ERROR if there was not enough + memory, Z_STREAM_ERROR if any parameter is invalid (such as an invalid + method), or Z_VERSION_ERROR if the zlib library version (zlib_version) is + incompatible with the version assumed by the caller (ZLIB_VERSION). msg is + set to null if there is no error message. deflateInit2 does not perform any + compression: this will be done by deflate(). +*/ + +ZEXTERN int ZEXPORT deflateSetDictionary(z_streamp strm, + const Bytef *dictionary, + uInt dictLength); +/* + Initializes the compression dictionary from the given byte sequence + without producing any compressed output. When using the zlib format, this + function must be called immediately after deflateInit, deflateInit2 or + deflateReset, and before any call of deflate. When doing raw deflate, this + function must be called either before any call of deflate, or immediately + after the completion of a deflate block, i.e. after all input has been + consumed and all output has been delivered when using any of the flush + options Z_BLOCK, Z_PARTIAL_FLUSH, Z_SYNC_FLUSH, or Z_FULL_FLUSH. The + compressor and decompressor must use exactly the same dictionary (see + inflateSetDictionary). + + The dictionary should consist of strings (byte sequences) that are likely + to be encountered later in the data to be compressed, with the most commonly + used strings preferably put towards the end of the dictionary. Using a + dictionary is most useful when the data to be compressed is short and can be + predicted with good accuracy; the data can then be compressed better than + with the default empty dictionary. + + Depending on the size of the compression data structures selected by + deflateInit or deflateInit2, a part of the dictionary may in effect be + discarded, for example if the dictionary is larger than the window size + provided in deflateInit or deflateInit2. Thus the strings most likely to be + useful should be put at the end of the dictionary, not at the front. In + addition, the current implementation of deflate will use at most the window + size minus 262 bytes of the provided dictionary. + + Upon return of this function, strm->adler is set to the Adler-32 value + of the dictionary; the decompressor may later use this value to determine + which dictionary has been used by the compressor. (The Adler-32 value + applies to the whole dictionary even if only a subset of the dictionary is + actually used by the compressor.) If a raw deflate was requested, then the + Adler-32 value is not computed and strm->adler is not set. + + deflateSetDictionary returns Z_OK if success, or Z_STREAM_ERROR if a + parameter is invalid (e.g. 
dictionary being Z_NULL) or the stream state is + inconsistent (for example if deflate has already been called for this stream + or if not at a block boundary for raw deflate). deflateSetDictionary does + not perform any compression: this will be done by deflate(). +*/ + +ZEXTERN int ZEXPORT deflateGetDictionary(z_streamp strm, + Bytef *dictionary, + uInt *dictLength); +/* + Returns the sliding dictionary being maintained by deflate. dictLength is + set to the number of bytes in the dictionary, and that many bytes are copied + to dictionary. dictionary must have enough space, where 32768 bytes is + always enough. If deflateGetDictionary() is called with dictionary equal to + Z_NULL, then only the dictionary length is returned, and nothing is copied. + Similarly, if dictLength is Z_NULL, then it is not set. + + deflateGetDictionary() may return a length less than the window size, even + when more than the window size in input has been provided. It may return up + to 258 bytes less in that case, due to how zlib's implementation of deflate + manages the sliding window and lookahead for matches, where matches can be + up to 258 bytes long. If the application needs the last window-size bytes of + input, then that would need to be saved by the application outside of zlib. + + deflateGetDictionary returns Z_OK on success, or Z_STREAM_ERROR if the + stream state is inconsistent. +*/ + +ZEXTERN int ZEXPORT deflateCopy(z_streamp dest, + z_streamp source); +/* + Sets the destination stream as a complete copy of the source stream. + + This function can be useful when several compression strategies will be + tried, for example when there are several ways of pre-processing the input + data with a filter. The streams that will be discarded should then be freed + by calling deflateEnd. Note that deflateCopy duplicates the internal + compression state which can be quite large, so this strategy is slow and can + consume lots of memory. + + deflateCopy returns Z_OK if success, Z_MEM_ERROR if there was not + enough memory, Z_STREAM_ERROR if the source stream state was inconsistent + (such as zalloc being Z_NULL). msg is left unchanged in both source and + destination. +*/ + +ZEXTERN int ZEXPORT deflateReset(z_streamp strm); +/* + This function is equivalent to deflateEnd followed by deflateInit, but + does not free and reallocate the internal compression state. The stream + will leave the compression level and any other attributes that may have been + set unchanged. total_in, total_out, adler, and msg are initialized. + + deflateReset returns Z_OK if success, or Z_STREAM_ERROR if the source + stream state was inconsistent (such as zalloc or state being Z_NULL). +*/ + +ZEXTERN int ZEXPORT deflateParams(z_streamp strm, + int level, + int strategy); +/* + Dynamically update the compression level and compression strategy. The + interpretation of level and strategy is as in deflateInit2(). This can be + used to switch between compression and straight copy of the input data, or + to switch to a different kind of input data requiring a different strategy. + If the compression approach (which is a function of the level) or the + strategy is changed, and if there have been any deflate() calls since the + state was initialized or reset, then the input available so far is + compressed with the old level and strategy using deflate(strm, Z_BLOCK). + There are three approaches for the compression levels 0, 1..3, and 4..9 + respectively. The new level and strategy will take effect at the next call + of deflate(). 
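+
+    A hedged sketch of one way to use deflateParams(), following the flushing
+  advice given below (out[] and CHUNK are hypothetical, and the output written
+  here still has to be consumed by the application):
+
+      do {                          // flush what was compressed so far ...
+          strm.next_out = out;
+          strm.avail_out = CHUNK;
+          (void)deflate(&strm, Z_BLOCK);
+          // consume CHUNK - strm.avail_out bytes of output here
+      } while (strm.avail_out == 0);
+      // ... then switch to faster settings for the remaining input
+      ret = deflateParams(&strm, Z_BEST_SPEED, Z_DEFAULT_STRATEGY);
+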
+ + If a deflate(strm, Z_BLOCK) is performed by deflateParams(), and it does + not have enough output space to complete, then the parameter change will not + take effect. In this case, deflateParams() can be called again with the + same parameters and more output space to try again. + + In order to assure a change in the parameters on the first try, the + deflate stream should be flushed using deflate() with Z_BLOCK or other flush + request until strm.avail_out is not zero, before calling deflateParams(). + Then no more input data should be provided before the deflateParams() call. + If this is done, the old level and strategy will be applied to the data + compressed before deflateParams(), and the new level and strategy will be + applied to the data compressed after deflateParams(). + + deflateParams returns Z_OK on success, Z_STREAM_ERROR if the source stream + state was inconsistent or if a parameter was invalid, or Z_BUF_ERROR if + there was not enough output space to complete the compression of the + available input data before a change in the strategy or approach. Note that + in the case of a Z_BUF_ERROR, the parameters are not changed. A return + value of Z_BUF_ERROR is not fatal, in which case deflateParams() can be + retried with more output space. +*/ + +ZEXTERN int ZEXPORT deflateTune(z_streamp strm, + int good_length, + int max_lazy, + int nice_length, + int max_chain); +/* + Fine tune deflate's internal compression parameters. This should only be + used by someone who understands the algorithm used by zlib's deflate for + searching for the best matching string, and even then only by the most + fanatic optimizer trying to squeeze out the last compressed bit for their + specific input data. Read the deflate.c source code for the meaning of the + max_lazy, good_length, nice_length, and max_chain parameters. + + deflateTune() can be called after deflateInit() or deflateInit2(), and + returns Z_OK on success, or Z_STREAM_ERROR for an invalid deflate stream. + */ + +ZEXTERN uLong ZEXPORT deflateBound(z_streamp strm, + uLong sourceLen); +/* + deflateBound() returns an upper bound on the compressed size after + deflation of sourceLen bytes. It must be called after deflateInit() or + deflateInit2(), and after deflateSetHeader(), if used. This would be used + to allocate an output buffer for deflation in a single pass, and so would be + called before deflate(). If that first deflate() call is provided the + sourceLen input bytes, an output buffer allocated to the size returned by + deflateBound(), and the flush value Z_FINISH, then deflate() is guaranteed + to return Z_STREAM_END. Note that it is possible for the compressed size to + be larger than the value returned by deflateBound() if flush options other + than Z_FINISH or Z_NO_FLUSH are used. +*/ + +ZEXTERN int ZEXPORT deflatePending(z_streamp strm, + unsigned *pending, + int *bits); +/* + deflatePending() returns the number of bytes and bits of output that have + been generated, but not yet provided in the available output. The bytes not + provided would be due to the available output space having being consumed. + The number of bits of output not provided are between 0 and 7, where they + await more bits to join them in order to fill out a full byte. If pending + or bits are Z_NULL, then those values are not set. + + deflatePending returns Z_OK if success, or Z_STREAM_ERROR if the source + stream state was inconsistent. 
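+
+    Tying deflateBound() and the single-call use of Z_FINISH together, an
+  informal sketch (source and sourceLen are hypothetical, and the malloc()
+  result should be checked in real code; needs <stdlib.h>):
+
+      z_stream strm;
+      strm.zalloc = Z_NULL;
+      strm.zfree = Z_NULL;
+      strm.opaque = Z_NULL;
+      (void)deflateInit(&strm, Z_BEST_COMPRESSION);
+
+      uLong bound = deflateBound(&strm, sourceLen);
+      Bytef *dest = (Bytef *)malloc(bound);
+
+      strm.next_in = source;
+      strm.avail_in = (uInt)sourceLen;
+      strm.next_out = dest;
+      strm.avail_out = (uInt)bound;
+      int ret = deflate(&strm, Z_FINISH);           // guaranteed Z_STREAM_END
+      uLong destLen = bound - strm.avail_out;       // compressed size
+      (void)deflateEnd(&strm);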
+ */ + +ZEXTERN int ZEXPORT deflatePrime(z_streamp strm, + int bits, + int value); +/* + deflatePrime() inserts bits in the deflate output stream. The intent + is that this function is used to start off the deflate output with the bits + leftover from a previous deflate stream when appending to it. As such, this + function can only be used for raw deflate, and must be used before the first + deflate() call after a deflateInit2() or deflateReset(). bits must be less + than or equal to 16, and that many of the least significant bits of value + will be inserted in the output. + + deflatePrime returns Z_OK if success, Z_BUF_ERROR if there was not enough + room in the internal buffer to insert the bits, or Z_STREAM_ERROR if the + source stream state was inconsistent. +*/ + +ZEXTERN int ZEXPORT deflateSetHeader(z_streamp strm, + gz_headerp head); +/* + deflateSetHeader() provides gzip header information for when a gzip + stream is requested by deflateInit2(). deflateSetHeader() may be called + after deflateInit2() or deflateReset() and before the first call of + deflate(). The text, time, os, extra field, name, and comment information + in the provided gz_header structure are written to the gzip header (xflag is + ignored -- the extra flags are set according to the compression level). The + caller must assure that, if not Z_NULL, name and comment are terminated with + a zero byte, and that if extra is not Z_NULL, that extra_len bytes are + available there. If hcrc is true, a gzip header crc is included. Note that + the current versions of the command-line version of gzip (up through version + 1.3.x) do not support header crc's, and will report that it is a "multi-part + gzip file" and give up. + + If deflateSetHeader is not used, the default gzip header has text false, + the time set to zero, and os set to the current operating system, with no + extra, name, or comment fields. The gzip header is returned to the default + state by deflateReset(). + + deflateSetHeader returns Z_OK if success, or Z_STREAM_ERROR if the source + stream state was inconsistent. +*/ + +/* +ZEXTERN int ZEXPORT inflateInit2(z_streamp strm, + int windowBits); + + This is another version of inflateInit with an extra parameter. The + fields next_in, avail_in, zalloc, zfree and opaque must be initialized + before by the caller. + + The windowBits parameter is the base two logarithm of the maximum window + size (the size of the history buffer). It should be in the range 8..15 for + this version of the library. The default value is 15 if inflateInit is used + instead. windowBits must be greater than or equal to the windowBits value + provided to deflateInit2() while compressing, or it must be equal to 15 if + deflateInit2() was not used. If a compressed stream with a larger window + size is given as input, inflate() will return with the error code + Z_DATA_ERROR instead of trying to allocate a larger window. + + windowBits can also be zero to request that inflate use the window size in + the zlib header of the compressed stream. + + windowBits can also be -8..-15 for raw inflate. In this case, -windowBits + determines the window size. inflate() will then process raw deflate data, + not looking for a zlib or gzip header, not generating a check value, and not + looking for any check values for comparison at the end of the stream. This + is for use with other formats that use the deflate compressed data format + such as zip. Those formats provide their own check values. 
If a custom + format is developed using the raw deflate format for compressed data, it is + recommended that a check value such as an Adler-32 or a CRC-32 be applied to + the uncompressed data as is done in the zlib, gzip, and zip formats. For + most applications, the zlib format should be used as is. Note that comments + above on the use in deflateInit2() applies to the magnitude of windowBits. + + windowBits can also be greater than 15 for optional gzip decoding. Add + 32 to windowBits to enable zlib and gzip decoding with automatic header + detection, or add 16 to decode only the gzip format (the zlib format will + return a Z_DATA_ERROR). If a gzip stream is being decoded, strm->adler is a + CRC-32 instead of an Adler-32. Unlike the gunzip utility and gzread() (see + below), inflate() will *not* automatically decode concatenated gzip members. + inflate() will return Z_STREAM_END at the end of the gzip member. The state + would need to be reset to continue decoding a subsequent gzip member. This + *must* be done if there is more data after a gzip member, in order for the + decompression to be compliant with the gzip standard (RFC 1952). + + inflateInit2 returns Z_OK if success, Z_MEM_ERROR if there was not enough + memory, Z_VERSION_ERROR if the zlib library version is incompatible with the + version assumed by the caller, or Z_STREAM_ERROR if the parameters are + invalid, such as a null pointer to the structure. msg is set to null if + there is no error message. inflateInit2 does not perform any decompression + apart from possibly reading the zlib header if present: actual decompression + will be done by inflate(). (So next_in and avail_in may be modified, but + next_out and avail_out are unused and unchanged.) The current implementation + of inflateInit2() does not process any header information -- that is + deferred until inflate() is called. +*/ + +ZEXTERN int ZEXPORT inflateSetDictionary(z_streamp strm, + const Bytef *dictionary, + uInt dictLength); +/* + Initializes the decompression dictionary from the given uncompressed byte + sequence. This function must be called immediately after a call of inflate, + if that call returned Z_NEED_DICT. The dictionary chosen by the compressor + can be determined from the Adler-32 value returned by that call of inflate. + The compressor and decompressor must use exactly the same dictionary (see + deflateSetDictionary). For raw inflate, this function can be called at any + time to set the dictionary. If the provided dictionary is smaller than the + window and there is already data in the window, then the provided dictionary + will amend what's there. The application must insure that the dictionary + that was used for compression is provided. + + inflateSetDictionary returns Z_OK if success, Z_STREAM_ERROR if a + parameter is invalid (e.g. dictionary being Z_NULL) or the stream state is + inconsistent, Z_DATA_ERROR if the given dictionary doesn't match the + expected one (incorrect Adler-32 value). inflateSetDictionary does not + perform any decompression: this will be done by subsequent calls of + inflate(). +*/ + +ZEXTERN int ZEXPORT inflateGetDictionary(z_streamp strm, + Bytef *dictionary, + uInt *dictLength); +/* + Returns the sliding dictionary being maintained by inflate. dictLength is + set to the number of bytes in the dictionary, and that many bytes are copied + to dictionary. dictionary must have enough space, where 32768 bytes is + always enough. 
If inflateGetDictionary() is called with dictionary equal to + Z_NULL, then only the dictionary length is returned, and nothing is copied. + Similarly, if dictLength is Z_NULL, then it is not set. + + inflateGetDictionary returns Z_OK on success, or Z_STREAM_ERROR if the + stream state is inconsistent. +*/ + +ZEXTERN int ZEXPORT inflateSync(z_streamp strm); +/* + Skips invalid compressed data until a possible full flush point (see above + for the description of deflate with Z_FULL_FLUSH) can be found, or until all + available input is skipped. No output is provided. + + inflateSync searches for a 00 00 FF FF pattern in the compressed data. + All full flush points have this pattern, but not all occurrences of this + pattern are full flush points. + + inflateSync returns Z_OK if a possible full flush point has been found, + Z_BUF_ERROR if no more input was provided, Z_DATA_ERROR if no flush point + has been found, or Z_STREAM_ERROR if the stream structure was inconsistent. + In the success case, the application may save the current value of total_in + which indicates where valid compressed data was found. In the error case, + the application may repeatedly call inflateSync, providing more input each + time, until success or end of the input data. +*/ + +ZEXTERN int ZEXPORT inflateCopy(z_streamp dest, + z_streamp source); +/* + Sets the destination stream as a complete copy of the source stream. + + This function can be useful when randomly accessing a large stream. The + first pass through the stream can periodically record the inflate state, + allowing restarting inflate at those points when randomly accessing the + stream. + + inflateCopy returns Z_OK if success, Z_MEM_ERROR if there was not + enough memory, Z_STREAM_ERROR if the source stream state was inconsistent + (such as zalloc being Z_NULL). msg is left unchanged in both source and + destination. +*/ + +ZEXTERN int ZEXPORT inflateReset(z_streamp strm); +/* + This function is equivalent to inflateEnd followed by inflateInit, + but does not free and reallocate the internal decompression state. The + stream will keep attributes that may have been set by inflateInit2. + total_in, total_out, adler, and msg are initialized. + + inflateReset returns Z_OK if success, or Z_STREAM_ERROR if the source + stream state was inconsistent (such as zalloc or state being Z_NULL). +*/ + +ZEXTERN int ZEXPORT inflateReset2(z_streamp strm, + int windowBits); +/* + This function is the same as inflateReset, but it also permits changing + the wrap and window size requests. The windowBits parameter is interpreted + the same as it is for inflateInit2. If the window size is changed, then the + memory allocated for the window is freed, and the window will be reallocated + by inflate() if needed. + + inflateReset2 returns Z_OK if success, or Z_STREAM_ERROR if the source + stream state was inconsistent (such as zalloc or state being Z_NULL), or if + the windowBits parameter is invalid. +*/ + +ZEXTERN int ZEXPORT inflatePrime(z_streamp strm, + int bits, + int value); +/* + This function inserts bits in the inflate input stream. The intent is + that this function is used to start inflating at a bit position in the + middle of a byte. The provided bits will be used before any bytes are used + from next_in. This function should only be used with raw inflate, and + should be used before the first inflate() call after inflateInit2() or + inflateReset(). 
bits must be less than or equal to 16, and that many of the + least significant bits of value will be inserted in the input. + + If bits is negative, then the input stream bit buffer is emptied. Then + inflatePrime() can be called again to put bits in the buffer. This is used + to clear out bits leftover after feeding inflate a block description prior + to feeding inflate codes. + + inflatePrime returns Z_OK if success, or Z_STREAM_ERROR if the source + stream state was inconsistent. +*/ + +ZEXTERN long ZEXPORT inflateMark(z_streamp strm); +/* + This function returns two values, one in the lower 16 bits of the return + value, and the other in the remaining upper bits, obtained by shifting the + return value down 16 bits. If the upper value is -1 and the lower value is + zero, then inflate() is currently decoding information outside of a block. + If the upper value is -1 and the lower value is non-zero, then inflate is in + the middle of a stored block, with the lower value equaling the number of + bytes from the input remaining to copy. If the upper value is not -1, then + it is the number of bits back from the current bit position in the input of + the code (literal or length/distance pair) currently being processed. In + that case the lower value is the number of bytes already emitted for that + code. + + A code is being processed if inflate is waiting for more input to complete + decoding of the code, or if it has completed decoding but is waiting for + more output space to write the literal or match data. + + inflateMark() is used to mark locations in the input data for random + access, which may be at bit positions, and to note those cases where the + output of a code may span boundaries of random access blocks. The current + location in the input stream can be determined from avail_in and data_type + as noted in the description for the Z_BLOCK flush parameter for inflate. + + inflateMark returns the value noted above, or -65536 if the provided + source stream state was inconsistent. +*/ + +ZEXTERN int ZEXPORT inflateGetHeader(z_streamp strm, + gz_headerp head); +/* + inflateGetHeader() requests that gzip header information be stored in the + provided gz_header structure. inflateGetHeader() may be called after + inflateInit2() or inflateReset(), and before the first call of inflate(). + As inflate() processes the gzip stream, head->done is zero until the header + is completed, at which time head->done is set to one. If a zlib stream is + being decoded, then head->done is set to -1 to indicate that there will be + no gzip header information forthcoming. Note that Z_BLOCK or Z_TREES can be + used to force inflate() to return immediately after header processing is + complete and before any actual data is decompressed. + + The text, time, xflags, and os fields are filled in with the gzip header + contents. hcrc is set to true if there is a header CRC. (The header CRC + was valid if done is set to one.) If extra is not Z_NULL, then extra_max + contains the maximum number of bytes to write to extra. Once done is true, + extra_len contains the actual extra field length, and extra contains the + extra field, or that field truncated if extra_max is less than extra_len. + If name is not Z_NULL, then up to name_max characters are written there, + terminated with a zero unless the length is greater than name_max. If + comment is not Z_NULL, then up to comm_max characters are written there, + terminated with a zero unless the length is greater than comm_max. 
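+
+    A brief sketch of providing buffers for the header fields before the first
+  inflate() call (an already initialized gzip-decoding stream, for example via
+  inflateInit2(&strm, 15 + 16), is assumed; memset() needs <string.h>):
+
+      gz_header head;
+      static Bytef name[80], comment[160], extra[64];
+      memset(&head, 0, sizeof(head));
+      head.name = name;        head.name_max = sizeof(name);
+      head.comment = comment;  head.comm_max = sizeof(comment);
+      head.extra = extra;      head.extra_max = sizeof(extra);
+      int ret = inflateGetHeader(&strm, &head);
+      // run inflate() as usual; once head.done is 1 the fields above are valid
+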
When any + of extra, name, or comment are not Z_NULL and the respective field is not + present in the header, then that field is set to Z_NULL to signal its + absence. This allows the use of deflateSetHeader() with the returned + structure to duplicate the header. However if those fields are set to + allocated memory, then the application will need to save those pointers + elsewhere so that they can be eventually freed. + + If inflateGetHeader is not used, then the header information is simply + discarded. The header is always checked for validity, including the header + CRC if present. inflateReset() will reset the process to discard the header + information. The application would need to call inflateGetHeader() again to + retrieve the header from the next gzip stream. + + inflateGetHeader returns Z_OK if success, or Z_STREAM_ERROR if the source + stream state was inconsistent. +*/ + +/* +ZEXTERN int ZEXPORT inflateBackInit(z_streamp strm, int windowBits, + unsigned char FAR *window); + + Initialize the internal stream state for decompression using inflateBack() + calls. The fields zalloc, zfree and opaque in strm must be initialized + before the call. If zalloc and zfree are Z_NULL, then the default library- + derived memory allocation routines are used. windowBits is the base two + logarithm of the window size, in the range 8..15. window is a caller + supplied buffer of that size. Except for special applications where it is + assured that deflate was used with small window sizes, windowBits must be 15 + and a 32K byte window must be supplied to be able to decompress general + deflate streams. + + See inflateBack() for the usage of these routines. + + inflateBackInit will return Z_OK on success, Z_STREAM_ERROR if any of + the parameters are invalid, Z_MEM_ERROR if the internal state could not be + allocated, or Z_VERSION_ERROR if the version of the library does not match + the version of the header file. +*/ + +typedef unsigned (*in_func)(void FAR *, + z_const unsigned char FAR * FAR *); +typedef int (*out_func)(void FAR *, unsigned char FAR *, unsigned); + +ZEXTERN int ZEXPORT inflateBack(z_streamp strm, + in_func in, void FAR *in_desc, + out_func out, void FAR *out_desc); +/* + inflateBack() does a raw inflate with a single call using a call-back + interface for input and output. This is potentially more efficient than + inflate() for file i/o applications, in that it avoids copying between the + output and the sliding window by simply making the window itself the output + buffer. inflate() can be faster on modern CPUs when used with large + buffers. inflateBack() trusts the application to not change the output + buffer passed by the output function, at least until inflateBack() returns. + + inflateBackInit() must be called first to allocate the internal state + and to initialize the state with the user-provided window buffer. + inflateBack() may then be used multiple times to inflate a complete, raw + deflate stream with each call. inflateBackEnd() is then called to free the + allocated state. + + A raw deflate stream is one with no zlib or gzip header or trailer. + This routine would normally be used in a utility that reads zip or gzip + files and writes out uncompressed files. The utility would decode the + header and process the trailer on its own, hence this routine expects only + the raw deflate stream to decompress. This is different from the default + behavior of inflate(), which expects a zlib header and trailer around the + deflate stream. 
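+
+    A hedged sketch of the callback interface described below, decompressing a
+  raw deflate stream from stdin to stdout (needs <stdio.h>; error handling is
+  omitted):
+
+      static unsigned pull(void *desc, z_const unsigned char **buf) {
+          static unsigned char hold[16384];
+          *buf = hold;
+          return (unsigned)fread(hold, 1, sizeof(hold), (FILE *)desc);
+      }
+
+      static int push(void *desc, unsigned char *buf, unsigned len) {
+          return fwrite(buf, 1, len, (FILE *)desc) != len;   // nonzero on error
+      }
+
+      unsigned char window[32768];
+      z_stream strm;
+      int ret;
+      strm.zalloc = Z_NULL;
+      strm.zfree = Z_NULL;
+      strm.opaque = Z_NULL;
+      strm.next_in = Z_NULL;     // no initial input; in() is called immediately
+      strm.avail_in = 0;
+      ret = inflateBackInit(&strm, 15, window);
+      ret = inflateBack(&strm, pull, stdin, push, stdout);   // expect Z_STREAM_END
+      (void)inflateBackEnd(&strm);
+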
+ + inflateBack() uses two subroutines supplied by the caller that are then + called by inflateBack() for input and output. inflateBack() calls those + routines until it reads a complete deflate stream and writes out all of the + uncompressed data, or until it encounters an error. The function's + parameters and return types are defined above in the in_func and out_func + typedefs. inflateBack() will call in(in_desc, &buf) which should return the + number of bytes of provided input, and a pointer to that input in buf. If + there is no input available, in() must return zero -- buf is ignored in that + case -- and inflateBack() will return a buffer error. inflateBack() will + call out(out_desc, buf, len) to write the uncompressed data buf[0..len-1]. + out() should return zero on success, or non-zero on failure. If out() + returns non-zero, inflateBack() will return with an error. Neither in() nor + out() are permitted to change the contents of the window provided to + inflateBackInit(), which is also the buffer that out() uses to write from. + The length written by out() will be at most the window size. Any non-zero + amount of input may be provided by in(). + + For convenience, inflateBack() can be provided input on the first call by + setting strm->next_in and strm->avail_in. If that input is exhausted, then + in() will be called. Therefore strm->next_in must be initialized before + calling inflateBack(). If strm->next_in is Z_NULL, then in() will be called + immediately for input. If strm->next_in is not Z_NULL, then strm->avail_in + must also be initialized, and then if strm->avail_in is not zero, input will + initially be taken from strm->next_in[0 .. strm->avail_in - 1]. + + The in_desc and out_desc parameters of inflateBack() is passed as the + first parameter of in() and out() respectively when they are called. These + descriptors can be optionally used to pass any information that the caller- + supplied in() and out() functions need to do their job. + + On return, inflateBack() will set strm->next_in and strm->avail_in to + pass back any unused input that was provided by the last in() call. The + return values of inflateBack() can be Z_STREAM_END on success, Z_BUF_ERROR + if in() or out() returned an error, Z_DATA_ERROR if there was a format error + in the deflate stream (in which case strm->msg is set to indicate the nature + of the error), or Z_STREAM_ERROR if the stream was not properly initialized. + In the case of Z_BUF_ERROR, an input or output error can be distinguished + using strm->next_in which will be Z_NULL only if in() returned an error. If + strm->next_in is not Z_NULL, then the Z_BUF_ERROR was due to out() returning + non-zero. (in() will always be called before out(), so strm->next_in is + assured to be defined if out() returns non-zero.) Note that inflateBack() + cannot return Z_OK. +*/ + +ZEXTERN int ZEXPORT inflateBackEnd(z_streamp strm); +/* + All memory allocated by inflateBackInit() is freed. + + inflateBackEnd() returns Z_OK on success, or Z_STREAM_ERROR if the stream + state was inconsistent. +*/ + +ZEXTERN uLong ZEXPORT zlibCompileFlags(void); +/* Return flags indicating compile-time options. 
+ + Type sizes, two bits each, 00 = 16 bits, 01 = 32, 10 = 64, 11 = other: + 1.0: size of uInt + 3.2: size of uLong + 5.4: size of voidpf (pointer) + 7.6: size of z_off_t + + Compiler, assembler, and debug options: + 8: ZLIB_DEBUG + 9: ASMV or ASMINF -- use ASM code + 10: ZLIB_WINAPI -- exported functions use the WINAPI calling convention + 11: 0 (reserved) + + One-time table building (smaller code, but not thread-safe if true): + 12: BUILDFIXED -- build static block decoding tables when needed + 13: DYNAMIC_CRC_TABLE -- build CRC calculation tables when needed + 14,15: 0 (reserved) + + Library content (indicates missing functionality): + 16: NO_GZCOMPRESS -- gz* functions cannot compress (to avoid linking + deflate code when not needed) + 17: NO_GZIP -- deflate can't write gzip streams, and inflate can't detect + and decode gzip streams (to avoid linking crc code) + 18-19: 0 (reserved) + + Operation variations (changes in library functionality): + 20: PKZIP_BUG_WORKAROUND -- slightly more permissive inflate + 21: FASTEST -- deflate algorithm with only one, lowest compression level + 22,23: 0 (reserved) + + The sprintf variant used by gzprintf (zero is best): + 24: 0 = vs*, 1 = s* -- 1 means limited to 20 arguments after the format + 25: 0 = *nprintf, 1 = *printf -- 1 means gzprintf() not secure! + 26: 0 = returns value, 1 = void -- 1 means inferred string length returned + + Remainder: + 27-31: 0 (reserved) + */ + +#ifndef Z_SOLO + + /* utility functions */ + +/* + The following utility functions are implemented on top of the basic + stream-oriented functions. To simplify the interface, some default options + are assumed (compression level and memory usage, standard memory allocation + functions). The source code of these utility functions can be modified if + you need special options. +*/ + +ZEXTERN int ZEXPORT compress(Bytef *dest, uLongf *destLen, + const Bytef *source, uLong sourceLen); +/* + Compresses the source buffer into the destination buffer. sourceLen is + the byte length of the source buffer. Upon entry, destLen is the total size + of the destination buffer, which must be at least the value returned by + compressBound(sourceLen). Upon exit, destLen is the actual size of the + compressed data. compress() is equivalent to compress2() with a level + parameter of Z_DEFAULT_COMPRESSION. + + compress returns Z_OK if success, Z_MEM_ERROR if there was not + enough memory, Z_BUF_ERROR if there was not enough room in the output + buffer. +*/ + +ZEXTERN int ZEXPORT compress2(Bytef *dest, uLongf *destLen, + const Bytef *source, uLong sourceLen, + int level); +/* + Compresses the source buffer into the destination buffer. The level + parameter has the same meaning as in deflateInit. sourceLen is the byte + length of the source buffer. Upon entry, destLen is the total size of the + destination buffer, which must be at least the value returned by + compressBound(sourceLen). Upon exit, destLen is the actual size of the + compressed data. + + compress2 returns Z_OK if success, Z_MEM_ERROR if there was not enough + memory, Z_BUF_ERROR if there was not enough room in the output buffer, + Z_STREAM_ERROR if the level parameter is invalid. +*/ + +ZEXTERN uLong ZEXPORT compressBound(uLong sourceLen); +/* + compressBound() returns an upper bound on the compressed size after + compress() or compress2() on sourceLen bytes. It would be used before a + compress() or compress2() call to allocate the destination buffer. 
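+
+    For illustration, a short sketch combining compressBound(), compress2()
+  and uncompress() (text is a hypothetical NUL-terminated string; needs
+  <stdlib.h> and <string.h>, and return values should be checked in real
+  code):
+
+      uLong srcLen = (uLong)strlen(text) + 1;        // include the NUL
+      uLong bound = compressBound(srcLen);
+      Bytef *comp = (Bytef *)malloc(bound);
+      uLong compLen = bound;
+      int ret = compress2(comp, &compLen, (const Bytef *)text, srcLen,
+                          Z_BEST_COMPRESSION);       // Z_OK on success
+
+      Bytef plain[256];
+      uLong plainLen = sizeof(plain);
+      ret = uncompress(plain, &plainLen, comp, compLen);
+      free(comp);                                    // plainLen == srcLen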
+*/ + +ZEXTERN int ZEXPORT uncompress(Bytef *dest, uLongf *destLen, + const Bytef *source, uLong sourceLen); +/* + Decompresses the source buffer into the destination buffer. sourceLen is + the byte length of the source buffer. Upon entry, destLen is the total size + of the destination buffer, which must be large enough to hold the entire + uncompressed data. (The size of the uncompressed data must have been saved + previously by the compressor and transmitted to the decompressor by some + mechanism outside the scope of this compression library.) Upon exit, destLen + is the actual size of the uncompressed data. + + uncompress returns Z_OK if success, Z_MEM_ERROR if there was not + enough memory, Z_BUF_ERROR if there was not enough room in the output + buffer, or Z_DATA_ERROR if the input data was corrupted or incomplete. In + the case where there is not enough room, uncompress() will fill the output + buffer with the uncompressed data up to that point. +*/ + +ZEXTERN int ZEXPORT uncompress2(Bytef *dest, uLongf *destLen, + const Bytef *source, uLong *sourceLen); +/* + Same as uncompress, except that sourceLen is a pointer, where the + length of the source is *sourceLen. On return, *sourceLen is the number of + source bytes consumed. +*/ + + /* gzip file access functions */ + +/* + This library supports reading and writing files in gzip (.gz) format with + an interface similar to that of stdio, using the functions that start with + "gz". The gzip format is different from the zlib format. gzip is a gzip + wrapper, documented in RFC 1952, wrapped around a deflate stream. +*/ + +typedef struct gzFile_s *gzFile; /* semi-opaque gzip file descriptor */ + +/* +ZEXTERN gzFile ZEXPORT gzopen(const char *path, const char *mode); + + Open the gzip (.gz) file at path for reading and decompressing, or + compressing and writing. The mode parameter is as in fopen ("rb" or "wb") + but can also include a compression level ("wb9") or a strategy: 'f' for + filtered data as in "wb6f", 'h' for Huffman-only compression as in "wb1h", + 'R' for run-length encoding as in "wb1R", or 'F' for fixed code compression + as in "wb9F". (See the description of deflateInit2 for more information + about the strategy parameter.) 'T' will request transparent writing or + appending with no compression and not using the gzip format. + + "a" can be used instead of "w" to request that the gzip stream that will + be written be appended to the file. "+" will result in an error, since + reading and writing to the same gzip file is not supported. The addition of + "x" when writing will create the file exclusively, which fails if the file + already exists. On systems that support it, the addition of "e" when + reading or writing will set the flag to close the file on an execve() call. + + These functions, as well as gzip, will read and decode a sequence of gzip + streams in a file. The append function of gzopen() can be used to create + such a file. (Also see gzflush() for another way to do this.) When + appending, gzopen does not test whether the file begins with a gzip stream, + nor does it look for the end of the gzip streams to begin appending. gzopen + will simply append a gzip stream to the existing file. + + gzopen can be used to read a file which is not in gzip format; in this + case gzread will directly read from the file without decompression. When + reading, this will be detected automatically by looking for the magic two- + byte gzip header. 
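+
+    A few informal examples of mode strings (the file names are hypothetical):
+
+      gzFile gr = gzopen("data.gz", "rb");     // read and decompress
+      gzFile gw = gzopen("out.gz", "wb9F");    // write, level 9, fixed codes
+      gzFile ga = gzopen("log.gz", "ab");      // append another gzip stream
+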
+ + gzopen returns NULL if the file could not be opened, if there was + insufficient memory to allocate the gzFile state, or if an invalid mode was + specified (an 'r', 'w', or 'a' was not provided, or '+' was provided). + errno can be checked to determine if the reason gzopen failed was that the + file could not be opened. +*/ + +ZEXTERN gzFile ZEXPORT gzdopen(int fd, const char *mode); +/* + Associate a gzFile with the file descriptor fd. File descriptors are + obtained from calls like open, dup, creat, pipe or fileno (if the file has + been previously opened with fopen). The mode parameter is as in gzopen. + + The next call of gzclose on the returned gzFile will also close the file + descriptor fd, just like fclose(fdopen(fd, mode)) closes the file descriptor + fd. If you want to keep fd open, use fd = dup(fd_keep); gz = gzdopen(fd, + mode);. The duplicated descriptor should be saved to avoid a leak, since + gzdopen does not close fd if it fails. If you are using fileno() to get the + file descriptor from a FILE *, then you will have to use dup() to avoid + double-close()ing the file descriptor. Both gzclose() and fclose() will + close the associated file descriptor, so they need to have different file + descriptors. + + gzdopen returns NULL if there was insufficient memory to allocate the + gzFile state, if an invalid mode was specified (an 'r', 'w', or 'a' was not + provided, or '+' was provided), or if fd is -1. The file descriptor is not + used until the next gz* read, write, seek, or close operation, so gzdopen + will not detect if fd is invalid (unless fd is -1). +*/ + +ZEXTERN int ZEXPORT gzbuffer(gzFile file, unsigned size); +/* + Set the internal buffer size used by this library's functions for file to + size. The default buffer size is 8192 bytes. This function must be called + after gzopen() or gzdopen(), and before any other calls that read or write + the file. The buffer memory allocation is always deferred to the first read + or write. Three times that size in buffer space is allocated. A larger + buffer size of, for example, 64K or 128K bytes will noticeably increase the + speed of decompression (reading). + + The new buffer size also affects the maximum length for gzprintf(). + + gzbuffer() returns 0 on success, or -1 on failure, such as being called + too late. +*/ + +ZEXTERN int ZEXPORT gzsetparams(gzFile file, int level, int strategy); +/* + Dynamically update the compression level and strategy for file. See the + description of deflateInit2 for the meaning of these parameters. Previously + provided data is flushed before applying the parameter changes. + + gzsetparams returns Z_OK if success, Z_STREAM_ERROR if the file was not + opened for writing, Z_ERRNO if there is an error writing the flushed data, + or Z_MEM_ERROR if there is a memory allocation error. +*/ + +ZEXTERN int ZEXPORT gzread(gzFile file, voidp buf, unsigned len); +/* + Read and decompress up to len uncompressed bytes from file into buf. If + the input file is not in gzip format, gzread copies the given number of + bytes into the buffer directly from the file. + + After reaching the end of a gzip stream in the input, gzread will continue + to read, looking for another gzip stream. Any number of gzip streams may be + concatenated in the input file, and will all be decompressed by gzread(). + If something other than a gzip stream is encountered after a gzip stream, + that remaining trailing garbage is ignored (and no error is returned). 
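+
+    For illustration only, a minimal read loop (the file name is hypothetical
+  and <stdio.h> is assumed for the output side):
+
+      gzFile gz = gzopen("input.gz", "rb");
+      if (gz != NULL) {
+          char buf[16384];
+          int n;
+          while ((n = gzread(gz, buf, (unsigned)sizeof(buf))) > 0)
+              fwrite(buf, 1, (size_t)n, stdout);
+          if (n < 0) {
+              int errnum;
+              fprintf(stderr, "gzread: %s\n", gzerror(gz, &errnum));
+          }
+          gzclose(gz);
+      }
+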
+ + gzread can be used to read a gzip file that is being concurrently written. + Upon reaching the end of the input, gzread will return with the available + data. If the error code returned by gzerror is Z_OK or Z_BUF_ERROR, then + gzclearerr can be used to clear the end of file indicator in order to permit + gzread to be tried again. Z_OK indicates that a gzip stream was completed + on the last gzread. Z_BUF_ERROR indicates that the input file ended in the + middle of a gzip stream. Note that gzread does not return -1 in the event + of an incomplete gzip stream. This error is deferred until gzclose(), which + will return Z_BUF_ERROR if the last gzread ended in the middle of a gzip + stream. Alternatively, gzerror can be used before gzclose to detect this + case. + + gzread returns the number of uncompressed bytes actually read, less than + len for end of file, or -1 for error. If len is too large to fit in an int, + then nothing is read, -1 is returned, and the error state is set to + Z_STREAM_ERROR. +*/ + +ZEXTERN z_size_t ZEXPORT gzfread(voidp buf, z_size_t size, z_size_t nitems, + gzFile file); +/* + Read and decompress up to nitems items of size size from file into buf, + otherwise operating as gzread() does. This duplicates the interface of + stdio's fread(), with size_t request and return types. If the library + defines size_t, then z_size_t is identical to size_t. If not, then z_size_t + is an unsigned integer type that can contain a pointer. + + gzfread() returns the number of full items read of size size, or zero if + the end of the file was reached and a full item could not be read, or if + there was an error. gzerror() must be consulted if zero is returned in + order to determine if there was an error. If the multiplication of size and + nitems overflows, i.e. the product does not fit in a z_size_t, then nothing + is read, zero is returned, and the error state is set to Z_STREAM_ERROR. + + In the event that the end of file is reached and only a partial item is + available at the end, i.e. the remaining uncompressed data length is not a + multiple of size, then the final partial item is nevertheless read into buf + and the end-of-file flag is set. The length of the partial item read is not + provided, but could be inferred from the result of gztell(). This behavior + is the same as the behavior of fread() implementations in common libraries, + but it prevents the direct use of gzfread() to read a concurrently written + file, resetting and retrying on end-of-file, when size is not 1. +*/ + +ZEXTERN int ZEXPORT gzwrite(gzFile file, voidpc buf, unsigned len); +/* + Compress and write the len uncompressed bytes at buf to file. gzwrite + returns the number of uncompressed bytes written or 0 in case of error. +*/ + +ZEXTERN z_size_t ZEXPORT gzfwrite(voidpc buf, z_size_t size, + z_size_t nitems, gzFile file); +/* + Compress and write nitems items of size size from buf to file, duplicating + the interface of stdio's fwrite(), with size_t request and return types. If + the library defines size_t, then z_size_t is identical to size_t. If not, + then z_size_t is an unsigned integer type that can contain a pointer. + + gzfwrite() returns the number of full items written of size size, or zero + if there was an error. If the multiplication of size and nitems overflows, + i.e. the product does not fit in a z_size_t, then nothing is written, zero + is returned, and the error state is set to Z_STREAM_ERROR. 
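+
+    A short sketch of the writing side (the file name is hypothetical; needs
+  <stdio.h> for the error report):
+
+      gzFile gz = gzopen("output.gz", "wb9");
+      if (gz != NULL) {
+          const char msg[] = "hello, gzip\n";
+          if (gzwrite(gz, msg, (unsigned)(sizeof(msg) - 1)) == 0) {
+              int errnum;
+              fprintf(stderr, "gzwrite: %s\n", gzerror(gz, &errnum));
+          }
+          gzclose(gz);                       // completes the gzip stream
+      }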
+*/ + +ZEXTERN int ZEXPORTVA gzprintf(gzFile file, const char *format, ...); +/* + Convert, format, compress, and write the arguments (...) to file under + control of the string format, as in fprintf. gzprintf returns the number of + uncompressed bytes actually written, or a negative zlib error code in case + of error. The number of uncompressed bytes written is limited to 8191, or + one less than the buffer size given to gzbuffer(). The caller should assure + that this limit is not exceeded. If it is exceeded, then gzprintf() will + return an error (0) with nothing written. In this case, there may also be a + buffer overflow with unpredictable consequences, which is possible only if + zlib was compiled with the insecure functions sprintf() or vsprintf(), + because the secure snprintf() or vsnprintf() functions were not available. + This can be determined using zlibCompileFlags(). +*/ + +ZEXTERN int ZEXPORT gzputs(gzFile file, const char *s); +/* + Compress and write the given null-terminated string s to file, excluding + the terminating null character. + + gzputs returns the number of characters written, or -1 in case of error. +*/ + +ZEXTERN char * ZEXPORT gzgets(gzFile file, char *buf, int len); +/* + Read and decompress bytes from file into buf, until len-1 characters are + read, or until a newline character is read and transferred to buf, or an + end-of-file condition is encountered. If any characters are read or if len + is one, the string is terminated with a null character. If no characters + are read due to an end-of-file or len is less than one, then the buffer is + left untouched. + + gzgets returns buf which is a null-terminated string, or it returns NULL + for end-of-file or in case of error. If there was an error, the contents at + buf are indeterminate. +*/ + +ZEXTERN int ZEXPORT gzputc(gzFile file, int c); +/* + Compress and write c, converted to an unsigned char, into file. gzputc + returns the value that was written, or -1 in case of error. +*/ + +ZEXTERN int ZEXPORT gzgetc(gzFile file); +/* + Read and decompress one byte from file. gzgetc returns this byte or -1 + in case of end of file or error. This is implemented as a macro for speed. + As such, it does not do all of the checking the other functions do. I.e. + it does not check to see if file is NULL, nor whether the structure file + points to has been clobbered or not. +*/ + +ZEXTERN int ZEXPORT gzungetc(int c, gzFile file); +/* + Push c back onto the stream for file to be read as the first character on + the next read. At least one character of push-back is always allowed. + gzungetc() returns the character pushed, or -1 on failure. gzungetc() will + fail if c is -1, and may fail if a character has been pushed but not read + yet. If gzungetc is used immediately after gzopen or gzdopen, at least the + output buffer size of pushed characters is allowed. (See gzbuffer above.) + The pushed character will be discarded if the stream is repositioned with + gzseek() or gzrewind(). +*/ + +ZEXTERN int ZEXPORT gzflush(gzFile file, int flush); +/* + Flush all pending output to file. The parameter flush is as in the + deflate() function. The return value is the zlib error number (see function + gzerror below). gzflush is only permitted when writing. + + If the flush parameter is Z_FINISH, the remaining data is written and the + gzip stream is completed in the output. If gzwrite() is called again, a new + gzip stream will be started in the output. gzread() is able to read such + concatenated gzip streams. 
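+
+   For example (an illustrative sketch with error checks omitted), the
+   following writes two gzip members to a single file, which gzread() will
+   later return as one concatenated uncompressed stream:
+
+     gzFile out = gzopen("two_members.gz", "wb");
+     gzputs(out, "first member\n");
+     gzflush(out, Z_FINISH);
+     gzputs(out, "second member\n");
+     gzclose(out);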
+ + gzflush should be called only when strictly necessary because it will + degrade compression if called too often. +*/ + +/* +ZEXTERN z_off_t ZEXPORT gzseek(gzFile file, + z_off_t offset, int whence); + + Set the starting position to offset relative to whence for the next gzread + or gzwrite on file. The offset represents a number of bytes in the + uncompressed data stream. The whence parameter is defined as in lseek(2); + the value SEEK_END is not supported. + + If the file is opened for reading, this function is emulated but can be + extremely slow. If the file is opened for writing, only forward seeks are + supported; gzseek then compresses a sequence of zeroes up to the new + starting position. + + gzseek returns the resulting offset location as measured in bytes from + the beginning of the uncompressed stream, or -1 in case of error, in + particular if the file is opened for writing and the new starting position + would be before the current position. +*/ + +ZEXTERN int ZEXPORT gzrewind(gzFile file); +/* + Rewind file. This function is supported only for reading. + + gzrewind(file) is equivalent to (int)gzseek(file, 0L, SEEK_SET). +*/ + +/* +ZEXTERN z_off_t ZEXPORT gztell(gzFile file); + + Return the starting position for the next gzread or gzwrite on file. + This position represents a number of bytes in the uncompressed data stream, + and is zero when starting, even if appending or reading a gzip stream from + the middle of a file using gzdopen(). + + gztell(file) is equivalent to gzseek(file, 0L, SEEK_CUR) +*/ + +/* +ZEXTERN z_off_t ZEXPORT gzoffset(gzFile file); + + Return the current compressed (actual) read or write offset of file. This + offset includes the count of bytes that precede the gzip stream, for example + when appending or when using gzdopen() for reading. When reading, the + offset does not include as yet unused buffered input. This information can + be used for a progress indicator. On error, gzoffset() returns -1. +*/ + +ZEXTERN int ZEXPORT gzeof(gzFile file); +/* + Return true (1) if the end-of-file indicator for file has been set while + reading, false (0) otherwise. Note that the end-of-file indicator is set + only if the read tried to go past the end of the input, but came up short. + Therefore, just like feof(), gzeof() may return false even if there is no + more data to read, in the event that the last read request was for the exact + number of bytes remaining in the input file. This will happen if the input + file size is an exact multiple of the buffer size. + + If gzeof() returns true, then the read functions will return no more data, + unless the end-of-file indicator is reset by gzclearerr() and the input file + has grown since the previous end of file was detected. +*/ + +ZEXTERN int ZEXPORT gzdirect(gzFile file); +/* + Return true (1) if file is being copied directly while reading, or false + (0) if file is a gzip stream being decompressed. + + If the input file is empty, gzdirect() will return true, since the input + does not contain a gzip stream. + + If gzdirect() is used immediately after gzopen() or gzdopen() it will + cause buffers to be allocated to allow reading the file to determine if it + is a gzip file. Therefore if gzbuffer() is used, it should be called before + gzdirect(). + + When writing, gzdirect() returns true (1) if transparent writing was + requested ("wT" for the gzopen() mode), or false (0) otherwise. (Note: + gzdirect() is not needed when writing. 
Transparent writing must be + explicitly requested, so the application already knows the answer. When + linking statically, using gzdirect() will include all of the zlib code for + gzip file reading and decompression, which may not be desired.) +*/ + +ZEXTERN int ZEXPORT gzclose(gzFile file); +/* + Flush all pending output for file, if necessary, close file and + deallocate the (de)compression state. Note that once file is closed, you + cannot call gzerror with file, since its structures have been deallocated. + gzclose must not be called more than once on the same file, just as free + must not be called more than once on the same allocation. + + gzclose will return Z_STREAM_ERROR if file is not valid, Z_ERRNO on a + file operation error, Z_MEM_ERROR if out of memory, Z_BUF_ERROR if the + last read ended in the middle of a gzip stream, or Z_OK on success. +*/ + +ZEXTERN int ZEXPORT gzclose_r(gzFile file); +ZEXTERN int ZEXPORT gzclose_w(gzFile file); +/* + Same as gzclose(), but gzclose_r() is only for use when reading, and + gzclose_w() is only for use when writing or appending. The advantage to + using these instead of gzclose() is that they avoid linking in zlib + compression or decompression code that is not used when only reading or only + writing respectively. If gzclose() is used, then both compression and + decompression code will be included the application when linking to a static + zlib library. +*/ + +ZEXTERN const char * ZEXPORT gzerror(gzFile file, int *errnum); +/* + Return the error message for the last error which occurred on file. + errnum is set to zlib error number. If an error occurred in the file system + and not in the compression library, errnum is set to Z_ERRNO and the + application may consult errno to get the exact error code. + + The application must not modify the returned string. Future calls to + this function may invalidate the previously returned string. If file is + closed, then the string previously returned by gzerror will no longer be + available. + + gzerror() should be used to distinguish errors from end-of-file for those + functions above that do not distinguish those cases in their return values. +*/ + +ZEXTERN void ZEXPORT gzclearerr(gzFile file); +/* + Clear the error and end-of-file flags for file. This is analogous to the + clearerr() function in stdio. This is useful for continuing to read a gzip + file that is being written concurrently. +*/ + +#endif /* !Z_SOLO */ + + /* checksum functions */ + +/* + These functions are not related to compression but are exported + anyway because they might be useful in applications using the compression + library. +*/ + +ZEXTERN uLong ZEXPORT adler32(uLong adler, const Bytef *buf, uInt len); +/* + Update a running Adler-32 checksum with the bytes buf[0..len-1] and + return the updated checksum. An Adler-32 value is in the range of a 32-bit + unsigned integer. If buf is Z_NULL, this function returns the required + initial value for the checksum. + + An Adler-32 checksum is almost as reliable as a CRC-32 but can be computed + much faster. + + Usage example: + + uLong adler = adler32(0L, Z_NULL, 0); + + while (read_buffer(buffer, length) != EOF) { + adler = adler32(adler, buffer, length); + } + if (adler != original_adler) error(); +*/ + +ZEXTERN uLong ZEXPORT adler32_z(uLong adler, const Bytef *buf, + z_size_t len); +/* + Same as adler32(), but with a size_t length. +*/ + +/* +ZEXTERN uLong ZEXPORT adler32_combine(uLong adler1, uLong adler2, + z_off_t len2); + + Combine two Adler-32 checksums into one. 
+
+ For two sequences of bytes, seq1
+ and seq2 with lengths len1 and len2, Adler-32 checksums were calculated for
+ each, adler1 and adler2. adler32_combine() returns the Adler-32 checksum of
+ seq1 and seq2 concatenated, requiring only adler1, adler2, and len2. Note
+ that the z_off_t type (like off_t) is a signed integer. If len2 is
+ negative, the result has no meaning or utility.
+*/
+
+ZEXTERN uLong ZEXPORT crc32(uLong crc, const Bytef *buf, uInt len);
+/*
+ Update a running CRC-32 with the bytes buf[0..len-1] and return the
+ updated CRC-32. A CRC-32 value is in the range of a 32-bit unsigned integer.
+ If buf is Z_NULL, this function returns the required initial value for the
+ crc. Pre- and post-conditioning (one's complement) is performed within this
+ function so it shouldn't be done by the application.
+
+ Usage example:
+
+ uLong crc = crc32(0L, Z_NULL, 0);
+
+ while (read_buffer(buffer, length) != EOF) {
+ crc = crc32(crc, buffer, length);
+ }
+ if (crc != original_crc) error();
+*/
+
+ZEXTERN uLong ZEXPORT crc32_z(uLong crc, const Bytef *buf,
+ z_size_t len);
+/*
+ Same as crc32(), but with a size_t length.
+*/
+
+/*
+ZEXTERN uLong ZEXPORT crc32_combine(uLong crc1, uLong crc2, z_off_t len2);
+
+ Combine two CRC-32 check values into one. For two sequences of bytes,
+ seq1 and seq2 with lengths len1 and len2, CRC-32 check values were
+ calculated for each, crc1 and crc2. crc32_combine() returns the CRC-32
+ check value of seq1 and seq2 concatenated, requiring only crc1, crc2, and
+ len2. len2 must be non-negative.
+*/
+
+/*
+ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t len2);
+
+ Return the operator corresponding to length len2, to be used with
+ crc32_combine_op(). len2 must be non-negative.
+*/
+
+ZEXTERN uLong ZEXPORT crc32_combine_op(uLong crc1, uLong crc2, uLong op);
+/*
+ Give the same result as crc32_combine(), using op in place of len2. op is
+ generated from len2 by crc32_combine_gen(). This will be faster than
+ crc32_combine() if the generated op is used more than once.
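+
+   For illustration only (the variable names are placeholders): when several
+   pairs of CRC-32 values cover second sequences of the same length len2, the
+   operator can be generated once and reused:
+
+     uLong op = crc32_combine_gen(len2);
+     crcA = crc32_combine_op(crcA1, crcA2, op);
+     crcB = crc32_combine_op(crcB1, crcB2, op);
+
+   which gives the same results as calling crc32_combine() with len2 for each
+   pair.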
+*/ + + + /* various hacks, don't look :) */ + +/* deflateInit and inflateInit are macros to allow checking the zlib version + * and the compiler's view of z_stream: + */ +ZEXTERN int ZEXPORT deflateInit_(z_streamp strm, int level, + const char *version, int stream_size); +ZEXTERN int ZEXPORT inflateInit_(z_streamp strm, + const char *version, int stream_size); +ZEXTERN int ZEXPORT deflateInit2_(z_streamp strm, int level, int method, + int windowBits, int memLevel, + int strategy, const char *version, + int stream_size); +ZEXTERN int ZEXPORT inflateInit2_(z_streamp strm, int windowBits, + const char *version, int stream_size); +ZEXTERN int ZEXPORT inflateBackInit_(z_streamp strm, int windowBits, + unsigned char FAR *window, + const char *version, + int stream_size); +#ifdef Z_PREFIX_SET +# define z_deflateInit(strm, level) \ + deflateInit_((strm), (level), ZLIB_VERSION, (int)sizeof(z_stream)) +# define z_inflateInit(strm) \ + inflateInit_((strm), ZLIB_VERSION, (int)sizeof(z_stream)) +# define z_deflateInit2(strm, level, method, windowBits, memLevel, strategy) \ + deflateInit2_((strm),(level),(method),(windowBits),(memLevel),\ + (strategy), ZLIB_VERSION, (int)sizeof(z_stream)) +# define z_inflateInit2(strm, windowBits) \ + inflateInit2_((strm), (windowBits), ZLIB_VERSION, \ + (int)sizeof(z_stream)) +# define z_inflateBackInit(strm, windowBits, window) \ + inflateBackInit_((strm), (windowBits), (window), \ + ZLIB_VERSION, (int)sizeof(z_stream)) +#else +# define deflateInit(strm, level) \ + deflateInit_((strm), (level), ZLIB_VERSION, (int)sizeof(z_stream)) +# define inflateInit(strm) \ + inflateInit_((strm), ZLIB_VERSION, (int)sizeof(z_stream)) +# define deflateInit2(strm, level, method, windowBits, memLevel, strategy) \ + deflateInit2_((strm),(level),(method),(windowBits),(memLevel),\ + (strategy), ZLIB_VERSION, (int)sizeof(z_stream)) +# define inflateInit2(strm, windowBits) \ + inflateInit2_((strm), (windowBits), ZLIB_VERSION, \ + (int)sizeof(z_stream)) +# define inflateBackInit(strm, windowBits, window) \ + inflateBackInit_((strm), (windowBits), (window), \ + ZLIB_VERSION, (int)sizeof(z_stream)) +#endif + +#ifndef Z_SOLO + +/* gzgetc() macro and its supporting function and exposed data structure. Note + * that the real internal state is much larger than the exposed structure. + * This abbreviated structure exposes just enough for the gzgetc() macro. The + * user should not mess with these exposed elements, since their names or + * behavior could change in the future, perhaps even capriciously. They can + * only be used by the gzgetc() macro. You have been warned. + */ +struct gzFile_s { + unsigned have; + unsigned char *next; + z_off64_t pos; +}; +ZEXTERN int ZEXPORT gzgetc_(gzFile file); /* backward compatibility */ +#ifdef Z_PREFIX_SET +# undef z_gzgetc +# define z_gzgetc(g) \ + ((g)->have ? ((g)->have--, (g)->pos++, *((g)->next)++) : (gzgetc)(g)) +#else +# define gzgetc(g) \ + ((g)->have ? 
((g)->have--, (g)->pos++, *((g)->next)++) : (gzgetc)(g)) +#endif + +/* provide 64-bit offset functions if _LARGEFILE64_SOURCE defined, and/or + * change the regular functions to 64 bits if _FILE_OFFSET_BITS is 64 (if + * both are true, the application gets the *64 functions, and the regular + * functions are changed to 64 bits) -- in case these are set on systems + * without large file support, _LFS64_LARGEFILE must also be true + */ +#ifdef Z_LARGE64 + ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *); + ZEXTERN z_off64_t ZEXPORT gzseek64(gzFile, z_off64_t, int); + ZEXTERN z_off64_t ZEXPORT gztell64(gzFile); + ZEXTERN z_off64_t ZEXPORT gzoffset64(gzFile); + ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off64_t); + ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off64_t); + ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off64_t); +#endif + +#if !defined(ZLIB_INTERNAL) && defined(Z_WANT64) +# ifdef Z_PREFIX_SET +# define z_gzopen z_gzopen64 +# define z_gzseek z_gzseek64 +# define z_gztell z_gztell64 +# define z_gzoffset z_gzoffset64 +# define z_adler32_combine z_adler32_combine64 +# define z_crc32_combine z_crc32_combine64 +# define z_crc32_combine_gen z_crc32_combine_gen64 +# else +# define gzopen gzopen64 +# define gzseek gzseek64 +# define gztell gztell64 +# define gzoffset gzoffset64 +# define adler32_combine adler32_combine64 +# define crc32_combine crc32_combine64 +# define crc32_combine_gen crc32_combine_gen64 +# endif +# ifndef Z_LARGE64 + ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *); + ZEXTERN z_off_t ZEXPORT gzseek64(gzFile, z_off_t, int); + ZEXTERN z_off_t ZEXPORT gztell64(gzFile); + ZEXTERN z_off_t ZEXPORT gzoffset64(gzFile); + ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off_t); + ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off_t); + ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off_t); +# endif +#else + ZEXTERN gzFile ZEXPORT gzopen(const char *, const char *); + ZEXTERN z_off_t ZEXPORT gzseek(gzFile, z_off_t, int); + ZEXTERN z_off_t ZEXPORT gztell(gzFile); + ZEXTERN z_off_t ZEXPORT gzoffset(gzFile); + ZEXTERN uLong ZEXPORT adler32_combine(uLong, uLong, z_off_t); + ZEXTERN uLong ZEXPORT crc32_combine(uLong, uLong, z_off_t); + ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t); +#endif + +#else /* Z_SOLO */ + + ZEXTERN uLong ZEXPORT adler32_combine(uLong, uLong, z_off_t); + ZEXTERN uLong ZEXPORT crc32_combine(uLong, uLong, z_off_t); + ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t); + +#endif /* !Z_SOLO */ + +/* undocumented functions */ +ZEXTERN const char * ZEXPORT zError(int); +ZEXTERN int ZEXPORT inflateSyncPoint(z_streamp); +ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table(void); +ZEXTERN int ZEXPORT inflateUndermine(z_streamp, int); +ZEXTERN int ZEXPORT inflateValidate(z_streamp, int); +ZEXTERN unsigned long ZEXPORT inflateCodesUsed(z_streamp); +ZEXTERN int ZEXPORT inflateResetKeep(z_streamp); +ZEXTERN int ZEXPORT deflateResetKeep(z_streamp); +#if defined(_WIN32) && !defined(Z_SOLO) +ZEXTERN gzFile ZEXPORT gzopen_w(const wchar_t *path, + const char *mode); +#endif +#if defined(STDC) || defined(Z_HAVE_STDARG_H) +# ifndef Z_SOLO +ZEXTERN int ZEXPORTVA gzvprintf(gzFile file, + const char *format, + va_list va); +# endif +#endif + +#ifdef __cplusplus +} +#endif + +#endif /* ZLIB_H */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/__init__.py b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/__pycache__/__init__.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4583963d55cb473059920be34651ec3b64248a2c Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/__pycache__/__init__.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exslt.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exslt.h new file mode 100644 index 0000000000000000000000000000000000000000..dfbd09befae13448fae876f297f3ab32b7e27954 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exslt.h @@ -0,0 +1,108 @@ +/* + * Summary: main header file + * + * Copy: See Copyright for the status of this software. + */ + + +#ifndef __EXSLT_H__ +#define __EXSLT_H__ + +#include +#include +#include "exsltexports.h" +#include + +#ifdef __cplusplus +extern "C" { +#endif + +EXSLTPUBVAR const char *exsltLibraryVersion; +EXSLTPUBVAR const int exsltLibexsltVersion; +EXSLTPUBVAR const int exsltLibxsltVersion; +EXSLTPUBVAR const int exsltLibxmlVersion; + +/** + * EXSLT_COMMON_NAMESPACE: + * + * Namespace for EXSLT common functions + */ +#define EXSLT_COMMON_NAMESPACE ((const xmlChar *) "http://exslt.org/common") +/** + * EXSLT_CRYPTO_NAMESPACE: + * + * Namespace for EXSLT crypto functions + */ +#define EXSLT_CRYPTO_NAMESPACE ((const xmlChar *) "http://exslt.org/crypto") +/** + * EXSLT_MATH_NAMESPACE: + * + * Namespace for EXSLT math functions + */ +#define EXSLT_MATH_NAMESPACE ((const xmlChar *) "http://exslt.org/math") +/** + * EXSLT_SETS_NAMESPACE: + * + * Namespace for EXSLT set functions + */ +#define EXSLT_SETS_NAMESPACE ((const xmlChar *) "http://exslt.org/sets") +/** + * EXSLT_FUNCTIONS_NAMESPACE: + * + * Namespace for EXSLT functions extension functions + */ +#define EXSLT_FUNCTIONS_NAMESPACE ((const xmlChar *) "http://exslt.org/functions") +/** + * EXSLT_STRINGS_NAMESPACE: + * + * Namespace for EXSLT strings functions + */ +#define EXSLT_STRINGS_NAMESPACE ((const xmlChar *) "http://exslt.org/strings") +/** + * EXSLT_DATE_NAMESPACE: + * + * Namespace for EXSLT date functions + */ +#define EXSLT_DATE_NAMESPACE ((const xmlChar *) "http://exslt.org/dates-and-times") +/** + * EXSLT_DYNAMIC_NAMESPACE: + * + * Namespace for EXSLT dynamic functions + */ +#define EXSLT_DYNAMIC_NAMESPACE ((const xmlChar *) "http://exslt.org/dynamic") + +/** + * SAXON_NAMESPACE: + * + * Namespace for SAXON extensions functions + */ +#define SAXON_NAMESPACE ((const xmlChar *) "http://icl.com/saxon") + +EXSLTPUBFUN void EXSLTCALL exsltCommonRegister (void); +#ifdef EXSLT_CRYPTO_ENABLED +EXSLTPUBFUN void EXSLTCALL exsltCryptoRegister (void); +#endif +EXSLTPUBFUN void EXSLTCALL exsltMathRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltSetsRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltFuncRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltStrRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltDateRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltSaxonRegister (void); +EXSLTPUBFUN void EXSLTCALL exsltDynRegister(void); + +EXSLTPUBFUN void EXSLTCALL exsltRegisterAll (void); + +EXSLTPUBFUN int EXSLTCALL exsltDateXpathCtxtRegister (xmlXPathContextPtr ctxt, + const xmlChar *prefix); +EXSLTPUBFUN int EXSLTCALL exsltMathXpathCtxtRegister 
(xmlXPathContextPtr ctxt, + const xmlChar *prefix); +EXSLTPUBFUN int EXSLTCALL exsltSetsXpathCtxtRegister (xmlXPathContextPtr ctxt, + const xmlChar *prefix); +EXSLTPUBFUN int EXSLTCALL exsltStrXpathCtxtRegister (xmlXPathContextPtr ctxt, + const xmlChar *prefix); + +#ifdef __cplusplus +} +#endif +#endif /* __EXSLT_H__ */ + diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exsltconfig.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exsltconfig.h new file mode 100644 index 0000000000000000000000000000000000000000..9ae82143143637b0b70262146de89e60b7064c7f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exsltconfig.h @@ -0,0 +1,70 @@ +/* + * exsltconfig.h: compile-time version information for the EXSLT library + * + * See Copyright for the status of this software. + * + * daniel@veillard.com + */ + +#ifndef __XML_EXSLTCONFIG_H__ +#define __XML_EXSLTCONFIG_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * LIBEXSLT_DOTTED_VERSION: + * + * the version string like "1.2.3" + */ +#define LIBEXSLT_DOTTED_VERSION "0.8.21" + +/** + * LIBEXSLT_VERSION: + * + * the version number: 1.2.3 value is 10203 + */ +#define LIBEXSLT_VERSION 821 + +/** + * LIBEXSLT_VERSION_STRING: + * + * the version number string, 1.2.3 value is "10203" + */ +#define LIBEXSLT_VERSION_STRING "821" + +/** + * LIBEXSLT_VERSION_EXTRA: + * + * extra version information, used to show a Git commit description + */ +#define LIBEXSLT_VERSION_EXTRA "" + +/** + * WITH_CRYPTO: + * + * Whether crypto support is configured into exslt + */ +#if 0 +#define EXSLT_CRYPTO_ENABLED +#endif + +/** + * ATTRIBUTE_UNUSED: + * + * This macro is used to flag unused function parameters to GCC + */ +#ifdef __GNUC__ +#ifndef ATTRIBUTE_UNUSED +#define ATTRIBUTE_UNUSED __attribute__((unused)) +#endif +#else +#define ATTRIBUTE_UNUSED +#endif + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_EXSLTCONFIG_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exsltexports.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exsltexports.h new file mode 100644 index 0000000000000000000000000000000000000000..ee79ec7aea0feff6988cbfa08b716498760f409e --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libexslt/exsltexports.h @@ -0,0 +1,63 @@ +/* + * Summary: macros for marking symbols as exportable/importable. + * + * Copy: See Copyright for the status of this software. 
+ */ + +#ifndef __EXSLT_EXPORTS_H__ +#define __EXSLT_EXPORTS_H__ + +#if defined(_WIN32) || defined(__CYGWIN__) +/** DOC_DISABLE */ + +#ifdef LIBEXSLT_STATIC + #define EXSLTPUBLIC +#elif defined(IN_LIBEXSLT) + #define EXSLTPUBLIC __declspec(dllexport) +#else + #define EXSLTPUBLIC __declspec(dllimport) +#endif + +#define EXSLTCALL __cdecl + +/** DOC_ENABLE */ +#else /* not Windows */ + +/** + * EXSLTPUBLIC: + * + * Macro which declares a public symbol + */ +#define EXSLTPUBLIC + +/** + * EXSLTCALL: + * + * Macro which declares the calling convention for exported functions + */ +#define EXSLTCALL + +#endif /* platform switch */ + +/* + * EXSLTPUBFUN: + * + * Macro which declares an exportable function + */ +#define EXSLTPUBFUN EXSLTPUBLIC + +/** + * EXSLTPUBVAR: + * + * Macro which declares an exportable variable + */ +#define EXSLTPUBVAR EXSLTPUBLIC extern + +/* Compatibility */ +#if !defined(LIBEXSLT_PUBLIC) +#define LIBEXSLT_PUBLIC EXSLTPUBVAR +#endif + +#endif /* __EXSLT_EXPORTS_H__ */ + + diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/SAX.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/SAX.h new file mode 100644 index 0000000000000000000000000000000000000000..eea1057bfcc533cd2309a4de73767f004c97380f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/SAX.h @@ -0,0 +1,202 @@ +/* + * Summary: Old SAX version 1 handler, deprecated + * Description: DEPRECATED set of SAX version 1 interfaces used to + * build the DOM tree. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SAX_H__ +#define __XML_SAX_H__ + +#include +#include + +#ifdef LIBXML_LEGACY_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif +XML_DEPRECATED +XMLPUBFUN const xmlChar * + getPublicId (void *ctx); +XML_DEPRECATED +XMLPUBFUN const xmlChar * + getSystemId (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + setDocumentLocator (void *ctx, + xmlSAXLocatorPtr loc); + +XML_DEPRECATED +XMLPUBFUN int + getLineNumber (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + getColumnNumber (void *ctx); + +XML_DEPRECATED +XMLPUBFUN int + isStandalone (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + hasInternalSubset (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + hasExternalSubset (void *ctx); + +XML_DEPRECATED +XMLPUBFUN void + internalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XML_DEPRECATED +XMLPUBFUN void + externalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XML_DEPRECATED +XMLPUBFUN xmlEntityPtr + getEntity (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN xmlEntityPtr + getParameterEntity (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN xmlParserInputPtr + resolveEntity (void *ctx, + const xmlChar *publicId, + const xmlChar *systemId); + +XML_DEPRECATED +XMLPUBFUN void + entityDecl (void *ctx, + const xmlChar *name, + int type, + const xmlChar *publicId, + const xmlChar *systemId, + xmlChar *content); +XML_DEPRECATED +XMLPUBFUN void + attributeDecl (void *ctx, + const xmlChar *elem, + const xmlChar *fullname, + int type, + int def, + const xmlChar *defaultValue, + xmlEnumerationPtr tree); +XML_DEPRECATED +XMLPUBFUN void + elementDecl (void *ctx, + const xmlChar *name, + int type, + xmlElementContentPtr content); +XML_DEPRECATED +XMLPUBFUN void + notationDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId); +XML_DEPRECATED 
+XMLPUBFUN void + unparsedEntityDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId, + const xmlChar *notationName); + +XML_DEPRECATED +XMLPUBFUN void + startDocument (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + endDocument (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + attribute (void *ctx, + const xmlChar *fullname, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN void + startElement (void *ctx, + const xmlChar *fullname, + const xmlChar **atts); +XML_DEPRECATED +XMLPUBFUN void + endElement (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN void + reference (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN void + characters (void *ctx, + const xmlChar *ch, + int len); +XML_DEPRECATED +XMLPUBFUN void + ignorableWhitespace (void *ctx, + const xmlChar *ch, + int len); +XML_DEPRECATED +XMLPUBFUN void + processingInstruction (void *ctx, + const xmlChar *target, + const xmlChar *data); +XML_DEPRECATED +XMLPUBFUN void + globalNamespace (void *ctx, + const xmlChar *href, + const xmlChar *prefix); +XML_DEPRECATED +XMLPUBFUN void + setNamespace (void *ctx, + const xmlChar *name); +XML_DEPRECATED +XMLPUBFUN xmlNsPtr + getNamespace (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + checkNamespace (void *ctx, + xmlChar *nameSpace); +XML_DEPRECATED +XMLPUBFUN void + namespaceDecl (void *ctx, + const xmlChar *href, + const xmlChar *prefix); +XML_DEPRECATED +XMLPUBFUN void + comment (void *ctx, + const xmlChar *value); +XML_DEPRECATED +XMLPUBFUN void + cdataBlock (void *ctx, + const xmlChar *value, + int len); + +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN void + initxmlDefaultSAXHandler (xmlSAXHandlerV1 *hdlr, + int warning); +#ifdef LIBXML_HTML_ENABLED +XML_DEPRECATED +XMLPUBFUN void + inithtmlDefaultSAXHandler (xmlSAXHandlerV1 *hdlr); +#endif +#endif /* LIBXML_SAX1_ENABLED */ + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_LEGACY_ENABLED */ + +#endif /* __XML_SAX_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/SAX2.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/SAX2.h new file mode 100644 index 0000000000000000000000000000000000000000..4c4ecce8e598b2bb2011f9daf6367bcc8f028b88 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/SAX2.h @@ -0,0 +1,171 @@ +/* + * Summary: SAX2 parser interface used to build the DOM tree + * Description: those are the default SAX2 interfaces used by + * the library when building DOM tree. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SAX2_H__ +#define __XML_SAX2_H__ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif +XMLPUBFUN const xmlChar * + xmlSAX2GetPublicId (void *ctx); +XMLPUBFUN const xmlChar * + xmlSAX2GetSystemId (void *ctx); +XMLPUBFUN void + xmlSAX2SetDocumentLocator (void *ctx, + xmlSAXLocatorPtr loc); + +XMLPUBFUN int + xmlSAX2GetLineNumber (void *ctx); +XMLPUBFUN int + xmlSAX2GetColumnNumber (void *ctx); + +XMLPUBFUN int + xmlSAX2IsStandalone (void *ctx); +XMLPUBFUN int + xmlSAX2HasInternalSubset (void *ctx); +XMLPUBFUN int + xmlSAX2HasExternalSubset (void *ctx); + +XMLPUBFUN void + xmlSAX2InternalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN void + xmlSAX2ExternalSubset (void *ctx, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlEntityPtr + xmlSAX2GetEntity (void *ctx, + const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlSAX2GetParameterEntity (void *ctx, + const xmlChar *name); +XMLPUBFUN xmlParserInputPtr + xmlSAX2ResolveEntity (void *ctx, + const xmlChar *publicId, + const xmlChar *systemId); + +XMLPUBFUN void + xmlSAX2EntityDecl (void *ctx, + const xmlChar *name, + int type, + const xmlChar *publicId, + const xmlChar *systemId, + xmlChar *content); +XMLPUBFUN void + xmlSAX2AttributeDecl (void *ctx, + const xmlChar *elem, + const xmlChar *fullname, + int type, + int def, + const xmlChar *defaultValue, + xmlEnumerationPtr tree); +XMLPUBFUN void + xmlSAX2ElementDecl (void *ctx, + const xmlChar *name, + int type, + xmlElementContentPtr content); +XMLPUBFUN void + xmlSAX2NotationDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId); +XMLPUBFUN void + xmlSAX2UnparsedEntityDecl (void *ctx, + const xmlChar *name, + const xmlChar *publicId, + const xmlChar *systemId, + const xmlChar *notationName); + +XMLPUBFUN void + xmlSAX2StartDocument (void *ctx); +XMLPUBFUN void + xmlSAX2EndDocument (void *ctx); +#if defined(LIBXML_SAX1_ENABLED) || defined(LIBXML_HTML_ENABLED) || \ + defined(LIBXML_WRITER_ENABLED) || defined(LIBXML_LEGACY_ENABLED) +XMLPUBFUN void + xmlSAX2StartElement (void *ctx, + const xmlChar *fullname, + const xmlChar **atts); +XMLPUBFUN void + xmlSAX2EndElement (void *ctx, + const xmlChar *name); +#endif /* LIBXML_SAX1_ENABLED or LIBXML_HTML_ENABLED or LIBXML_LEGACY_ENABLED */ +XMLPUBFUN void + xmlSAX2StartElementNs (void *ctx, + const xmlChar *localname, + const xmlChar *prefix, + const xmlChar *URI, + int nb_namespaces, + const xmlChar **namespaces, + int nb_attributes, + int nb_defaulted, + const xmlChar **attributes); +XMLPUBFUN void + xmlSAX2EndElementNs (void *ctx, + const xmlChar *localname, + const xmlChar *prefix, + const xmlChar *URI); +XMLPUBFUN void + xmlSAX2Reference (void *ctx, + const xmlChar *name); +XMLPUBFUN void + xmlSAX2Characters (void *ctx, + const xmlChar *ch, + int len); +XMLPUBFUN void + xmlSAX2IgnorableWhitespace (void *ctx, + const xmlChar *ch, + int len); +XMLPUBFUN void + xmlSAX2ProcessingInstruction (void *ctx, + const xmlChar *target, + const xmlChar *data); +XMLPUBFUN void + xmlSAX2Comment (void *ctx, + const xmlChar *value); +XMLPUBFUN void + xmlSAX2CDataBlock (void *ctx, + const xmlChar *value, + int len); + +#ifdef LIBXML_SAX1_ENABLED +XML_DEPRECATED +XMLPUBFUN int + xmlSAXDefaultVersion (int version); +#endif /* LIBXML_SAX1_ENABLED */ + +XMLPUBFUN int + xmlSAXVersion (xmlSAXHandler *hdlr, + int version); +XMLPUBFUN void + 
xmlSAX2InitDefaultSAXHandler (xmlSAXHandler *hdlr, + int warning); +#ifdef LIBXML_HTML_ENABLED +XMLPUBFUN void + xmlSAX2InitHtmlDefaultSAXHandler(xmlSAXHandler *hdlr); +XML_DEPRECATED +XMLPUBFUN void + htmlDefaultSAXHandlerInit (void); +#endif +XML_DEPRECATED +XMLPUBFUN void + xmlDefaultSAXHandlerInit (void); +#ifdef __cplusplus +} +#endif +#endif /* __XML_SAX2_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/__init__.py b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/__pycache__/__init__.cpython-310.pyc b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41776b24aec9e94c3d3b6d31c9896e38c2d4ce6f Binary files /dev/null and b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/__pycache__/__init__.cpython-310.pyc differ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/c14n.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/c14n.h new file mode 100644 index 0000000000000000000000000000000000000000..f9bdf9b595819eca5deed6cfc2ec07028e9763c2 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/c14n.h @@ -0,0 +1,126 @@ +/* + * Summary: Provide Canonical XML and Exclusive XML Canonicalization + * Description: the c14n modules provides a + * + * "Canonical XML" implementation + * http://www.w3.org/TR/xml-c14n + * + * and an + * + * "Exclusive XML Canonicalization" implementation + * http://www.w3.org/TR/xml-exc-c14n + + * Copy: See Copyright for the status of this software. + * + * Author: Aleksey Sanin + */ +#ifndef __XML_C14N_H__ +#define __XML_C14N_H__ + +#include + +#ifdef LIBXML_C14N_ENABLED + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + +/* + * XML Canonicalization + * http://www.w3.org/TR/xml-c14n + * + * Exclusive XML Canonicalization + * http://www.w3.org/TR/xml-exc-c14n + * + * Canonical form of an XML document could be created if and only if + * a) default attributes (if any) are added to all nodes + * b) all character and parsed entity references are resolved + * In order to achieve this in libxml2 the document MUST be loaded with + * following global settings: + * + * xmlLoadExtDtdDefaultValue = XML_DETECT_IDS | XML_COMPLETE_ATTRS; + * xmlSubstituteEntitiesDefault(1); + * + * or corresponding parser context setting: + * xmlParserCtxtPtr ctxt; + * + * ... + * ctxt->loadsubset = XML_DETECT_IDS | XML_COMPLETE_ATTRS; + * ctxt->replaceEntities = 1; + * ... 
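+ *
+ * As an illustrative sketch only (error handling omitted, and "doc" stands
+ * for a document parsed with the settings above), the whole document can
+ * then be canonicalized into memory with:
+ *
+ *    xmlChar *result = NULL;
+ *    int size = xmlC14NDocDumpMemory(doc, NULL, XML_C14N_1_0, NULL, 0, &result);
+ *
+ * where a non-negative size indicates success and the caller releases result
+ * with xmlFree() when done.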
+ */ + +/* + * xmlC14NMode: + * + * Predefined values for C14N modes + * + */ +typedef enum { + XML_C14N_1_0 = 0, /* Original C14N 1.0 spec */ + XML_C14N_EXCLUSIVE_1_0 = 1, /* Exclusive C14N 1.0 spec */ + XML_C14N_1_1 = 2 /* C14N 1.1 spec */ +} xmlC14NMode; + +XMLPUBFUN int + xmlC14NDocSaveTo (xmlDocPtr doc, + xmlNodeSetPtr nodes, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + xmlOutputBufferPtr buf); + +XMLPUBFUN int + xmlC14NDocDumpMemory (xmlDocPtr doc, + xmlNodeSetPtr nodes, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + xmlChar **doc_txt_ptr); + +XMLPUBFUN int + xmlC14NDocSave (xmlDocPtr doc, + xmlNodeSetPtr nodes, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + const char* filename, + int compression); + + +/** + * This is the core C14N function + */ +/** + * xmlC14NIsVisibleCallback: + * @user_data: user data + * @node: the current node + * @parent: the parent node + * + * Signature for a C14N callback on visible nodes + * + * Returns 1 if the node should be included + */ +typedef int (*xmlC14NIsVisibleCallback) (void* user_data, + xmlNodePtr node, + xmlNodePtr parent); + +XMLPUBFUN int + xmlC14NExecute (xmlDocPtr doc, + xmlC14NIsVisibleCallback is_visible_callback, + void* user_data, + int mode, /* a xmlC14NMode */ + xmlChar **inclusive_ns_prefixes, + int with_comments, + xmlOutputBufferPtr buf); + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + +#endif /* LIBXML_C14N_ENABLED */ +#endif /* __XML_C14N_H__ */ + diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/catalog.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/catalog.h new file mode 100644 index 0000000000000000000000000000000000000000..02fa7ab2a03a95563cf4ccdf7b644c95af1b15c2 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/catalog.h @@ -0,0 +1,182 @@ +/** + * Summary: interfaces to the Catalog handling system + * Description: the catalog module implements the support for + * XML Catalogs and SGML catalogs + * + * SGML Open Technical Resolution TR9401:1997. + * http://www.jclark.com/sp/catalog.htm + * + * XML Catalogs Working Draft 06 August 2001 + * http://www.oasis-open.org/committees/entity/spec-2001-08-06.html + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_CATALOG_H__ +#define __XML_CATALOG_H__ + +#include + +#include +#include +#include + +#ifdef LIBXML_CATALOG_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XML_CATALOGS_NAMESPACE: + * + * The namespace for the XML Catalogs elements. + */ +#define XML_CATALOGS_NAMESPACE \ + (const xmlChar *) "urn:oasis:names:tc:entity:xmlns:xml:catalog" +/** + * XML_CATALOG_PI: + * + * The specific XML Catalog Processing Instruction name. + */ +#define XML_CATALOG_PI \ + (const xmlChar *) "oasis-xml-catalog" + +/* + * The API is voluntarily limited to general cataloging. + */ +typedef enum { + XML_CATA_PREFER_NONE = 0, + XML_CATA_PREFER_PUBLIC = 1, + XML_CATA_PREFER_SYSTEM +} xmlCatalogPrefer; + +typedef enum { + XML_CATA_ALLOW_NONE = 0, + XML_CATA_ALLOW_GLOBAL = 1, + XML_CATA_ALLOW_DOCUMENT = 2, + XML_CATA_ALLOW_ALL = 3 +} xmlCatalogAllow; + +typedef struct _xmlCatalog xmlCatalog; +typedef xmlCatalog *xmlCatalogPtr; + +/* + * Operations on a given catalog. 
+ */ +XMLPUBFUN xmlCatalogPtr + xmlNewCatalog (int sgml); +XMLPUBFUN xmlCatalogPtr + xmlLoadACatalog (const char *filename); +XMLPUBFUN xmlCatalogPtr + xmlLoadSGMLSuperCatalog (const char *filename); +XMLPUBFUN int + xmlConvertSGMLCatalog (xmlCatalogPtr catal); +XMLPUBFUN int + xmlACatalogAdd (xmlCatalogPtr catal, + const xmlChar *type, + const xmlChar *orig, + const xmlChar *replace); +XMLPUBFUN int + xmlACatalogRemove (xmlCatalogPtr catal, + const xmlChar *value); +XMLPUBFUN xmlChar * + xmlACatalogResolve (xmlCatalogPtr catal, + const xmlChar *pubID, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlACatalogResolveSystem(xmlCatalogPtr catal, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlACatalogResolvePublic(xmlCatalogPtr catal, + const xmlChar *pubID); +XMLPUBFUN xmlChar * + xmlACatalogResolveURI (xmlCatalogPtr catal, + const xmlChar *URI); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlACatalogDump (xmlCatalogPtr catal, + FILE *out); +#endif /* LIBXML_OUTPUT_ENABLED */ +XMLPUBFUN void + xmlFreeCatalog (xmlCatalogPtr catal); +XMLPUBFUN int + xmlCatalogIsEmpty (xmlCatalogPtr catal); + +/* + * Global operations. + */ +XMLPUBFUN void + xmlInitializeCatalog (void); +XMLPUBFUN int + xmlLoadCatalog (const char *filename); +XMLPUBFUN void + xmlLoadCatalogs (const char *paths); +XMLPUBFUN void + xmlCatalogCleanup (void); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlCatalogDump (FILE *out); +#endif /* LIBXML_OUTPUT_ENABLED */ +XMLPUBFUN xmlChar * + xmlCatalogResolve (const xmlChar *pubID, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlCatalogResolveSystem (const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlCatalogResolvePublic (const xmlChar *pubID); +XMLPUBFUN xmlChar * + xmlCatalogResolveURI (const xmlChar *URI); +XMLPUBFUN int + xmlCatalogAdd (const xmlChar *type, + const xmlChar *orig, + const xmlChar *replace); +XMLPUBFUN int + xmlCatalogRemove (const xmlChar *value); +XMLPUBFUN xmlDocPtr + xmlParseCatalogFile (const char *filename); +XMLPUBFUN int + xmlCatalogConvert (void); + +/* + * Strictly minimal interfaces for per-document catalogs used + * by the parser. + */ +XMLPUBFUN void + xmlCatalogFreeLocal (void *catalogs); +XMLPUBFUN void * + xmlCatalogAddLocal (void *catalogs, + const xmlChar *URL); +XMLPUBFUN xmlChar * + xmlCatalogLocalResolve (void *catalogs, + const xmlChar *pubID, + const xmlChar *sysID); +XMLPUBFUN xmlChar * + xmlCatalogLocalResolveURI(void *catalogs, + const xmlChar *URI); +/* + * Preference settings. 
+ */ +XMLPUBFUN int + xmlCatalogSetDebug (int level); +XMLPUBFUN xmlCatalogPrefer + xmlCatalogSetDefaultPrefer(xmlCatalogPrefer prefer); +XMLPUBFUN void + xmlCatalogSetDefaults (xmlCatalogAllow allow); +XMLPUBFUN xmlCatalogAllow + xmlCatalogGetDefaults (void); + + +/* DEPRECATED interfaces */ +XMLPUBFUN const xmlChar * + xmlCatalogGetSystem (const xmlChar *sysID); +XMLPUBFUN const xmlChar * + xmlCatalogGetPublic (const xmlChar *pubID); + +#ifdef __cplusplus +} +#endif +#endif /* LIBXML_CATALOG_ENABLED */ +#endif /* __XML_CATALOG_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/chvalid.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/chvalid.h new file mode 100644 index 0000000000000000000000000000000000000000..8225c95ee865f9639f46bd22db54932f257df52c --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/chvalid.h @@ -0,0 +1,230 @@ +/* + * Summary: Unicode character range checking + * Description: this module exports interfaces for the character + * range validation APIs + * + * This file is automatically generated from the cvs source + * definition files using the genChRanges.py Python script + * + * Generation date: Mon Mar 27 11:09:48 2006 + * Sources: chvalid.def + * Author: William Brack + */ + +#ifndef __XML_CHVALID_H__ +#define __XML_CHVALID_H__ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Define our typedefs and structures + * + */ +typedef struct _xmlChSRange xmlChSRange; +typedef xmlChSRange *xmlChSRangePtr; +struct _xmlChSRange { + unsigned short low; + unsigned short high; +}; + +typedef struct _xmlChLRange xmlChLRange; +typedef xmlChLRange *xmlChLRangePtr; +struct _xmlChLRange { + unsigned int low; + unsigned int high; +}; + +typedef struct _xmlChRangeGroup xmlChRangeGroup; +typedef xmlChRangeGroup *xmlChRangeGroupPtr; +struct _xmlChRangeGroup { + int nbShortRange; + int nbLongRange; + const xmlChSRange *shortRange; /* points to an array of ranges */ + const xmlChLRange *longRange; +}; + +/** + * Range checking routine + */ +XMLPUBFUN int + xmlCharInRange(unsigned int val, const xmlChRangeGroup *group); + + +/** + * xmlIsBaseChar_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBaseChar_ch(c) (((0x41 <= (c)) && ((c) <= 0x5a)) || \ + ((0x61 <= (c)) && ((c) <= 0x7a)) || \ + ((0xc0 <= (c)) && ((c) <= 0xd6)) || \ + ((0xd8 <= (c)) && ((c) <= 0xf6)) || \ + (0xf8 <= (c))) + +/** + * xmlIsBaseCharQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBaseCharQ(c) (((c) < 0x100) ? \ + xmlIsBaseChar_ch((c)) : \ + xmlCharInRange((c), &xmlIsBaseCharGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsBaseCharGroup; + +/** + * xmlIsBlank_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBlank_ch(c) (((c) == 0x20) || \ + ((0x9 <= (c)) && ((c) <= 0xa)) || \ + ((c) == 0xd)) + +/** + * xmlIsBlankQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsBlankQ(c) (((c) < 0x100) ? \ + xmlIsBlank_ch((c)) : 0) + + +/** + * xmlIsChar_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsChar_ch(c) (((0x9 <= (c)) && ((c) <= 0xa)) || \ + ((c) == 0xd) || \ + (0x20 <= (c))) + +/** + * xmlIsCharQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsCharQ(c) (((c) < 0x100) ? 
\ + xmlIsChar_ch((c)) :\ + (((0x100 <= (c)) && ((c) <= 0xd7ff)) || \ + ((0xe000 <= (c)) && ((c) <= 0xfffd)) || \ + ((0x10000 <= (c)) && ((c) <= 0x10ffff)))) + +XMLPUBVAR const xmlChRangeGroup xmlIsCharGroup; + +/** + * xmlIsCombiningQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsCombiningQ(c) (((c) < 0x100) ? \ + 0 : \ + xmlCharInRange((c), &xmlIsCombiningGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsCombiningGroup; + +/** + * xmlIsDigit_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsDigit_ch(c) (((0x30 <= (c)) && ((c) <= 0x39))) + +/** + * xmlIsDigitQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsDigitQ(c) (((c) < 0x100) ? \ + xmlIsDigit_ch((c)) : \ + xmlCharInRange((c), &xmlIsDigitGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsDigitGroup; + +/** + * xmlIsExtender_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsExtender_ch(c) (((c) == 0xb7)) + +/** + * xmlIsExtenderQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsExtenderQ(c) (((c) < 0x100) ? \ + xmlIsExtender_ch((c)) : \ + xmlCharInRange((c), &xmlIsExtenderGroup)) + +XMLPUBVAR const xmlChRangeGroup xmlIsExtenderGroup; + +/** + * xmlIsIdeographicQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsIdeographicQ(c) (((c) < 0x100) ? \ + 0 :\ + (((0x4e00 <= (c)) && ((c) <= 0x9fa5)) || \ + ((c) == 0x3007) || \ + ((0x3021 <= (c)) && ((c) <= 0x3029)))) + +XMLPUBVAR const xmlChRangeGroup xmlIsIdeographicGroup; +XMLPUBVAR const unsigned char xmlIsPubidChar_tab[256]; + +/** + * xmlIsPubidChar_ch: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsPubidChar_ch(c) (xmlIsPubidChar_tab[(c)]) + +/** + * xmlIsPubidCharQ: + * @c: char to validate + * + * Automatically generated by genChRanges.py + */ +#define xmlIsPubidCharQ(c) (((c) < 0x100) ? \ + xmlIsPubidChar_ch((c)) : 0) + +XMLPUBFUN int + xmlIsBaseChar(unsigned int ch); +XMLPUBFUN int + xmlIsBlank(unsigned int ch); +XMLPUBFUN int + xmlIsChar(unsigned int ch); +XMLPUBFUN int + xmlIsCombining(unsigned int ch); +XMLPUBFUN int + xmlIsDigit(unsigned int ch); +XMLPUBFUN int + xmlIsExtender(unsigned int ch); +XMLPUBFUN int + xmlIsIdeographic(unsigned int ch); +XMLPUBFUN int + xmlIsPubidChar(unsigned int ch); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_CHVALID_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/debugXML.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/debugXML.h new file mode 100644 index 0000000000000000000000000000000000000000..827468736484b12fe38fe7d445d5a8bb2f604c89 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/debugXML.h @@ -0,0 +1,217 @@ +/* + * Summary: Tree debugging APIs + * Description: Interfaces to a set of routines used for debugging the tree + * produced by the XML parser. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __DEBUG_XML__ +#define __DEBUG_XML__ +#include +#include +#include + +#ifdef LIBXML_DEBUG_ENABLED + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The standard Dump routines. 
+ */ +XMLPUBFUN void + xmlDebugDumpString (FILE *output, + const xmlChar *str); +XMLPUBFUN void + xmlDebugDumpAttr (FILE *output, + xmlAttrPtr attr, + int depth); +XMLPUBFUN void + xmlDebugDumpAttrList (FILE *output, + xmlAttrPtr attr, + int depth); +XMLPUBFUN void + xmlDebugDumpOneNode (FILE *output, + xmlNodePtr node, + int depth); +XMLPUBFUN void + xmlDebugDumpNode (FILE *output, + xmlNodePtr node, + int depth); +XMLPUBFUN void + xmlDebugDumpNodeList (FILE *output, + xmlNodePtr node, + int depth); +XMLPUBFUN void + xmlDebugDumpDocumentHead(FILE *output, + xmlDocPtr doc); +XMLPUBFUN void + xmlDebugDumpDocument (FILE *output, + xmlDocPtr doc); +XMLPUBFUN void + xmlDebugDumpDTD (FILE *output, + xmlDtdPtr dtd); +XMLPUBFUN void + xmlDebugDumpEntities (FILE *output, + xmlDocPtr doc); + +/**************************************************************** + * * + * Checking routines * + * * + ****************************************************************/ + +XMLPUBFUN int + xmlDebugCheckDocument (FILE * output, + xmlDocPtr doc); + +/**************************************************************** + * * + * XML shell helpers * + * * + ****************************************************************/ + +XMLPUBFUN void + xmlLsOneNode (FILE *output, xmlNodePtr node); +XMLPUBFUN int + xmlLsCountNode (xmlNodePtr node); + +XMLPUBFUN const char * + xmlBoolToText (int boolval); + +/**************************************************************** + * * + * The XML shell related structures and functions * + * * + ****************************************************************/ + +#ifdef LIBXML_XPATH_ENABLED +/** + * xmlShellReadlineFunc: + * @prompt: a string prompt + * + * This is a generic signature for the XML shell input function. + * + * Returns a string which will be freed by the Shell. + */ +typedef char * (* xmlShellReadlineFunc)(char *prompt); + +/** + * xmlShellCtxt: + * + * A debugging shell context. + * TODO: add the defined function tables. + */ +typedef struct _xmlShellCtxt xmlShellCtxt; +typedef xmlShellCtxt *xmlShellCtxtPtr; +struct _xmlShellCtxt { + char *filename; + xmlDocPtr doc; + xmlNodePtr node; + xmlXPathContextPtr pctxt; + int loaded; + FILE *output; + xmlShellReadlineFunc input; +}; + +/** + * xmlShellCmd: + * @ctxt: a shell context + * @arg: a string argument + * @node: a first node + * @node2: a second node + * + * This is a generic signature for the XML shell functions. + * + * Returns an int, negative returns indicating errors. 
+ */ +typedef int (* xmlShellCmd) (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); + +XMLPUBFUN void + xmlShellPrintXPathError (int errorType, + const char *arg); +XMLPUBFUN void + xmlShellPrintXPathResult(xmlXPathObjectPtr list); +XMLPUBFUN int + xmlShellList (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellBase (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellDir (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellLoad (xmlShellCtxtPtr ctxt, + char *filename, + xmlNodePtr node, + xmlNodePtr node2); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlShellPrintNode (xmlNodePtr node); +XMLPUBFUN int + xmlShellCat (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellWrite (xmlShellCtxtPtr ctxt, + char *filename, + xmlNodePtr node, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellSave (xmlShellCtxtPtr ctxt, + char *filename, + xmlNodePtr node, + xmlNodePtr node2); +#endif /* LIBXML_OUTPUT_ENABLED */ +#ifdef LIBXML_VALID_ENABLED +XMLPUBFUN int + xmlShellValidate (xmlShellCtxtPtr ctxt, + char *dtd, + xmlNodePtr node, + xmlNodePtr node2); +#endif /* LIBXML_VALID_ENABLED */ +XMLPUBFUN int + xmlShellDu (xmlShellCtxtPtr ctxt, + char *arg, + xmlNodePtr tree, + xmlNodePtr node2); +XMLPUBFUN int + xmlShellPwd (xmlShellCtxtPtr ctxt, + char *buffer, + xmlNodePtr node, + xmlNodePtr node2); + +/* + * The Shell interface. + */ +XMLPUBFUN void + xmlShell (xmlDocPtr doc, + char *filename, + xmlShellReadlineFunc input, + FILE *output); + +#endif /* LIBXML_XPATH_ENABLED */ + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_DEBUG_ENABLED */ +#endif /* __DEBUG_XML__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/entities.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/entities.h new file mode 100644 index 0000000000000000000000000000000000000000..f679375321e6499e8ca4faa7a312b1b88bfa325a --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/entities.h @@ -0,0 +1,155 @@ +/* + * Summary: interface for the XML entities handling + * Description: this module provides some of the entity API needed + * for the parser and applications. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_ENTITIES_H__ +#define __XML_ENTITIES_H__ + +#include +#define XML_TREE_INTERNALS +#include +#undef XML_TREE_INTERNALS + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The different valid entity types. + */ +typedef enum { + XML_INTERNAL_GENERAL_ENTITY = 1, + XML_EXTERNAL_GENERAL_PARSED_ENTITY = 2, + XML_EXTERNAL_GENERAL_UNPARSED_ENTITY = 3, + XML_INTERNAL_PARAMETER_ENTITY = 4, + XML_EXTERNAL_PARAMETER_ENTITY = 5, + XML_INTERNAL_PREDEFINED_ENTITY = 6 +} xmlEntityType; + +/* + * An unit of storage for an entity, contains the string, the value + * and the linkind data needed for the linking in the hash table. + */ + +struct _xmlEntity { + void *_private; /* application data */ + xmlElementType type; /* XML_ENTITY_DECL, must be second ! 
*/ + const xmlChar *name; /* Entity name */ + struct _xmlNode *children; /* First child link */ + struct _xmlNode *last; /* Last child link */ + struct _xmlDtd *parent; /* -> DTD */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + xmlChar *orig; /* content without ref substitution */ + xmlChar *content; /* content or ndata if unparsed */ + int length; /* the content length */ + xmlEntityType etype; /* The entity type */ + const xmlChar *ExternalID; /* External identifier for PUBLIC */ + const xmlChar *SystemID; /* URI for a SYSTEM or PUBLIC Entity */ + + struct _xmlEntity *nexte; /* unused */ + const xmlChar *URI; /* the full URI as computed */ + int owner; /* does the entity own the childrens */ + int flags; /* various flags */ + unsigned long expandedSize; /* expanded size */ +}; + +/* + * All entities are stored in an hash table. + * There is 2 separate hash tables for global and parameter entities. + */ + +typedef struct _xmlHashTable xmlEntitiesTable; +typedef xmlEntitiesTable *xmlEntitiesTablePtr; + +/* + * External functions: + */ + +#ifdef LIBXML_LEGACY_ENABLED +XML_DEPRECATED +XMLPUBFUN void + xmlInitializePredefinedEntities (void); +#endif /* LIBXML_LEGACY_ENABLED */ + +XMLPUBFUN xmlEntityPtr + xmlNewEntity (xmlDocPtr doc, + const xmlChar *name, + int type, + const xmlChar *ExternalID, + const xmlChar *SystemID, + const xmlChar *content); +XMLPUBFUN void + xmlFreeEntity (xmlEntityPtr entity); +XMLPUBFUN xmlEntityPtr + xmlAddDocEntity (xmlDocPtr doc, + const xmlChar *name, + int type, + const xmlChar *ExternalID, + const xmlChar *SystemID, + const xmlChar *content); +XMLPUBFUN xmlEntityPtr + xmlAddDtdEntity (xmlDocPtr doc, + const xmlChar *name, + int type, + const xmlChar *ExternalID, + const xmlChar *SystemID, + const xmlChar *content); +XMLPUBFUN xmlEntityPtr + xmlGetPredefinedEntity (const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlGetDocEntity (const xmlDoc *doc, + const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlGetDtdEntity (xmlDocPtr doc, + const xmlChar *name); +XMLPUBFUN xmlEntityPtr + xmlGetParameterEntity (xmlDocPtr doc, + const xmlChar *name); +#ifdef LIBXML_LEGACY_ENABLED +XML_DEPRECATED +XMLPUBFUN const xmlChar * + xmlEncodeEntities (xmlDocPtr doc, + const xmlChar *input); +#endif /* LIBXML_LEGACY_ENABLED */ +XMLPUBFUN xmlChar * + xmlEncodeEntitiesReentrant(xmlDocPtr doc, + const xmlChar *input); +XMLPUBFUN xmlChar * + xmlEncodeSpecialChars (const xmlDoc *doc, + const xmlChar *input); +XMLPUBFUN xmlEntitiesTablePtr + xmlCreateEntitiesTable (void); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlEntitiesTablePtr + xmlCopyEntitiesTable (xmlEntitiesTablePtr table); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN void + xmlFreeEntitiesTable (xmlEntitiesTablePtr table); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlDumpEntitiesTable (xmlBufferPtr buf, + xmlEntitiesTablePtr table); +XMLPUBFUN void + xmlDumpEntityDecl (xmlBufferPtr buf, + xmlEntityPtr ent); +#endif /* LIBXML_OUTPUT_ENABLED */ +#ifdef LIBXML_LEGACY_ENABLED +XMLPUBFUN void + xmlCleanupPredefinedEntities(void); +#endif /* LIBXML_LEGACY_ENABLED */ + + +#ifdef __cplusplus +} +#endif + +# endif /* __XML_ENTITIES_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/hash.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/hash.h new file mode 100644 index 0000000000000000000000000000000000000000..f4af09ee5f94fa856cb650d218c1fbe1e84d10d5 --- /dev/null +++ 
b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/hash.h @@ -0,0 +1,232 @@ +/* + * Summary: Chained hash tables + * Description: This module implements the hash table support used in + * various places in the library. + * + * Copy: See Copyright for the status of this software. + * + * Author: Bjorn Reese + */ + +#ifndef __XML_HASH_H__ +#define __XML_HASH_H__ + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The hash table. + */ +typedef struct _xmlHashTable xmlHashTable; +typedef xmlHashTable *xmlHashTablePtr; + +/* + * Recent version of gcc produce a warning when a function pointer is assigned + * to an object pointer, or vice versa. The following macro is a dirty hack + * to allow suppression of the warning. If your architecture has function + * pointers which are a different size than a void pointer, there may be some + * serious trouble within the library. + */ +/** + * XML_CAST_FPTR: + * @fptr: pointer to a function + * + * Macro to do a casting from an object pointer to a + * function pointer without encountering a warning from + * gcc + * + * #define XML_CAST_FPTR(fptr) (*(void **)(&fptr)) + * This macro violated ISO C aliasing rules (gcc4 on s390 broke) + * so it is disabled now + */ + +#define XML_CAST_FPTR(fptr) fptr + +/* + * function types: + */ +/** + * xmlHashDeallocator: + * @payload: the data in the hash + * @name: the name associated + * + * Callback to free data from a hash. + */ +typedef void (*xmlHashDeallocator)(void *payload, const xmlChar *name); +/** + * xmlHashCopier: + * @payload: the data in the hash + * @name: the name associated + * + * Callback to copy data from a hash. + * + * Returns a copy of the data or NULL in case of error. + */ +typedef void *(*xmlHashCopier)(void *payload, const xmlChar *name); +/** + * xmlHashScanner: + * @payload: the data in the hash + * @data: extra scanner data + * @name: the name associated + * + * Callback when scanning data in a hash with the simple scanner. + */ +typedef void (*xmlHashScanner)(void *payload, void *data, const xmlChar *name); +/** + * xmlHashScannerFull: + * @payload: the data in the hash + * @data: extra scanner data + * @name: the name associated + * @name2: the second name associated + * @name3: the third name associated + * + * Callback when scanning data in a hash with the full scanner. + */ +typedef void (*xmlHashScannerFull)(void *payload, void *data, + const xmlChar *name, const xmlChar *name2, + const xmlChar *name3); + +/* + * Constructor and destructor. + */ +XMLPUBFUN xmlHashTablePtr + xmlHashCreate (int size); +XMLPUBFUN xmlHashTablePtr + xmlHashCreateDict (int size, + xmlDictPtr dict); +XMLPUBFUN void + xmlHashFree (xmlHashTablePtr hash, + xmlHashDeallocator dealloc); +XMLPUBFUN void + xmlHashDefaultDeallocator(void *entry, + const xmlChar *name); + +/* + * Add a new entry to the hash table. 
+ */ +XMLPUBFUN int + xmlHashAddEntry (xmlHashTablePtr hash, + const xmlChar *name, + void *userdata); +XMLPUBFUN int + xmlHashUpdateEntry (xmlHashTablePtr hash, + const xmlChar *name, + void *userdata, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashAddEntry2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + void *userdata); +XMLPUBFUN int + xmlHashUpdateEntry2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + void *userdata, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashAddEntry3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + void *userdata); +XMLPUBFUN int + xmlHashUpdateEntry3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + void *userdata, + xmlHashDeallocator dealloc); + +/* + * Remove an entry from the hash table. + */ +XMLPUBFUN int + xmlHashRemoveEntry (xmlHashTablePtr hash, + const xmlChar *name, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashRemoveEntry2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + xmlHashDeallocator dealloc); +XMLPUBFUN int + xmlHashRemoveEntry3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + xmlHashDeallocator dealloc); + +/* + * Retrieve the payload. + */ +XMLPUBFUN void * + xmlHashLookup (xmlHashTablePtr hash, + const xmlChar *name); +XMLPUBFUN void * + xmlHashLookup2 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2); +XMLPUBFUN void * + xmlHashLookup3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3); +XMLPUBFUN void * + xmlHashQLookup (xmlHashTablePtr hash, + const xmlChar *prefix, + const xmlChar *name); +XMLPUBFUN void * + xmlHashQLookup2 (xmlHashTablePtr hash, + const xmlChar *prefix, + const xmlChar *name, + const xmlChar *prefix2, + const xmlChar *name2); +XMLPUBFUN void * + xmlHashQLookup3 (xmlHashTablePtr hash, + const xmlChar *prefix, + const xmlChar *name, + const xmlChar *prefix2, + const xmlChar *name2, + const xmlChar *prefix3, + const xmlChar *name3); + +/* + * Helpers. + */ +XMLPUBFUN xmlHashTablePtr + xmlHashCopy (xmlHashTablePtr hash, + xmlHashCopier copy); +XMLPUBFUN int + xmlHashSize (xmlHashTablePtr hash); +XMLPUBFUN void + xmlHashScan (xmlHashTablePtr hash, + xmlHashScanner scan, + void *data); +XMLPUBFUN void + xmlHashScan3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + xmlHashScanner scan, + void *data); +XMLPUBFUN void + xmlHashScanFull (xmlHashTablePtr hash, + xmlHashScannerFull scan, + void *data); +XMLPUBFUN void + xmlHashScanFull3 (xmlHashTablePtr hash, + const xmlChar *name, + const xmlChar *name2, + const xmlChar *name3, + xmlHashScannerFull scan, + void *data); +#ifdef __cplusplus +} +#endif +#endif /* ! __XML_HASH_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/list.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/list.h new file mode 100644 index 0000000000000000000000000000000000000000..5eab8f59d83cdbbf8960af836bdc1849d3d450bc --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/list.h @@ -0,0 +1,137 @@ +/* + * Summary: lists interfaces + * Description: this module implement the list support used in + * various place in the library. + * + * Copy: See Copyright for the status of this software. 
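A minimal sketch of how the chained-hash entry points declared in hash.h above are typically combined: create a table, add a heap-allocated payload under a key, look it up, then free the table with a deallocator. The key "answer" and the payload are illustrative, and error returns are ignored for brevity.

    #include <libxml/hash.h>
    #include <libxml/xmlstring.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Payload destructor matching the xmlHashDeallocator signature. */
    static void free_payload(void *payload, const xmlChar *name) {
        (void) name;               /* the key string is managed by the table */
        free(payload);
    }

    int main(void) {
        xmlHashTablePtr table = xmlHashCreate(0);   /* 0: default table size */
        if (table == NULL)
            return 1;

        xmlHashAddEntry(table, BAD_CAST "answer", strdup("forty-two"));

        char *value = xmlHashLookup(table, BAD_CAST "answer");
        if (value != NULL)
            printf("answer -> %s\n", value);

        xmlHashFree(table, free_payload);   /* deallocator runs per payload */
        return 0;
    }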
+ * + * Author: Gary Pennington + */ + +#ifndef __XML_LINK_INCLUDE__ +#define __XML_LINK_INCLUDE__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct _xmlLink xmlLink; +typedef xmlLink *xmlLinkPtr; + +typedef struct _xmlList xmlList; +typedef xmlList *xmlListPtr; + +/** + * xmlListDeallocator: + * @lk: the data to deallocate + * + * Callback function used to free data from a list. + */ +typedef void (*xmlListDeallocator) (xmlLinkPtr lk); +/** + * xmlListDataCompare: + * @data0: the first data + * @data1: the second data + * + * Callback function used to compare 2 data. + * + * Returns 0 is equality, -1 or 1 otherwise depending on the ordering. + */ +typedef int (*xmlListDataCompare) (const void *data0, const void *data1); +/** + * xmlListWalker: + * @data: the data found in the list + * @user: extra user provided data to the walker + * + * Callback function used when walking a list with xmlListWalk(). + * + * Returns 0 to stop walking the list, 1 otherwise. + */ +typedef int (*xmlListWalker) (const void *data, void *user); + +/* Creation/Deletion */ +XMLPUBFUN xmlListPtr + xmlListCreate (xmlListDeallocator deallocator, + xmlListDataCompare compare); +XMLPUBFUN void + xmlListDelete (xmlListPtr l); + +/* Basic Operators */ +XMLPUBFUN void * + xmlListSearch (xmlListPtr l, + void *data); +XMLPUBFUN void * + xmlListReverseSearch (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListInsert (xmlListPtr l, + void *data) ; +XMLPUBFUN int + xmlListAppend (xmlListPtr l, + void *data) ; +XMLPUBFUN int + xmlListRemoveFirst (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListRemoveLast (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListRemoveAll (xmlListPtr l, + void *data); +XMLPUBFUN void + xmlListClear (xmlListPtr l); +XMLPUBFUN int + xmlListEmpty (xmlListPtr l); +XMLPUBFUN xmlLinkPtr + xmlListFront (xmlListPtr l); +XMLPUBFUN xmlLinkPtr + xmlListEnd (xmlListPtr l); +XMLPUBFUN int + xmlListSize (xmlListPtr l); + +XMLPUBFUN void + xmlListPopFront (xmlListPtr l); +XMLPUBFUN void + xmlListPopBack (xmlListPtr l); +XMLPUBFUN int + xmlListPushFront (xmlListPtr l, + void *data); +XMLPUBFUN int + xmlListPushBack (xmlListPtr l, + void *data); + +/* Advanced Operators */ +XMLPUBFUN void + xmlListReverse (xmlListPtr l); +XMLPUBFUN void + xmlListSort (xmlListPtr l); +XMLPUBFUN void + xmlListWalk (xmlListPtr l, + xmlListWalker walker, + void *user); +XMLPUBFUN void + xmlListReverseWalk (xmlListPtr l, + xmlListWalker walker, + void *user); +XMLPUBFUN void + xmlListMerge (xmlListPtr l1, + xmlListPtr l2); +XMLPUBFUN xmlListPtr + xmlListDup (const xmlListPtr old); +XMLPUBFUN int + xmlListCopy (xmlListPtr cur, + const xmlListPtr old); +/* Link operators */ +XMLPUBFUN void * + xmlLinkGetData (xmlLinkPtr lk); + +/* xmlListUnique() */ +/* xmlListSwap */ + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_LINK_INCLUDE__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/nanoftp.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/nanoftp.h new file mode 100644 index 0000000000000000000000000000000000000000..ed3ac4f1fd2d68901d0066a5eae3dd904a8634ea --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/nanoftp.h @@ -0,0 +1,186 @@ +/* + * Summary: minimal FTP implementation + * Description: minimal FTP implementation allowing to fetch resources + * like external subset. This module is DEPRECATED, do not + * use any of its functions. + * + * Copy: See Copyright for the status of this software. 
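The generic list declared in list.h above pairs a user-supplied comparator with optional ownership of the stored data. A hedged sketch, assuming the stored strings outlive the list (hence the NULL deallocator); cmp_str and print_item are made-up helper names.

    #include <libxml/list.h>
    #include <stdio.h>
    #include <string.h>

    /* Comparator matching xmlListDataCompare: order entries as C strings. */
    static int cmp_str(const void *a, const void *b) {
        return strcmp((const char *) a, (const char *) b);
    }

    /* Walker matching xmlListWalker: return 1 to continue, 0 to stop. */
    static int print_item(const void *data, void *user) {
        (void) user;
        printf("%s\n", (const char *) data);
        return 1;
    }

    int main(void) {
        xmlListPtr l = xmlListCreate(NULL, cmp_str); /* list does not own data */
        if (l == NULL)
            return 1;
        xmlListPushBack(l, "beta");
        xmlListPushBack(l, "alpha");
        xmlListSort(l);                    /* uses cmp_str registered above */
        xmlListWalk(l, print_item, NULL);  /* prints "alpha" then "beta" */
        xmlListDelete(l);
        return 0;
    }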
+ * + * Author: Daniel Veillard + */ + +#ifndef __NANO_FTP_H__ +#define __NANO_FTP_H__ + +#include + +#if defined(LIBXML_FTP_ENABLED) + +/* Needed for portability to Windows 64 bits */ +#if defined(_WIN32) +#include +#else +/** + * SOCKET: + * + * macro used to provide portability of code to windows sockets + */ +#define SOCKET int +/** + * INVALID_SOCKET: + * + * macro used to provide portability of code to windows sockets + * the value to be used when the socket is not valid + */ +#undef INVALID_SOCKET +#define INVALID_SOCKET (-1) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * ftpListCallback: + * @userData: user provided data for the callback + * @filename: the file name (including "->" when links are shown) + * @attrib: the attribute string + * @owner: the owner string + * @group: the group string + * @size: the file size + * @links: the link count + * @year: the year + * @month: the month + * @day: the day + * @hour: the hour + * @minute: the minute + * + * A callback for the xmlNanoFTPList command. + * Note that only one of year and day:minute are specified. + */ +typedef void (*ftpListCallback) (void *userData, + const char *filename, const char *attrib, + const char *owner, const char *group, + unsigned long size, int links, int year, + const char *month, int day, int hour, + int minute); +/** + * ftpDataCallback: + * @userData: the user provided context + * @data: the data received + * @len: its size in bytes + * + * A callback for the xmlNanoFTPGet command. + */ +typedef void (*ftpDataCallback) (void *userData, + const char *data, + int len); + +/* + * Init + */ +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPInit (void); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPCleanup (void); + +/* + * Creating/freeing contexts. + */ +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoFTPNewCtxt (const char *URL); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPFreeCtxt (void * ctx); +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoFTPConnectTo (const char *server, + int port); +/* + * Opening/closing session connections. + */ +XML_DEPRECATED +XMLPUBFUN void * + xmlNanoFTPOpen (const char *URL); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPConnect (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPClose (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPQuit (void *ctx); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPScanProxy (const char *URL); +XML_DEPRECATED +XMLPUBFUN void + xmlNanoFTPProxy (const char *host, + int port, + const char *user, + const char *passwd, + int type); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPUpdateURL (void *ctx, + const char *URL); + +/* + * Rather internal commands. + */ +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPGetResponse (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPCheckResponse (void *ctx); + +/* + * CD/DIR/GET handlers. 
+ */ +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPCwd (void *ctx, + const char *directory); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPDele (void *ctx, + const char *file); + +XML_DEPRECATED +XMLPUBFUN SOCKET + xmlNanoFTPGetConnection (void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPCloseConnection(void *ctx); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPList (void *ctx, + ftpListCallback callback, + void *userData, + const char *filename); +XML_DEPRECATED +XMLPUBFUN SOCKET + xmlNanoFTPGetSocket (void *ctx, + const char *filename); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPGet (void *ctx, + ftpDataCallback callback, + void *userData, + const char *filename); +XML_DEPRECATED +XMLPUBFUN int + xmlNanoFTPRead (void *ctx, + void *dest, + int len); + +#ifdef __cplusplus +} +#endif +#endif /* defined(LIBXML_FTP_ENABLED) || defined(LIBXML_LEGACY_ENABLED) */ +#endif /* __NANO_FTP_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/nanohttp.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/nanohttp.h new file mode 100644 index 0000000000000000000000000000000000000000..3b5e037fc5b9e2474b74012f56f630d8c6813795 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/nanohttp.h @@ -0,0 +1,81 @@ +/* + * Summary: minimal HTTP implementation + * Description: minimal HTTP implementation allowing to fetch resources + * like external subset. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __NANO_HTTP_H__ +#define __NANO_HTTP_H__ + +#include + +#ifdef LIBXML_HTTP_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif +XMLPUBFUN void + xmlNanoHTTPInit (void); +XMLPUBFUN void + xmlNanoHTTPCleanup (void); +XMLPUBFUN void + xmlNanoHTTPScanProxy (const char *URL); +XMLPUBFUN int + xmlNanoHTTPFetch (const char *URL, + const char *filename, + char **contentType); +XMLPUBFUN void * + xmlNanoHTTPMethod (const char *URL, + const char *method, + const char *input, + char **contentType, + const char *headers, + int ilen); +XMLPUBFUN void * + xmlNanoHTTPMethodRedir (const char *URL, + const char *method, + const char *input, + char **contentType, + char **redir, + const char *headers, + int ilen); +XMLPUBFUN void * + xmlNanoHTTPOpen (const char *URL, + char **contentType); +XMLPUBFUN void * + xmlNanoHTTPOpenRedir (const char *URL, + char **contentType, + char **redir); +XMLPUBFUN int + xmlNanoHTTPReturnCode (void *ctx); +XMLPUBFUN const char * + xmlNanoHTTPAuthHeader (void *ctx); +XMLPUBFUN const char * + xmlNanoHTTPRedir (void *ctx); +XMLPUBFUN int + xmlNanoHTTPContentLength( void * ctx ); +XMLPUBFUN const char * + xmlNanoHTTPEncoding (void *ctx); +XMLPUBFUN const char * + xmlNanoHTTPMimeType (void *ctx); +XMLPUBFUN int + xmlNanoHTTPRead (void *ctx, + void *dest, + int len); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN int + xmlNanoHTTPSave (void *ctxt, + const char *filename); +#endif /* LIBXML_OUTPUT_ENABLED */ +XMLPUBFUN void + xmlNanoHTTPClose (void *ctx); +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_HTTP_ENABLED */ +#endif /* __NANO_HTTP_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/relaxng.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/relaxng.h new file mode 100644 index 0000000000000000000000000000000000000000..079b7f125d0d2d31b740faecd146bbd85705868e --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/relaxng.h @@ -0,0 +1,219 @@ +/* + * Summary: implementation of the Relax-NG validation + * 
Description: implementation of the Relax-NG validation + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_RELAX_NG__ +#define __XML_RELAX_NG__ + +#include +#include +#include +#include + +#ifdef LIBXML_SCHEMAS_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct _xmlRelaxNG xmlRelaxNG; +typedef xmlRelaxNG *xmlRelaxNGPtr; + + +/** + * xmlRelaxNGValidityErrorFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of an error callback from a Relax-NG validation + */ +typedef void (*xmlRelaxNGValidityErrorFunc) (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); + +/** + * xmlRelaxNGValidityWarningFunc: + * @ctx: the validation context + * @msg: the message + * @...: extra arguments + * + * Signature of a warning callback from a Relax-NG validation + */ +typedef void (*xmlRelaxNGValidityWarningFunc) (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); + +/** + * A schemas validation context + */ +typedef struct _xmlRelaxNGParserCtxt xmlRelaxNGParserCtxt; +typedef xmlRelaxNGParserCtxt *xmlRelaxNGParserCtxtPtr; + +typedef struct _xmlRelaxNGValidCtxt xmlRelaxNGValidCtxt; +typedef xmlRelaxNGValidCtxt *xmlRelaxNGValidCtxtPtr; + +/* + * xmlRelaxNGValidErr: + * + * List of possible Relax NG validation errors + */ +typedef enum { + XML_RELAXNG_OK = 0, + XML_RELAXNG_ERR_MEMORY, + XML_RELAXNG_ERR_TYPE, + XML_RELAXNG_ERR_TYPEVAL, + XML_RELAXNG_ERR_DUPID, + XML_RELAXNG_ERR_TYPECMP, + XML_RELAXNG_ERR_NOSTATE, + XML_RELAXNG_ERR_NODEFINE, + XML_RELAXNG_ERR_LISTEXTRA, + XML_RELAXNG_ERR_LISTEMPTY, + XML_RELAXNG_ERR_INTERNODATA, + XML_RELAXNG_ERR_INTERSEQ, + XML_RELAXNG_ERR_INTEREXTRA, + XML_RELAXNG_ERR_ELEMNAME, + XML_RELAXNG_ERR_ATTRNAME, + XML_RELAXNG_ERR_ELEMNONS, + XML_RELAXNG_ERR_ATTRNONS, + XML_RELAXNG_ERR_ELEMWRONGNS, + XML_RELAXNG_ERR_ATTRWRONGNS, + XML_RELAXNG_ERR_ELEMEXTRANS, + XML_RELAXNG_ERR_ATTREXTRANS, + XML_RELAXNG_ERR_ELEMNOTEMPTY, + XML_RELAXNG_ERR_NOELEM, + XML_RELAXNG_ERR_NOTELEM, + XML_RELAXNG_ERR_ATTRVALID, + XML_RELAXNG_ERR_CONTENTVALID, + XML_RELAXNG_ERR_EXTRACONTENT, + XML_RELAXNG_ERR_INVALIDATTR, + XML_RELAXNG_ERR_DATAELEM, + XML_RELAXNG_ERR_VALELEM, + XML_RELAXNG_ERR_LISTELEM, + XML_RELAXNG_ERR_DATATYPE, + XML_RELAXNG_ERR_VALUE, + XML_RELAXNG_ERR_LIST, + XML_RELAXNG_ERR_NOGRAMMAR, + XML_RELAXNG_ERR_EXTRADATA, + XML_RELAXNG_ERR_LACKDATA, + XML_RELAXNG_ERR_INTERNAL, + XML_RELAXNG_ERR_ELEMWRONG, + XML_RELAXNG_ERR_TEXTWRONG +} xmlRelaxNGValidErr; + +/* + * xmlRelaxNGParserFlags: + * + * List of possible Relax NG Parser flags + */ +typedef enum { + XML_RELAXNGP_NONE = 0, + XML_RELAXNGP_FREE_DOC = 1, + XML_RELAXNGP_CRNG = 2 +} xmlRelaxNGParserFlag; + +XMLPUBFUN int + xmlRelaxNGInitTypes (void); +XML_DEPRECATED +XMLPUBFUN void + xmlRelaxNGCleanupTypes (void); + +/* + * Interfaces for parsing. 
+ */ +XMLPUBFUN xmlRelaxNGParserCtxtPtr + xmlRelaxNGNewParserCtxt (const char *URL); +XMLPUBFUN xmlRelaxNGParserCtxtPtr + xmlRelaxNGNewMemParserCtxt (const char *buffer, + int size); +XMLPUBFUN xmlRelaxNGParserCtxtPtr + xmlRelaxNGNewDocParserCtxt (xmlDocPtr doc); + +XMLPUBFUN int + xmlRelaxParserSetFlag (xmlRelaxNGParserCtxtPtr ctxt, + int flag); + +XMLPUBFUN void + xmlRelaxNGFreeParserCtxt (xmlRelaxNGParserCtxtPtr ctxt); +XMLPUBFUN void + xmlRelaxNGSetParserErrors(xmlRelaxNGParserCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc err, + xmlRelaxNGValidityWarningFunc warn, + void *ctx); +XMLPUBFUN int + xmlRelaxNGGetParserErrors(xmlRelaxNGParserCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc *err, + xmlRelaxNGValidityWarningFunc *warn, + void **ctx); +XMLPUBFUN void + xmlRelaxNGSetParserStructuredErrors( + xmlRelaxNGParserCtxtPtr ctxt, + xmlStructuredErrorFunc serror, + void *ctx); +XMLPUBFUN xmlRelaxNGPtr + xmlRelaxNGParse (xmlRelaxNGParserCtxtPtr ctxt); +XMLPUBFUN void + xmlRelaxNGFree (xmlRelaxNGPtr schema); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void + xmlRelaxNGDump (FILE *output, + xmlRelaxNGPtr schema); +XMLPUBFUN void + xmlRelaxNGDumpTree (FILE * output, + xmlRelaxNGPtr schema); +#endif /* LIBXML_OUTPUT_ENABLED */ +/* + * Interfaces for validating + */ +XMLPUBFUN void + xmlRelaxNGSetValidErrors(xmlRelaxNGValidCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc err, + xmlRelaxNGValidityWarningFunc warn, + void *ctx); +XMLPUBFUN int + xmlRelaxNGGetValidErrors(xmlRelaxNGValidCtxtPtr ctxt, + xmlRelaxNGValidityErrorFunc *err, + xmlRelaxNGValidityWarningFunc *warn, + void **ctx); +XMLPUBFUN void + xmlRelaxNGSetValidStructuredErrors(xmlRelaxNGValidCtxtPtr ctxt, + xmlStructuredErrorFunc serror, void *ctx); +XMLPUBFUN xmlRelaxNGValidCtxtPtr + xmlRelaxNGNewValidCtxt (xmlRelaxNGPtr schema); +XMLPUBFUN void + xmlRelaxNGFreeValidCtxt (xmlRelaxNGValidCtxtPtr ctxt); +XMLPUBFUN int + xmlRelaxNGValidateDoc (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc); +/* + * Interfaces for progressive validation when possible + */ +XMLPUBFUN int + xmlRelaxNGValidatePushElement (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); +XMLPUBFUN int + xmlRelaxNGValidatePushCData (xmlRelaxNGValidCtxtPtr ctxt, + const xmlChar *data, + int len); +XMLPUBFUN int + xmlRelaxNGValidatePopElement (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); +XMLPUBFUN int + xmlRelaxNGValidateFullElement (xmlRelaxNGValidCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr elem); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_SCHEMAS_ENABLED */ + +#endif /* __XML_RELAX_NG__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/schemasInternals.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/schemasInternals.h new file mode 100644 index 0000000000000000000000000000000000000000..e9d3b3c7abb1e5b6fa1b1f25fb9bc2ee5e46d56f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/schemasInternals.h @@ -0,0 +1,959 @@ +/* + * Summary: internal interfaces for XML Schemas + * Description: internal interfaces for the XML Schemas handling + * and schema validity checking + * The Schemas development is a Work In Progress. + * Some of those interfaces are not guaranteed to be API or ABI stable ! + * + * Copy: See Copyright for the status of this software. 
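An end-to-end sketch of the parse-then-validate flow declared in relaxng.h above: build a parser context from a schema, compile it, then validate a parsed instance document. "schema.rng" and "instance.xml" are placeholder paths, and the build is assumed to have LIBXML_SCHEMAS_ENABLED.

    #include <libxml/parser.h>
    #include <libxml/relaxng.h>
    #include <stdio.h>

    int main(void) {
        xmlRelaxNGParserCtxtPtr pctxt = xmlRelaxNGNewParserCtxt("schema.rng");
        if (pctxt == NULL)
            return 1;
        xmlRelaxNGPtr schema = xmlRelaxNGParse(pctxt);
        xmlRelaxNGFreeParserCtxt(pctxt);
        if (schema == NULL)
            return 1;

        xmlDocPtr doc = xmlReadFile("instance.xml", NULL, 0);
        if (doc != NULL) {
            xmlRelaxNGValidCtxtPtr vctxt = xmlRelaxNGNewValidCtxt(schema);
            if (vctxt != NULL) {
                /* 0: valid, > 0: validation errors, < 0: internal/API error */
                printf("validation result: %d\n",
                       xmlRelaxNGValidateDoc(vctxt, doc));
                xmlRelaxNGFreeValidCtxt(vctxt);
            }
            xmlFreeDoc(doc);
        }
        xmlRelaxNGFree(schema);
        return 0;
    }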
+ * + * Author: Daniel Veillard + */ + + +#ifndef __XML_SCHEMA_INTERNALS_H__ +#define __XML_SCHEMA_INTERNALS_H__ + +#include + +#ifdef LIBXML_SCHEMAS_ENABLED + +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum { + XML_SCHEMAS_UNKNOWN = 0, + XML_SCHEMAS_STRING = 1, + XML_SCHEMAS_NORMSTRING = 2, + XML_SCHEMAS_DECIMAL = 3, + XML_SCHEMAS_TIME = 4, + XML_SCHEMAS_GDAY = 5, + XML_SCHEMAS_GMONTH = 6, + XML_SCHEMAS_GMONTHDAY = 7, + XML_SCHEMAS_GYEAR = 8, + XML_SCHEMAS_GYEARMONTH = 9, + XML_SCHEMAS_DATE = 10, + XML_SCHEMAS_DATETIME = 11, + XML_SCHEMAS_DURATION = 12, + XML_SCHEMAS_FLOAT = 13, + XML_SCHEMAS_DOUBLE = 14, + XML_SCHEMAS_BOOLEAN = 15, + XML_SCHEMAS_TOKEN = 16, + XML_SCHEMAS_LANGUAGE = 17, + XML_SCHEMAS_NMTOKEN = 18, + XML_SCHEMAS_NMTOKENS = 19, + XML_SCHEMAS_NAME = 20, + XML_SCHEMAS_QNAME = 21, + XML_SCHEMAS_NCNAME = 22, + XML_SCHEMAS_ID = 23, + XML_SCHEMAS_IDREF = 24, + XML_SCHEMAS_IDREFS = 25, + XML_SCHEMAS_ENTITY = 26, + XML_SCHEMAS_ENTITIES = 27, + XML_SCHEMAS_NOTATION = 28, + XML_SCHEMAS_ANYURI = 29, + XML_SCHEMAS_INTEGER = 30, + XML_SCHEMAS_NPINTEGER = 31, + XML_SCHEMAS_NINTEGER = 32, + XML_SCHEMAS_NNINTEGER = 33, + XML_SCHEMAS_PINTEGER = 34, + XML_SCHEMAS_INT = 35, + XML_SCHEMAS_UINT = 36, + XML_SCHEMAS_LONG = 37, + XML_SCHEMAS_ULONG = 38, + XML_SCHEMAS_SHORT = 39, + XML_SCHEMAS_USHORT = 40, + XML_SCHEMAS_BYTE = 41, + XML_SCHEMAS_UBYTE = 42, + XML_SCHEMAS_HEXBINARY = 43, + XML_SCHEMAS_BASE64BINARY = 44, + XML_SCHEMAS_ANYTYPE = 45, + XML_SCHEMAS_ANYSIMPLETYPE = 46 +} xmlSchemaValType; + +/* + * XML Schemas defines multiple type of types. + */ +typedef enum { + XML_SCHEMA_TYPE_BASIC = 1, /* A built-in datatype */ + XML_SCHEMA_TYPE_ANY, + XML_SCHEMA_TYPE_FACET, + XML_SCHEMA_TYPE_SIMPLE, + XML_SCHEMA_TYPE_COMPLEX, + XML_SCHEMA_TYPE_SEQUENCE = 6, + XML_SCHEMA_TYPE_CHOICE, + XML_SCHEMA_TYPE_ALL, + XML_SCHEMA_TYPE_SIMPLE_CONTENT, + XML_SCHEMA_TYPE_COMPLEX_CONTENT, + XML_SCHEMA_TYPE_UR, + XML_SCHEMA_TYPE_RESTRICTION, + XML_SCHEMA_TYPE_EXTENSION, + XML_SCHEMA_TYPE_ELEMENT, + XML_SCHEMA_TYPE_ATTRIBUTE, + XML_SCHEMA_TYPE_ATTRIBUTEGROUP, + XML_SCHEMA_TYPE_GROUP, + XML_SCHEMA_TYPE_NOTATION, + XML_SCHEMA_TYPE_LIST, + XML_SCHEMA_TYPE_UNION, + XML_SCHEMA_TYPE_ANY_ATTRIBUTE, + XML_SCHEMA_TYPE_IDC_UNIQUE, + XML_SCHEMA_TYPE_IDC_KEY, + XML_SCHEMA_TYPE_IDC_KEYREF, + XML_SCHEMA_TYPE_PARTICLE = 25, + XML_SCHEMA_TYPE_ATTRIBUTE_USE, + XML_SCHEMA_FACET_MININCLUSIVE = 1000, + XML_SCHEMA_FACET_MINEXCLUSIVE, + XML_SCHEMA_FACET_MAXINCLUSIVE, + XML_SCHEMA_FACET_MAXEXCLUSIVE, + XML_SCHEMA_FACET_TOTALDIGITS, + XML_SCHEMA_FACET_FRACTIONDIGITS, + XML_SCHEMA_FACET_PATTERN, + XML_SCHEMA_FACET_ENUMERATION, + XML_SCHEMA_FACET_WHITESPACE, + XML_SCHEMA_FACET_LENGTH, + XML_SCHEMA_FACET_MAXLENGTH, + XML_SCHEMA_FACET_MINLENGTH, + XML_SCHEMA_EXTRA_QNAMEREF = 2000, + XML_SCHEMA_EXTRA_ATTR_USE_PROHIB +} xmlSchemaTypeType; + +typedef enum { + XML_SCHEMA_CONTENT_UNKNOWN = 0, + XML_SCHEMA_CONTENT_EMPTY = 1, + XML_SCHEMA_CONTENT_ELEMENTS, + XML_SCHEMA_CONTENT_MIXED, + XML_SCHEMA_CONTENT_SIMPLE, + XML_SCHEMA_CONTENT_MIXED_OR_ELEMENTS, /* Obsolete */ + XML_SCHEMA_CONTENT_BASIC, + XML_SCHEMA_CONTENT_ANY +} xmlSchemaContentType; + +typedef struct _xmlSchemaVal xmlSchemaVal; +typedef xmlSchemaVal *xmlSchemaValPtr; + +typedef struct _xmlSchemaType xmlSchemaType; +typedef xmlSchemaType *xmlSchemaTypePtr; + +typedef struct _xmlSchemaFacet xmlSchemaFacet; +typedef xmlSchemaFacet *xmlSchemaFacetPtr; + +/** + * Annotation + */ +typedef struct _xmlSchemaAnnot xmlSchemaAnnot; +typedef 
xmlSchemaAnnot *xmlSchemaAnnotPtr; +struct _xmlSchemaAnnot { + struct _xmlSchemaAnnot *next; + xmlNodePtr content; /* the annotation */ +}; + +/** + * XML_SCHEMAS_ANYATTR_SKIP: + * + * Skip unknown attribute from validation + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ANYATTR_SKIP 1 +/** + * XML_SCHEMAS_ANYATTR_LAX: + * + * Ignore validation non definition on attributes + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ANYATTR_LAX 2 +/** + * XML_SCHEMAS_ANYATTR_STRICT: + * + * Apply strict validation rules on attributes + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ANYATTR_STRICT 3 +/** + * XML_SCHEMAS_ANY_SKIP: + * + * Skip unknown attribute from validation + */ +#define XML_SCHEMAS_ANY_SKIP 1 +/** + * XML_SCHEMAS_ANY_LAX: + * + * Used by wildcards. + * Validate if type found, don't worry if not found + */ +#define XML_SCHEMAS_ANY_LAX 2 +/** + * XML_SCHEMAS_ANY_STRICT: + * + * Used by wildcards. + * Apply strict validation rules + */ +#define XML_SCHEMAS_ANY_STRICT 3 +/** + * XML_SCHEMAS_ATTR_USE_PROHIBITED: + * + * Used by wildcards. + * The attribute is prohibited. + */ +#define XML_SCHEMAS_ATTR_USE_PROHIBITED 0 +/** + * XML_SCHEMAS_ATTR_USE_REQUIRED: + * + * The attribute is required. + */ +#define XML_SCHEMAS_ATTR_USE_REQUIRED 1 +/** + * XML_SCHEMAS_ATTR_USE_OPTIONAL: + * + * The attribute is optional. + */ +#define XML_SCHEMAS_ATTR_USE_OPTIONAL 2 +/** + * XML_SCHEMAS_ATTR_GLOBAL: + * + * allow elements in no namespace + */ +#define XML_SCHEMAS_ATTR_GLOBAL 1 << 0 +/** + * XML_SCHEMAS_ATTR_NSDEFAULT: + * + * allow elements in no namespace + */ +#define XML_SCHEMAS_ATTR_NSDEFAULT 1 << 7 +/** + * XML_SCHEMAS_ATTR_INTERNAL_RESOLVED: + * + * this is set when the "type" and "ref" references + * have been resolved. + */ +#define XML_SCHEMAS_ATTR_INTERNAL_RESOLVED 1 << 8 +/** + * XML_SCHEMAS_ATTR_FIXED: + * + * the attribute has a fixed value + */ +#define XML_SCHEMAS_ATTR_FIXED 1 << 9 + +/** + * xmlSchemaAttribute: + * An attribute definition. + */ + +typedef struct _xmlSchemaAttribute xmlSchemaAttribute; +typedef xmlSchemaAttribute *xmlSchemaAttributePtr; +struct _xmlSchemaAttribute { + xmlSchemaTypeType type; + struct _xmlSchemaAttribute *next; /* the next attribute (not used?) */ + const xmlChar *name; /* the name of the declaration */ + const xmlChar *id; /* Deprecated; not used */ + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + const xmlChar *typeName; /* the local name of the type definition */ + const xmlChar *typeNs; /* the ns URI of the type definition */ + xmlSchemaAnnotPtr annot; + + xmlSchemaTypePtr base; /* Deprecated; not used */ + int occurs; /* Deprecated; not used */ + const xmlChar *defValue; /* The initial value of the value constraint */ + xmlSchemaTypePtr subtypes; /* the type definition */ + xmlNodePtr node; + const xmlChar *targetNamespace; + int flags; + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaValPtr defVal; /* The compiled value constraint */ + xmlSchemaAttributePtr refDecl; /* Deprecated; not used */ +}; + +/** + * xmlSchemaAttributeLink: + * Used to build a list of attribute uses on complexType definitions. + * WARNING: Deprecated; not used. + */ +typedef struct _xmlSchemaAttributeLink xmlSchemaAttributeLink; +typedef xmlSchemaAttributeLink *xmlSchemaAttributeLinkPtr; +struct _xmlSchemaAttributeLink { + struct _xmlSchemaAttributeLink *next;/* the next attribute link ... 
*/ + struct _xmlSchemaAttribute *attr;/* the linked attribute */ +}; + +/** + * XML_SCHEMAS_WILDCARD_COMPLETE: + * + * If the wildcard is complete. + */ +#define XML_SCHEMAS_WILDCARD_COMPLETE 1 << 0 + +/** + * xmlSchemaCharValueLink: + * Used to build a list of namespaces on wildcards. + */ +typedef struct _xmlSchemaWildcardNs xmlSchemaWildcardNs; +typedef xmlSchemaWildcardNs *xmlSchemaWildcardNsPtr; +struct _xmlSchemaWildcardNs { + struct _xmlSchemaWildcardNs *next;/* the next constraint link ... */ + const xmlChar *value;/* the value */ +}; + +/** + * xmlSchemaWildcard. + * A wildcard. + */ +typedef struct _xmlSchemaWildcard xmlSchemaWildcard; +typedef xmlSchemaWildcard *xmlSchemaWildcardPtr; +struct _xmlSchemaWildcard { + xmlSchemaTypeType type; /* The kind of type */ + const xmlChar *id; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + xmlNodePtr node; + int minOccurs; /* Deprecated; not used */ + int maxOccurs; /* Deprecated; not used */ + int processContents; + int any; /* Indicates if the ns constraint is of ##any */ + xmlSchemaWildcardNsPtr nsSet; /* The list of allowed namespaces */ + xmlSchemaWildcardNsPtr negNsSet; /* The negated namespace */ + int flags; +}; + +/** + * XML_SCHEMAS_ATTRGROUP_WILDCARD_BUILDED: + * + * The attribute wildcard has been built. + */ +#define XML_SCHEMAS_ATTRGROUP_WILDCARD_BUILDED 1 << 0 +/** + * XML_SCHEMAS_ATTRGROUP_GLOBAL: + * + * The attribute group has been defined. + */ +#define XML_SCHEMAS_ATTRGROUP_GLOBAL 1 << 1 +/** + * XML_SCHEMAS_ATTRGROUP_MARKED: + * + * Marks the attr group as marked; used for circular checks. + */ +#define XML_SCHEMAS_ATTRGROUP_MARKED 1 << 2 + +/** + * XML_SCHEMAS_ATTRGROUP_REDEFINED: + * + * The attr group was redefined. + */ +#define XML_SCHEMAS_ATTRGROUP_REDEFINED 1 << 3 +/** + * XML_SCHEMAS_ATTRGROUP_HAS_REFS: + * + * Whether this attr. group contains attr. group references. + */ +#define XML_SCHEMAS_ATTRGROUP_HAS_REFS 1 << 4 + +/** + * An attribute group definition. + * + * xmlSchemaAttribute and xmlSchemaAttributeGroup start of structures + * must be kept similar + */ +typedef struct _xmlSchemaAttributeGroup xmlSchemaAttributeGroup; +typedef xmlSchemaAttributeGroup *xmlSchemaAttributeGroupPtr; +struct _xmlSchemaAttributeGroup { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaAttribute *next;/* the next attribute if in a group ... */ + const xmlChar *name; + const xmlChar *id; + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + + xmlSchemaAttributePtr attributes; /* Deprecated; not used */ + xmlNodePtr node; + int flags; + xmlSchemaWildcardPtr attributeWildcard; + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaAttributeGroupPtr refItem; /* Deprecated; not used */ + const xmlChar *targetNamespace; + void *attrUses; +}; + +/** + * xmlSchemaTypeLink: + * Used to build a list of types (e.g. member types of + * simpleType with variety "union"). + */ +typedef struct _xmlSchemaTypeLink xmlSchemaTypeLink; +typedef xmlSchemaTypeLink *xmlSchemaTypeLinkPtr; +struct _xmlSchemaTypeLink { + struct _xmlSchemaTypeLink *next;/* the next type link ... */ + xmlSchemaTypePtr type;/* the linked type */ +}; + +/** + * xmlSchemaFacetLink: + * Used to build a list of facets. + */ +typedef struct _xmlSchemaFacetLink xmlSchemaFacetLink; +typedef xmlSchemaFacetLink *xmlSchemaFacetLinkPtr; +struct _xmlSchemaFacetLink { + struct _xmlSchemaFacetLink *next;/* the next facet link ... 
*/ + xmlSchemaFacetPtr facet;/* the linked facet */ +}; + +/** + * XML_SCHEMAS_TYPE_MIXED: + * + * the element content type is mixed + */ +#define XML_SCHEMAS_TYPE_MIXED 1 << 0 +/** + * XML_SCHEMAS_TYPE_DERIVATION_METHOD_EXTENSION: + * + * the simple or complex type has a derivation method of "extension". + */ +#define XML_SCHEMAS_TYPE_DERIVATION_METHOD_EXTENSION 1 << 1 +/** + * XML_SCHEMAS_TYPE_DERIVATION_METHOD_RESTRICTION: + * + * the simple or complex type has a derivation method of "restriction". + */ +#define XML_SCHEMAS_TYPE_DERIVATION_METHOD_RESTRICTION 1 << 2 +/** + * XML_SCHEMAS_TYPE_GLOBAL: + * + * the type is global + */ +#define XML_SCHEMAS_TYPE_GLOBAL 1 << 3 +/** + * XML_SCHEMAS_TYPE_OWNED_ATTR_WILDCARD: + * + * the complexType owns an attribute wildcard, i.e. + * it can be freed by the complexType + */ +#define XML_SCHEMAS_TYPE_OWNED_ATTR_WILDCARD 1 << 4 /* Obsolete. */ +/** + * XML_SCHEMAS_TYPE_VARIETY_ABSENT: + * + * the simpleType has a variety of "absent". + * TODO: Actually not necessary :-/, since if + * none of the variety flags occur then it's + * automatically absent. + */ +#define XML_SCHEMAS_TYPE_VARIETY_ABSENT 1 << 5 +/** + * XML_SCHEMAS_TYPE_VARIETY_LIST: + * + * the simpleType has a variety of "list". + */ +#define XML_SCHEMAS_TYPE_VARIETY_LIST 1 << 6 +/** + * XML_SCHEMAS_TYPE_VARIETY_UNION: + * + * the simpleType has a variety of "union". + */ +#define XML_SCHEMAS_TYPE_VARIETY_UNION 1 << 7 +/** + * XML_SCHEMAS_TYPE_VARIETY_ATOMIC: + * + * the simpleType has a variety of "union". + */ +#define XML_SCHEMAS_TYPE_VARIETY_ATOMIC 1 << 8 +/** + * XML_SCHEMAS_TYPE_FINAL_EXTENSION: + * + * the complexType has a final of "extension". + */ +#define XML_SCHEMAS_TYPE_FINAL_EXTENSION 1 << 9 +/** + * XML_SCHEMAS_TYPE_FINAL_RESTRICTION: + * + * the simpleType/complexType has a final of "restriction". + */ +#define XML_SCHEMAS_TYPE_FINAL_RESTRICTION 1 << 10 +/** + * XML_SCHEMAS_TYPE_FINAL_LIST: + * + * the simpleType has a final of "list". + */ +#define XML_SCHEMAS_TYPE_FINAL_LIST 1 << 11 +/** + * XML_SCHEMAS_TYPE_FINAL_UNION: + * + * the simpleType has a final of "union". + */ +#define XML_SCHEMAS_TYPE_FINAL_UNION 1 << 12 +/** + * XML_SCHEMAS_TYPE_FINAL_DEFAULT: + * + * the simpleType has a final of "default". + */ +#define XML_SCHEMAS_TYPE_FINAL_DEFAULT 1 << 13 +/** + * XML_SCHEMAS_TYPE_BUILTIN_PRIMITIVE: + * + * Marks the item as a builtin primitive. + */ +#define XML_SCHEMAS_TYPE_BUILTIN_PRIMITIVE 1 << 14 +/** + * XML_SCHEMAS_TYPE_MARKED: + * + * Marks the item as marked; used for circular checks. + */ +#define XML_SCHEMAS_TYPE_MARKED 1 << 16 +/** + * XML_SCHEMAS_TYPE_BLOCK_DEFAULT: + * + * the complexType did not specify 'block' so use the default of the + * item. + */ +#define XML_SCHEMAS_TYPE_BLOCK_DEFAULT 1 << 17 +/** + * XML_SCHEMAS_TYPE_BLOCK_EXTENSION: + * + * the complexType has a 'block' of "extension". + */ +#define XML_SCHEMAS_TYPE_BLOCK_EXTENSION 1 << 18 +/** + * XML_SCHEMAS_TYPE_BLOCK_RESTRICTION: + * + * the complexType has a 'block' of "restriction". + */ +#define XML_SCHEMAS_TYPE_BLOCK_RESTRICTION 1 << 19 +/** + * XML_SCHEMAS_TYPE_ABSTRACT: + * + * the simple/complexType is abstract. 
+ */ +#define XML_SCHEMAS_TYPE_ABSTRACT 1 << 20 +/** + * XML_SCHEMAS_TYPE_FACETSNEEDVALUE: + * + * indicates if the facets need a computed value + */ +#define XML_SCHEMAS_TYPE_FACETSNEEDVALUE 1 << 21 +/** + * XML_SCHEMAS_TYPE_INTERNAL_RESOLVED: + * + * indicates that the type was typefixed + */ +#define XML_SCHEMAS_TYPE_INTERNAL_RESOLVED 1 << 22 +/** + * XML_SCHEMAS_TYPE_INTERNAL_INVALID: + * + * indicates that the type is invalid + */ +#define XML_SCHEMAS_TYPE_INTERNAL_INVALID 1 << 23 +/** + * XML_SCHEMAS_TYPE_WHITESPACE_PRESERVE: + * + * a whitespace-facet value of "preserve" + */ +#define XML_SCHEMAS_TYPE_WHITESPACE_PRESERVE 1 << 24 +/** + * XML_SCHEMAS_TYPE_WHITESPACE_REPLACE: + * + * a whitespace-facet value of "replace" + */ +#define XML_SCHEMAS_TYPE_WHITESPACE_REPLACE 1 << 25 +/** + * XML_SCHEMAS_TYPE_WHITESPACE_COLLAPSE: + * + * a whitespace-facet value of "collapse" + */ +#define XML_SCHEMAS_TYPE_WHITESPACE_COLLAPSE 1 << 26 +/** + * XML_SCHEMAS_TYPE_HAS_FACETS: + * + * has facets + */ +#define XML_SCHEMAS_TYPE_HAS_FACETS 1 << 27 +/** + * XML_SCHEMAS_TYPE_NORMVALUENEEDED: + * + * indicates if the facets (pattern) need a normalized value + */ +#define XML_SCHEMAS_TYPE_NORMVALUENEEDED 1 << 28 + +/** + * XML_SCHEMAS_TYPE_FIXUP_1: + * + * First stage of fixup was done. + */ +#define XML_SCHEMAS_TYPE_FIXUP_1 1 << 29 + +/** + * XML_SCHEMAS_TYPE_REDEFINED: + * + * The type was redefined. + */ +#define XML_SCHEMAS_TYPE_REDEFINED 1 << 30 +/** + * XML_SCHEMAS_TYPE_REDEFINING: + * + * The type redefines an other type. + */ +/* #define XML_SCHEMAS_TYPE_REDEFINING 1 << 31 */ + +/** + * _xmlSchemaType: + * + * Schemas type definition. + */ +struct _xmlSchemaType { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaType *next; /* the next type if in a sequence ... */ + const xmlChar *name; + const xmlChar *id ; /* Deprecated; not used */ + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + xmlSchemaTypePtr subtypes; + xmlSchemaAttributePtr attributes; /* Deprecated; not used */ + xmlNodePtr node; + int minOccurs; /* Deprecated; not used */ + int maxOccurs; /* Deprecated; not used */ + + int flags; + xmlSchemaContentType contentType; + const xmlChar *base; /* Base type's local name */ + const xmlChar *baseNs; /* Base type's target namespace */ + xmlSchemaTypePtr baseType; /* The base type component */ + xmlSchemaFacetPtr facets; /* Local facets */ + struct _xmlSchemaType *redef; /* Deprecated; not used */ + int recurse; /* Obsolete */ + xmlSchemaAttributeLinkPtr *attributeUses; /* Deprecated; not used */ + xmlSchemaWildcardPtr attributeWildcard; + int builtInType; /* Type of built-in types. */ + xmlSchemaTypeLinkPtr memberTypes; /* member-types if a union type. */ + xmlSchemaFacetLinkPtr facetSet; /* All facets (incl. inherited) */ + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaTypePtr contentTypeDef; /* Used for the simple content of complex types. + Could we use @subtypes for this? */ + xmlRegexpPtr contModel; /* Holds the automaton of the content model */ + const xmlChar *targetNamespace; + void *attrUses; +}; + +/* + * xmlSchemaElement: + * An element definition. 
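The XML_SCHEMAS_TYPE_* macros above are bit flags stored in the flags field of struct _xmlSchemaType. A tiny illustrative check, bearing in mind the header's own warning that these internals are not guaranteed API/ABI stable; is_global_union_type is a made-up helper and the pointer would come from schema-processing code.

    #include <libxml/schemasInternals.h>

    /* Sketch: test two of the bit flags defined above on a type component. */
    static int is_global_union_type(xmlSchemaTypePtr type) {
        return (type->flags & XML_SCHEMAS_TYPE_GLOBAL) != 0 &&
               (type->flags & XML_SCHEMAS_TYPE_VARIETY_UNION) != 0;
    }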
+ * + * xmlSchemaType, xmlSchemaFacet and xmlSchemaElement start of + * structures must be kept similar + */ +/** + * XML_SCHEMAS_ELEM_NILLABLE: + * + * the element is nillable + */ +#define XML_SCHEMAS_ELEM_NILLABLE 1 << 0 +/** + * XML_SCHEMAS_ELEM_GLOBAL: + * + * the element is global + */ +#define XML_SCHEMAS_ELEM_GLOBAL 1 << 1 +/** + * XML_SCHEMAS_ELEM_DEFAULT: + * + * the element has a default value + */ +#define XML_SCHEMAS_ELEM_DEFAULT 1 << 2 +/** + * XML_SCHEMAS_ELEM_FIXED: + * + * the element has a fixed value + */ +#define XML_SCHEMAS_ELEM_FIXED 1 << 3 +/** + * XML_SCHEMAS_ELEM_ABSTRACT: + * + * the element is abstract + */ +#define XML_SCHEMAS_ELEM_ABSTRACT 1 << 4 +/** + * XML_SCHEMAS_ELEM_TOPLEVEL: + * + * the element is top level + * obsolete: use XML_SCHEMAS_ELEM_GLOBAL instead + */ +#define XML_SCHEMAS_ELEM_TOPLEVEL 1 << 5 +/** + * XML_SCHEMAS_ELEM_REF: + * + * the element is a reference to a type + */ +#define XML_SCHEMAS_ELEM_REF 1 << 6 +/** + * XML_SCHEMAS_ELEM_NSDEFAULT: + * + * allow elements in no namespace + * Obsolete, not used anymore. + */ +#define XML_SCHEMAS_ELEM_NSDEFAULT 1 << 7 +/** + * XML_SCHEMAS_ELEM_INTERNAL_RESOLVED: + * + * this is set when "type", "ref", "substitutionGroup" + * references have been resolved. + */ +#define XML_SCHEMAS_ELEM_INTERNAL_RESOLVED 1 << 8 + /** + * XML_SCHEMAS_ELEM_CIRCULAR: + * + * a helper flag for the search of circular references. + */ +#define XML_SCHEMAS_ELEM_CIRCULAR 1 << 9 +/** + * XML_SCHEMAS_ELEM_BLOCK_ABSENT: + * + * the "block" attribute is absent + */ +#define XML_SCHEMAS_ELEM_BLOCK_ABSENT 1 << 10 +/** + * XML_SCHEMAS_ELEM_BLOCK_EXTENSION: + * + * disallowed substitutions are absent + */ +#define XML_SCHEMAS_ELEM_BLOCK_EXTENSION 1 << 11 +/** + * XML_SCHEMAS_ELEM_BLOCK_RESTRICTION: + * + * disallowed substitutions: "restriction" + */ +#define XML_SCHEMAS_ELEM_BLOCK_RESTRICTION 1 << 12 +/** + * XML_SCHEMAS_ELEM_BLOCK_SUBSTITUTION: + * + * disallowed substitutions: "substitution" + */ +#define XML_SCHEMAS_ELEM_BLOCK_SUBSTITUTION 1 << 13 +/** + * XML_SCHEMAS_ELEM_FINAL_ABSENT: + * + * substitution group exclusions are absent + */ +#define XML_SCHEMAS_ELEM_FINAL_ABSENT 1 << 14 +/** + * XML_SCHEMAS_ELEM_FINAL_EXTENSION: + * + * substitution group exclusions: "extension" + */ +#define XML_SCHEMAS_ELEM_FINAL_EXTENSION 1 << 15 +/** + * XML_SCHEMAS_ELEM_FINAL_RESTRICTION: + * + * substitution group exclusions: "restriction" + */ +#define XML_SCHEMAS_ELEM_FINAL_RESTRICTION 1 << 16 +/** + * XML_SCHEMAS_ELEM_SUBST_GROUP_HEAD: + * + * the declaration is a substitution group head + */ +#define XML_SCHEMAS_ELEM_SUBST_GROUP_HEAD 1 << 17 +/** + * XML_SCHEMAS_ELEM_INTERNAL_CHECKED: + * + * this is set when the elem decl has been checked against + * all constraints + */ +#define XML_SCHEMAS_ELEM_INTERNAL_CHECKED 1 << 18 + +typedef struct _xmlSchemaElement xmlSchemaElement; +typedef xmlSchemaElement *xmlSchemaElementPtr; +struct _xmlSchemaElement { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaType *next; /* Not used? 
*/ + const xmlChar *name; + const xmlChar *id; /* Deprecated; not used */ + const xmlChar *ref; /* Deprecated; not used */ + const xmlChar *refNs; /* Deprecated; not used */ + xmlSchemaAnnotPtr annot; + xmlSchemaTypePtr subtypes; /* the type definition */ + xmlSchemaAttributePtr attributes; + xmlNodePtr node; + int minOccurs; /* Deprecated; not used */ + int maxOccurs; /* Deprecated; not used */ + + int flags; + const xmlChar *targetNamespace; + const xmlChar *namedType; + const xmlChar *namedTypeNs; + const xmlChar *substGroup; + const xmlChar *substGroupNs; + const xmlChar *scope; + const xmlChar *value; /* The original value of the value constraint. */ + struct _xmlSchemaElement *refDecl; /* This will now be used for the + substitution group affiliation */ + xmlRegexpPtr contModel; /* Obsolete for WXS, maybe used for RelaxNG */ + xmlSchemaContentType contentType; + const xmlChar *refPrefix; /* Deprecated; not used */ + xmlSchemaValPtr defVal; /* The compiled value constraint. */ + void *idcs; /* The identity-constraint defs */ +}; + +/* + * XML_SCHEMAS_FACET_UNKNOWN: + * + * unknown facet handling + */ +#define XML_SCHEMAS_FACET_UNKNOWN 0 +/* + * XML_SCHEMAS_FACET_PRESERVE: + * + * preserve the type of the facet + */ +#define XML_SCHEMAS_FACET_PRESERVE 1 +/* + * XML_SCHEMAS_FACET_REPLACE: + * + * replace the type of the facet + */ +#define XML_SCHEMAS_FACET_REPLACE 2 +/* + * XML_SCHEMAS_FACET_COLLAPSE: + * + * collapse the types of the facet + */ +#define XML_SCHEMAS_FACET_COLLAPSE 3 +/** + * A facet definition. + */ +struct _xmlSchemaFacet { + xmlSchemaTypeType type; /* The kind of type */ + struct _xmlSchemaFacet *next;/* the next type if in a sequence ... */ + const xmlChar *value; /* The original value */ + const xmlChar *id; /* Obsolete */ + xmlSchemaAnnotPtr annot; + xmlNodePtr node; + int fixed; /* XML_SCHEMAS_FACET_PRESERVE, etc. */ + int whitespace; + xmlSchemaValPtr val; /* The compiled value */ + xmlRegexpPtr regexp; /* The regex for patterns */ +}; + +/** + * A notation definition. + */ +typedef struct _xmlSchemaNotation xmlSchemaNotation; +typedef xmlSchemaNotation *xmlSchemaNotationPtr; +struct _xmlSchemaNotation { + xmlSchemaTypeType type; /* The kind of type */ + const xmlChar *name; + xmlSchemaAnnotPtr annot; + const xmlChar *identifier; + const xmlChar *targetNamespace; +}; + +/* +* TODO: Actually all those flags used for the schema should sit +* on the schema parser context, since they are used only +* during parsing an XML schema document, and not available +* on the component level as per spec. +*/ +/** + * XML_SCHEMAS_QUALIF_ELEM: + * + * Reflects elementFormDefault == qualified in + * an XML schema document. + */ +#define XML_SCHEMAS_QUALIF_ELEM 1 << 0 +/** + * XML_SCHEMAS_QUALIF_ATTR: + * + * Reflects attributeFormDefault == qualified in + * an XML schema document. + */ +#define XML_SCHEMAS_QUALIF_ATTR 1 << 1 +/** + * XML_SCHEMAS_FINAL_DEFAULT_EXTENSION: + * + * the schema has "extension" in the set of finalDefault. + */ +#define XML_SCHEMAS_FINAL_DEFAULT_EXTENSION 1 << 2 +/** + * XML_SCHEMAS_FINAL_DEFAULT_RESTRICTION: + * + * the schema has "restriction" in the set of finalDefault. + */ +#define XML_SCHEMAS_FINAL_DEFAULT_RESTRICTION 1 << 3 +/** + * XML_SCHEMAS_FINAL_DEFAULT_LIST: + * + * the schema has "list" in the set of finalDefault. + */ +#define XML_SCHEMAS_FINAL_DEFAULT_LIST 1 << 4 +/** + * XML_SCHEMAS_FINAL_DEFAULT_UNION: + * + * the schema has "union" in the set of finalDefault. 
+ */ +#define XML_SCHEMAS_FINAL_DEFAULT_UNION 1 << 5 +/** + * XML_SCHEMAS_BLOCK_DEFAULT_EXTENSION: + * + * the schema has "extension" in the set of blockDefault. + */ +#define XML_SCHEMAS_BLOCK_DEFAULT_EXTENSION 1 << 6 +/** + * XML_SCHEMAS_BLOCK_DEFAULT_RESTRICTION: + * + * the schema has "restriction" in the set of blockDefault. + */ +#define XML_SCHEMAS_BLOCK_DEFAULT_RESTRICTION 1 << 7 +/** + * XML_SCHEMAS_BLOCK_DEFAULT_SUBSTITUTION: + * + * the schema has "substitution" in the set of blockDefault. + */ +#define XML_SCHEMAS_BLOCK_DEFAULT_SUBSTITUTION 1 << 8 +/** + * XML_SCHEMAS_INCLUDING_CONVERT_NS: + * + * the schema is currently including an other schema with + * no target namespace. + */ +#define XML_SCHEMAS_INCLUDING_CONVERT_NS 1 << 9 +/** + * _xmlSchema: + * + * A Schemas definition + */ +struct _xmlSchema { + const xmlChar *name; /* schema name */ + const xmlChar *targetNamespace; /* the target namespace */ + const xmlChar *version; + const xmlChar *id; /* Obsolete */ + xmlDocPtr doc; + xmlSchemaAnnotPtr annot; + int flags; + + xmlHashTablePtr typeDecl; + xmlHashTablePtr attrDecl; + xmlHashTablePtr attrgrpDecl; + xmlHashTablePtr elemDecl; + xmlHashTablePtr notaDecl; + + xmlHashTablePtr schemasImports; + + void *_private; /* unused by the library for users or bindings */ + xmlHashTablePtr groupDecl; + xmlDictPtr dict; + void *includes; /* the includes, this is opaque for now */ + int preserve; /* whether to free the document */ + int counter; /* used to give anonymous components unique names */ + xmlHashTablePtr idcDef; /* All identity-constraint defs. */ + void *volatiles; /* Obsolete */ +}; + +XMLPUBFUN void xmlSchemaFreeType (xmlSchemaTypePtr type); +XMLPUBFUN void xmlSchemaFreeWildcard(xmlSchemaWildcardPtr wildcard); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_SCHEMAS_ENABLED */ +#endif /* __XML_SCHEMA_INTERNALS_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/threads.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/threads.h new file mode 100644 index 0000000000000000000000000000000000000000..8f4b6e174fbac6e724e5a8da4ed65a8a297c0373 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/threads.h @@ -0,0 +1,87 @@ +/** + * Summary: interfaces for thread handling + * Description: set of generic threading related routines + * should work with pthreads, Windows native or TLS threads + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_THREADS_H__ +#define __XML_THREADS_H__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * xmlMutex are a simple mutual exception locks. + */ +typedef struct _xmlMutex xmlMutex; +typedef xmlMutex *xmlMutexPtr; + +/* + * xmlRMutex are reentrant mutual exception locks. + */ +typedef struct _xmlRMutex xmlRMutex; +typedef xmlRMutex *xmlRMutexPtr; + +XMLPUBFUN int + xmlCheckThreadLocalStorage(void); + +XMLPUBFUN xmlMutexPtr + xmlNewMutex (void); +XMLPUBFUN void + xmlMutexLock (xmlMutexPtr tok); +XMLPUBFUN void + xmlMutexUnlock (xmlMutexPtr tok); +XMLPUBFUN void + xmlFreeMutex (xmlMutexPtr tok); + +XMLPUBFUN xmlRMutexPtr + xmlNewRMutex (void); +XMLPUBFUN void + xmlRMutexLock (xmlRMutexPtr tok); +XMLPUBFUN void + xmlRMutexUnlock (xmlRMutexPtr tok); +XMLPUBFUN void + xmlFreeRMutex (xmlRMutexPtr tok); + +/* + * Library wide APIs. 
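A small sketch of the plain (non-reentrant) mutex API declared in threads.h above, guarding a counter shared between threads; the variable and function names are illustrative.

    #include <libxml/threads.h>

    static xmlMutexPtr counter_lock;
    static int counter;

    void counters_init(void)    { counter_lock = xmlNewMutex(); }
    void counters_cleanup(void) { xmlFreeMutex(counter_lock); }

    /* Critical section: serialize updates to the shared counter. */
    void counter_bump(void) {
        xmlMutexLock(counter_lock);
        counter++;
        xmlMutexUnlock(counter_lock);
    }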
+ */ +XML_DEPRECATED +XMLPUBFUN void + xmlInitThreads (void); +XMLPUBFUN void + xmlLockLibrary (void); +XMLPUBFUN void + xmlUnlockLibrary(void); +XML_DEPRECATED +XMLPUBFUN int + xmlGetThreadId (void); +XML_DEPRECATED +XMLPUBFUN int + xmlIsMainThread (void); +XML_DEPRECATED +XMLPUBFUN void + xmlCleanupThreads(void); + +/** DOC_DISABLE */ +#if defined(LIBXML_THREAD_ENABLED) && defined(_WIN32) && \ + defined(LIBXML_STATIC_FOR_DLL) +int +xmlDllMain(void *hinstDLL, unsigned long fdwReason, + void *lpvReserved); +#endif +/** DOC_ENABLE */ + +#ifdef __cplusplus +} +#endif + + +#endif /* __XML_THREADS_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/tree.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/tree.h new file mode 100644 index 0000000000000000000000000000000000000000..a90a174f07477607e3828bf2d7785c505c904366 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/tree.h @@ -0,0 +1,1362 @@ +/* + * Summary: interfaces for tree manipulation + * Description: this module describes the structures found in an tree resulting + * from an XML or HTML parsing, as well as the API provided for + * various processing on that tree + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef XML_TREE_INTERNALS + +/* + * Emulate circular dependency for backward compatibility + */ +#include + +#else /* XML_TREE_INTERNALS */ + +#ifndef __XML_TREE_H__ +#define __XML_TREE_H__ + +#include +#include +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Some of the basic types pointer to structures: + */ +/* xmlIO.h */ +typedef struct _xmlParserInputBuffer xmlParserInputBuffer; +typedef xmlParserInputBuffer *xmlParserInputBufferPtr; + +typedef struct _xmlOutputBuffer xmlOutputBuffer; +typedef xmlOutputBuffer *xmlOutputBufferPtr; + +/* parser.h */ +typedef struct _xmlParserInput xmlParserInput; +typedef xmlParserInput *xmlParserInputPtr; + +typedef struct _xmlParserCtxt xmlParserCtxt; +typedef xmlParserCtxt *xmlParserCtxtPtr; + +typedef struct _xmlSAXLocator xmlSAXLocator; +typedef xmlSAXLocator *xmlSAXLocatorPtr; + +typedef struct _xmlSAXHandler xmlSAXHandler; +typedef xmlSAXHandler *xmlSAXHandlerPtr; + +/* entities.h */ +typedef struct _xmlEntity xmlEntity; +typedef xmlEntity *xmlEntityPtr; + +/** + * BASE_BUFFER_SIZE: + * + * default buffer size 4000. + */ +#define BASE_BUFFER_SIZE 4096 + +/** + * LIBXML_NAMESPACE_DICT: + * + * Defines experimental behaviour: + * 1) xmlNs gets an additional field @context (a xmlDoc) + * 2) when creating a tree, xmlNs->href is stored in the dict of xmlDoc. + */ +/* #define LIBXML_NAMESPACE_DICT */ + +/** + * xmlBufferAllocationScheme: + * + * A buffer allocation scheme can be defined to either match exactly the + * need or double it's allocated size each time it is found too small. 
+ */ + +typedef enum { + XML_BUFFER_ALLOC_DOUBLEIT, /* double each time one need to grow */ + XML_BUFFER_ALLOC_EXACT, /* grow only to the minimal size */ + XML_BUFFER_ALLOC_IMMUTABLE, /* immutable buffer, deprecated */ + XML_BUFFER_ALLOC_IO, /* special allocation scheme used for I/O */ + XML_BUFFER_ALLOC_HYBRID, /* exact up to a threshold, and doubleit thereafter */ + XML_BUFFER_ALLOC_BOUNDED /* limit the upper size of the buffer */ +} xmlBufferAllocationScheme; + +/** + * xmlBuffer: + * + * A buffer structure, this old construct is limited to 2GB and + * is being deprecated, use API with xmlBuf instead + */ +typedef struct _xmlBuffer xmlBuffer; +typedef xmlBuffer *xmlBufferPtr; +struct _xmlBuffer { + xmlChar *content; /* The buffer content UTF8 */ + unsigned int use; /* The buffer size used */ + unsigned int size; /* The buffer size */ + xmlBufferAllocationScheme alloc; /* The realloc method */ + xmlChar *contentIO; /* in IO mode we may have a different base */ +}; + +/** + * xmlBuf: + * + * A buffer structure, new one, the actual structure internals are not public + */ + +typedef struct _xmlBuf xmlBuf; + +/** + * xmlBufPtr: + * + * A pointer to a buffer structure, the actual structure internals are not + * public + */ + +typedef xmlBuf *xmlBufPtr; + +/* + * A few public routines for xmlBuf. As those are expected to be used + * mostly internally the bulk of the routines are internal in buf.h + */ +XMLPUBFUN xmlChar* xmlBufContent (const xmlBuf* buf); +XMLPUBFUN xmlChar* xmlBufEnd (xmlBufPtr buf); +XMLPUBFUN size_t xmlBufUse (const xmlBufPtr buf); +XMLPUBFUN size_t xmlBufShrink (xmlBufPtr buf, size_t len); + +/* + * LIBXML2_NEW_BUFFER: + * + * Macro used to express that the API use the new buffers for + * xmlParserInputBuffer and xmlOutputBuffer. The change was + * introduced in 2.9.0. + */ +#define LIBXML2_NEW_BUFFER + +/** + * XML_XML_NAMESPACE: + * + * This is the namespace for the special xml: prefix predefined in the + * XML Namespace specification. + */ +#define XML_XML_NAMESPACE \ + (const xmlChar *) "http://www.w3.org/XML/1998/namespace" + +/** + * XML_XML_ID: + * + * This is the name for the special xml:id attribute + */ +#define XML_XML_ID (const xmlChar *) "xml:id" + +/* + * The different element types carried by an XML tree. + * + * NOTE: This is synchronized with DOM Level1 values + * See http://www.w3.org/TR/REC-DOM-Level-1/ + * + * Actually this had diverged a bit, and now XML_DOCUMENT_TYPE_NODE should + * be deprecated to use an XML_DTD_NODE. + */ +typedef enum { + XML_ELEMENT_NODE= 1, + XML_ATTRIBUTE_NODE= 2, + XML_TEXT_NODE= 3, + XML_CDATA_SECTION_NODE= 4, + XML_ENTITY_REF_NODE= 5, + XML_ENTITY_NODE= 6, + XML_PI_NODE= 7, + XML_COMMENT_NODE= 8, + XML_DOCUMENT_NODE= 9, + XML_DOCUMENT_TYPE_NODE= 10, + XML_DOCUMENT_FRAG_NODE= 11, + XML_NOTATION_NODE= 12, + XML_HTML_DOCUMENT_NODE= 13, + XML_DTD_NODE= 14, + XML_ELEMENT_DECL= 15, + XML_ATTRIBUTE_DECL= 16, + XML_ENTITY_DECL= 17, + XML_NAMESPACE_DECL= 18, + XML_XINCLUDE_START= 19, + XML_XINCLUDE_END= 20 + /* XML_DOCB_DOCUMENT_NODE= 21 */ /* removed */ +} xmlElementType; + +/** DOC_DISABLE */ +/* For backward compatibility */ +#define XML_DOCB_DOCUMENT_NODE 21 +/** DOC_ENABLE */ + +/** + * xmlNotation: + * + * A DTD Notation definition. 
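A hedged sketch of the legacy xmlBuffer structure shown above (the 2GB-limited API that tree.h says is being superseded by xmlBuf): append two strings and read back the content/use fields. xmlBufferCreate, xmlBufferAdd and xmlBufferFree are assumed to be the constructor/append/destructor declared further down in this header.

    #include <libxml/tree.h>
    #include <stdio.h>

    int main(void) {
        xmlBufferPtr buf = xmlBufferCreate();
        if (buf == NULL)
            return 1;
        xmlBufferAdd(buf, BAD_CAST "hello ", -1);  /* -1: use the string length */
        xmlBufferAdd(buf, BAD_CAST "world", -1);
        /* content and use are the struct _xmlBuffer fields shown above. */
        printf("%s (%u bytes used)\n", (const char *) buf->content, buf->use);
        xmlBufferFree(buf);
        return 0;
    }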
+ */ + +typedef struct _xmlNotation xmlNotation; +typedef xmlNotation *xmlNotationPtr; +struct _xmlNotation { + const xmlChar *name; /* Notation name */ + const xmlChar *PublicID; /* Public identifier, if any */ + const xmlChar *SystemID; /* System identifier, if any */ +}; + +/** + * xmlAttributeType: + * + * A DTD Attribute type definition. + */ + +typedef enum { + XML_ATTRIBUTE_CDATA = 1, + XML_ATTRIBUTE_ID, + XML_ATTRIBUTE_IDREF , + XML_ATTRIBUTE_IDREFS, + XML_ATTRIBUTE_ENTITY, + XML_ATTRIBUTE_ENTITIES, + XML_ATTRIBUTE_NMTOKEN, + XML_ATTRIBUTE_NMTOKENS, + XML_ATTRIBUTE_ENUMERATION, + XML_ATTRIBUTE_NOTATION +} xmlAttributeType; + +/** + * xmlAttributeDefault: + * + * A DTD Attribute default definition. + */ + +typedef enum { + XML_ATTRIBUTE_NONE = 1, + XML_ATTRIBUTE_REQUIRED, + XML_ATTRIBUTE_IMPLIED, + XML_ATTRIBUTE_FIXED +} xmlAttributeDefault; + +/** + * xmlEnumeration: + * + * List structure used when there is an enumeration in DTDs. + */ + +typedef struct _xmlEnumeration xmlEnumeration; +typedef xmlEnumeration *xmlEnumerationPtr; +struct _xmlEnumeration { + struct _xmlEnumeration *next; /* next one */ + const xmlChar *name; /* Enumeration name */ +}; + +/** + * xmlAttribute: + * + * An Attribute declaration in a DTD. + */ + +typedef struct _xmlAttribute xmlAttribute; +typedef xmlAttribute *xmlAttributePtr; +struct _xmlAttribute { + void *_private; /* application data */ + xmlElementType type; /* XML_ATTRIBUTE_DECL, must be second ! */ + const xmlChar *name; /* Attribute name */ + struct _xmlNode *children; /* NULL */ + struct _xmlNode *last; /* NULL */ + struct _xmlDtd *parent; /* -> DTD */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + struct _xmlAttribute *nexth; /* next in hash table */ + xmlAttributeType atype; /* The attribute type */ + xmlAttributeDefault def; /* the default */ + const xmlChar *defaultValue; /* or the default value */ + xmlEnumerationPtr tree; /* or the enumeration tree if any */ + const xmlChar *prefix; /* the namespace prefix if any */ + const xmlChar *elem; /* Element holding the attribute */ +}; + +/** + * xmlElementContentType: + * + * Possible definitions of element content types. + */ +typedef enum { + XML_ELEMENT_CONTENT_PCDATA = 1, + XML_ELEMENT_CONTENT_ELEMENT, + XML_ELEMENT_CONTENT_SEQ, + XML_ELEMENT_CONTENT_OR +} xmlElementContentType; + +/** + * xmlElementContentOccur: + * + * Possible definitions of element content occurrences. + */ +typedef enum { + XML_ELEMENT_CONTENT_ONCE = 1, + XML_ELEMENT_CONTENT_OPT, + XML_ELEMENT_CONTENT_MULT, + XML_ELEMENT_CONTENT_PLUS +} xmlElementContentOccur; + +/** + * xmlElementContent: + * + * An XML Element content as stored after parsing an element definition + * in a DTD. + */ + +typedef struct _xmlElementContent xmlElementContent; +typedef xmlElementContent *xmlElementContentPtr; +struct _xmlElementContent { + xmlElementContentType type; /* PCDATA, ELEMENT, SEQ or OR */ + xmlElementContentOccur ocur; /* ONCE, OPT, MULT or PLUS */ + const xmlChar *name; /* Element name */ + struct _xmlElementContent *c1; /* first child */ + struct _xmlElementContent *c2; /* second child */ + struct _xmlElementContent *parent; /* parent */ + const xmlChar *prefix; /* Namespace prefix */ +}; + +/** + * xmlElementTypeVal: + * + * The different possibilities for an element content type. 
+ */ + +typedef enum { + XML_ELEMENT_TYPE_UNDEFINED = 0, + XML_ELEMENT_TYPE_EMPTY = 1, + XML_ELEMENT_TYPE_ANY, + XML_ELEMENT_TYPE_MIXED, + XML_ELEMENT_TYPE_ELEMENT +} xmlElementTypeVal; +
+/** + * xmlElement: + * + * An XML Element declaration from a DTD. + */ + +typedef struct _xmlElement xmlElement; +typedef xmlElement *xmlElementPtr; +struct _xmlElement { + void *_private; /* application data */ + xmlElementType type; /* XML_ELEMENT_DECL, must be second ! */ + const xmlChar *name; /* Element name */ + struct _xmlNode *children; /* NULL */ + struct _xmlNode *last; /* NULL */ + struct _xmlDtd *parent; /* -> DTD */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + xmlElementTypeVal etype; /* The type */ + xmlElementContentPtr content; /* the allowed element content */ + xmlAttributePtr attributes; /* List of the declared attributes */ + const xmlChar *prefix; /* the namespace prefix if any */ +#ifdef LIBXML_REGEXP_ENABLED + xmlRegexpPtr contModel; /* the validating regexp */ +#else + void *contModel; +#endif +}; + + +/** + * XML_LOCAL_NAMESPACE: + * + * A namespace declaration node. + */ +#define XML_LOCAL_NAMESPACE XML_NAMESPACE_DECL +typedef xmlElementType xmlNsType; + +/** + * xmlNs: + * + * An XML namespace. + * Note that prefix == NULL is valid, it defines the default namespace + * within the subtree (until overridden). + * + * xmlNsType is unified with xmlElementType. + */ + +typedef struct _xmlNs xmlNs; +typedef xmlNs *xmlNsPtr; +struct _xmlNs { + struct _xmlNs *next; /* next Ns link for this node */ + xmlNsType type; /* global or local */ + const xmlChar *href; /* URL for the namespace */ + const xmlChar *prefix; /* prefix for the namespace */ + void *_private; /* application data */ + struct _xmlDoc *context; /* normally an xmlDoc */ +}; + +/** + * xmlDtd: + * + * An XML DTD, as defined by <!DOCTYPE ... There is actually one for + * the internal subset and for the external subset. + */ +typedef struct _xmlDtd xmlDtd; +typedef xmlDtd *xmlDtdPtr; +struct _xmlDtd { + void *_private; /* application data */ + xmlElementType type; /* XML_DTD_NODE, must be second ! */ + const xmlChar *name; /* Name of the DTD */ + struct _xmlNode *children; /* the value of the property link */ + struct _xmlNode *last; /* last child link */ + struct _xmlDoc *parent; /* child->parent link */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + /* End of common part */ + void *notations; /* Hash table for notations if any */ + void *elements; /* Hash table for elements if any */ + void *attributes; /* Hash table for attributes if any */ + void *entities; /* Hash table for entities if any */ + const xmlChar *ExternalID; /* External identifier for PUBLIC DTD */ + const xmlChar *SystemID; /* URI for a SYSTEM or PUBLIC DTD */ + void *pentities; /* Hash table for param entities if any */ +}; + +/** + * xmlAttr: + * + * An attribute on an XML node. + */ +typedef struct _xmlAttr xmlAttr; +typedef xmlAttr *xmlAttrPtr; +struct _xmlAttr { + void *_private; /* application data */ + xmlElementType type; /* XML_ATTRIBUTE_NODE, must be second ! */ + const xmlChar *name; /* the name of the property */ + struct _xmlNode *children; /* the value of the property */ + struct _xmlNode *last; /* NULL */ + struct _xmlNode *parent; /* child->parent link */ + struct _xmlAttr *next; /* next sibling link */ + struct _xmlAttr *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + xmlNs *ns; /* pointer to the associated namespace */ + xmlAttributeType atype; /* the attribute type if validating */ + void *psvi; /* for type/PSVI information */ +}; + +/** + * xmlID: + * + * An XML ID instance.
+ */ + +typedef struct _xmlID xmlID; +typedef xmlID *xmlIDPtr; +struct _xmlID { + struct _xmlID *next; /* next ID */ + const xmlChar *value; /* The ID name */ + xmlAttrPtr attr; /* The attribute holding it */ + const xmlChar *name; /* The attribute if attr is not available */ + int lineno; /* The line number if attr is not available */ + struct _xmlDoc *doc; /* The document holding the ID */ +}; + +/** + * xmlRef: + * + * An XML IDREF instance. + */ + +typedef struct _xmlRef xmlRef; +typedef xmlRef *xmlRefPtr; +struct _xmlRef { + struct _xmlRef *next; /* next Ref */ + const xmlChar *value; /* The Ref name */ + xmlAttrPtr attr; /* The attribute holding it */ + const xmlChar *name; /* The attribute if attr is not available */ + int lineno; /* The line number if attr is not available */ +}; + +/** + * xmlNode: + * + * A node in an XML tree. + */ +typedef struct _xmlNode xmlNode; +typedef xmlNode *xmlNodePtr; +struct _xmlNode { + void *_private; /* application data */ + xmlElementType type; /* type number, must be second ! */ + const xmlChar *name; /* the name of the node, or the entity */ + struct _xmlNode *children; /* parent->childs link */ + struct _xmlNode *last; /* last child link */ + struct _xmlNode *parent; /* child->parent link */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* the containing document */ + + /* End of common part */ + xmlNs *ns; /* pointer to the associated namespace */ + xmlChar *content; /* the content */ + struct _xmlAttr *properties;/* properties list */ + xmlNs *nsDef; /* namespace definitions on this node */ + void *psvi; /* for type/PSVI information */ + unsigned short line; /* line number */ + unsigned short extra; /* extra data for XPath/XSLT */ +}; + +/** + * XML_GET_CONTENT: + * + * Macro to extract the content pointer of a node. + */ +#define XML_GET_CONTENT(n) \ + ((n)->type == XML_ELEMENT_NODE ? NULL : (n)->content) + +/** + * XML_GET_LINE: + * + * Macro to extract the line number of an element node. + */ +#define XML_GET_LINE(n) \ + (xmlGetLineNo(n)) + +/** + * xmlDocProperty + * + * Set of properties of the document as found by the parser + * Some of them are linked to similarly named xmlParserOption + */ +typedef enum { + XML_DOC_WELLFORMED = 1<<0, /* document is XML well formed */ + XML_DOC_NSVALID = 1<<1, /* document is Namespace valid */ + XML_DOC_OLD10 = 1<<2, /* parsed with old XML-1.0 parser */ + XML_DOC_DTDVALID = 1<<3, /* DTD validation was successful */ + XML_DOC_XINCLUDE = 1<<4, /* XInclude substitution was done */ + XML_DOC_USERBUILT = 1<<5, /* Document was built using the API + and not by parsing an instance */ + XML_DOC_INTERNAL = 1<<6, /* built for internal processing */ + XML_DOC_HTML = 1<<7 /* parsed or built HTML document */ +} xmlDocProperties; + +/** + * xmlDoc: + * + * An XML document. + */ +typedef struct _xmlDoc xmlDoc; +typedef xmlDoc *xmlDocPtr; +struct _xmlDoc { + void *_private; /* application data */ + xmlElementType type; /* XML_DOCUMENT_NODE, must be second ! 
*/ + char *name; /* name/filename/URI of the document */ + struct _xmlNode *children; /* the document tree */ + struct _xmlNode *last; /* last child link */ + struct _xmlNode *parent; /* child->parent link */ + struct _xmlNode *next; /* next sibling link */ + struct _xmlNode *prev; /* previous sibling link */ + struct _xmlDoc *doc; /* autoreference to itself */ + + /* End of common part */ + int compression;/* level of zlib compression */ + int standalone; /* standalone document (no external refs) + 1 if standalone="yes" + 0 if standalone="no" + -1 if there is no XML declaration + -2 if there is an XML declaration, but no + standalone attribute was specified */ + struct _xmlDtd *intSubset; /* the document internal subset */ + struct _xmlDtd *extSubset; /* the document external subset */ + struct _xmlNs *oldNs; /* Global namespace, the old way */ + const xmlChar *version; /* the XML version string */ + const xmlChar *encoding; /* actual encoding, if any */ + void *ids; /* Hash table for ID attributes if any */ + void *refs; /* Hash table for IDREFs attributes if any */ + const xmlChar *URL; /* The URI for that document */ + int charset; /* unused */ + struct _xmlDict *dict; /* dict used to allocate names or NULL */ + void *psvi; /* for type/PSVI information */ + int parseFlags; /* set of xmlParserOption used to parse the + document */ + int properties; /* set of xmlDocProperties for this document + set at the end of parsing */ +}; + + +typedef struct _xmlDOMWrapCtxt xmlDOMWrapCtxt; +typedef xmlDOMWrapCtxt *xmlDOMWrapCtxtPtr; + +/** + * xmlDOMWrapAcquireNsFunction: + * @ctxt: a DOM wrapper context + * @node: the context node (element or attribute) + * @nsName: the requested namespace name + * @nsPrefix: the requested namespace prefix + * + * A function called to acquire namespaces (xmlNs) from the wrapper. + * + * Returns an xmlNsPtr or NULL in case of an error. + */ +typedef xmlNsPtr (*xmlDOMWrapAcquireNsFunction) (xmlDOMWrapCtxtPtr ctxt, + xmlNodePtr node, + const xmlChar *nsName, + const xmlChar *nsPrefix); + +/** + * xmlDOMWrapCtxt: + * + * Context for DOM wrapper-operations. + */ +struct _xmlDOMWrapCtxt { + void * _private; + /* + * The type of this context, just in case we need specialized + * contexts in the future. + */ + int type; + /* + * Internal namespace map used for various operations. + */ + void * namespaceMap; + /* + * Use this one to acquire an xmlNsPtr intended for node->ns. + * (Note that this is not intended for elem->nsDef). + */ + xmlDOMWrapAcquireNsFunction getNsForNodeFunc; +}; + +/** + * xmlRegisterNodeFunc: + * @node: the current node + * + * Signature for the registration callback of a created node + */ +typedef void (*xmlRegisterNodeFunc) (xmlNodePtr node); + +/** + * xmlDeregisterNodeFunc: + * @node: the current node + * + * Signature for the deregistration callback of a discarded node + */ +typedef void (*xmlDeregisterNodeFunc) (xmlNodePtr node); + +/** + * xmlChildrenNode: + * + * Macro for compatibility naming layer with libxml1. Maps + * to "children." + */ +#ifndef xmlChildrenNode +#define xmlChildrenNode children +#endif + +/** + * xmlRootNode: + * + * Macro for compatibility naming layer with libxml1. Maps + * to "children". + */ +#ifndef xmlRootNode +#define xmlRootNode children +#endif + +/* + * Variables. 
+ */ + +/** DOC_DISABLE */ +#define XML_GLOBALS_TREE \ + XML_OP(xmlBufferAllocScheme, xmlBufferAllocationScheme, XML_DEPRECATED) \ + XML_OP(xmlDefaultBufferSize, int, XML_DEPRECATED) \ + XML_OP(xmlRegisterNodeDefaultValue, xmlRegisterNodeFunc, XML_DEPRECATED) \ + XML_OP(xmlDeregisterNodeDefaultValue, xmlDeregisterNodeFunc, \ + XML_DEPRECATED) + +#define XML_OP XML_DECLARE_GLOBAL +XML_GLOBALS_TREE +#undef XML_OP + +#if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlBufferAllocScheme XML_GLOBAL_MACRO(xmlBufferAllocScheme) + #define xmlDefaultBufferSize XML_GLOBAL_MACRO(xmlDefaultBufferSize) + #define xmlRegisterNodeDefaultValue \ + XML_GLOBAL_MACRO(xmlRegisterNodeDefaultValue) + #define xmlDeregisterNodeDefaultValue \ + XML_GLOBAL_MACRO(xmlDeregisterNodeDefaultValue) +#endif +/** DOC_ENABLE */ + +/* + * Some helper functions + */ +XMLPUBFUN int + xmlValidateNCName (const xmlChar *value, + int space); + +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN int + xmlValidateQName (const xmlChar *value, + int space); +XMLPUBFUN int + xmlValidateName (const xmlChar *value, + int space); +XMLPUBFUN int + xmlValidateNMToken (const xmlChar *value, + int space); +#endif + +XMLPUBFUN xmlChar * + xmlBuildQName (const xmlChar *ncname, + const xmlChar *prefix, + xmlChar *memory, + int len); +XMLPUBFUN xmlChar * + xmlSplitQName2 (const xmlChar *name, + xmlChar **prefix); +XMLPUBFUN const xmlChar * + xmlSplitQName3 (const xmlChar *name, + int *len); + +/* + * Handling Buffers, the old ones see @xmlBuf for the new ones. + */ + +XMLPUBFUN void + xmlSetBufferAllocationScheme(xmlBufferAllocationScheme scheme); +XMLPUBFUN xmlBufferAllocationScheme + xmlGetBufferAllocationScheme(void); + +XMLPUBFUN xmlBufferPtr + xmlBufferCreate (void); +XMLPUBFUN xmlBufferPtr + xmlBufferCreateSize (size_t size); +XMLPUBFUN xmlBufferPtr + xmlBufferCreateStatic (void *mem, + size_t size); +XMLPUBFUN int + xmlBufferResize (xmlBufferPtr buf, + unsigned int size); +XMLPUBFUN void + xmlBufferFree (xmlBufferPtr buf); +XMLPUBFUN int + xmlBufferDump (FILE *file, + xmlBufferPtr buf); +XMLPUBFUN int + xmlBufferAdd (xmlBufferPtr buf, + const xmlChar *str, + int len); +XMLPUBFUN int + xmlBufferAddHead (xmlBufferPtr buf, + const xmlChar *str, + int len); +XMLPUBFUN int + xmlBufferCat (xmlBufferPtr buf, + const xmlChar *str); +XMLPUBFUN int + xmlBufferCCat (xmlBufferPtr buf, + const char *str); +XMLPUBFUN int + xmlBufferShrink (xmlBufferPtr buf, + unsigned int len); +XMLPUBFUN int + xmlBufferGrow (xmlBufferPtr buf, + unsigned int len); +XMLPUBFUN void + xmlBufferEmpty (xmlBufferPtr buf); +XMLPUBFUN const xmlChar* + xmlBufferContent (const xmlBuffer *buf); +XMLPUBFUN xmlChar* + xmlBufferDetach (xmlBufferPtr buf); +XMLPUBFUN void + xmlBufferSetAllocationScheme(xmlBufferPtr buf, + xmlBufferAllocationScheme scheme); +XMLPUBFUN int + xmlBufferLength (const xmlBuffer *buf); + +/* + * Creating/freeing new structures. 
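+ *
+ * Illustrative sketch only, not part of the upstream documentation. It
+ * assumes the BAD_CAST cast macro from xmlstring.h and an invented DTD
+ * system identifier ("root.dtd"). A property created with xmlNewDocProp()
+ * is never attached to an element, so it has to be freed explicitly;
+ * xmlFreeDoc() then releases the document together with its internal subset:
+ *
+ *     xmlDocPtr doc = xmlNewDoc(BAD_CAST "1.0");
+ *     xmlCreateIntSubset(doc, BAD_CAST "root", NULL, BAD_CAST "root.dtd");
+ *     xmlAttrPtr attr = xmlNewDocProp(doc, BAD_CAST "id", BAD_CAST "r1");
+ *     xmlFreeProp(attr);
+ *     xmlFreeDoc(doc);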
+ */ +XMLPUBFUN xmlDtdPtr + xmlCreateIntSubset (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlDtdPtr + xmlNewDtd (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *ExternalID, + const xmlChar *SystemID); +XMLPUBFUN xmlDtdPtr + xmlGetIntSubset (const xmlDoc *doc); +XMLPUBFUN void + xmlFreeDtd (xmlDtdPtr cur); +#ifdef LIBXML_LEGACY_ENABLED +XML_DEPRECATED +XMLPUBFUN xmlNsPtr + xmlNewGlobalNs (xmlDocPtr doc, + const xmlChar *href, + const xmlChar *prefix); +#endif /* LIBXML_LEGACY_ENABLED */ +XMLPUBFUN xmlNsPtr + xmlNewNs (xmlNodePtr node, + const xmlChar *href, + const xmlChar *prefix); +XMLPUBFUN void + xmlFreeNs (xmlNsPtr cur); +XMLPUBFUN void + xmlFreeNsList (xmlNsPtr cur); +XMLPUBFUN xmlDocPtr + xmlNewDoc (const xmlChar *version); +XMLPUBFUN void + xmlFreeDoc (xmlDocPtr cur); +XMLPUBFUN xmlAttrPtr + xmlNewDocProp (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *value); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_HTML_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN xmlAttrPtr + xmlNewProp (xmlNodePtr node, + const xmlChar *name, + const xmlChar *value); +#endif +XMLPUBFUN xmlAttrPtr + xmlNewNsProp (xmlNodePtr node, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *value); +XMLPUBFUN xmlAttrPtr + xmlNewNsPropEatName (xmlNodePtr node, + xmlNsPtr ns, + xmlChar *name, + const xmlChar *value); +XMLPUBFUN void + xmlFreePropList (xmlAttrPtr cur); +XMLPUBFUN void + xmlFreeProp (xmlAttrPtr cur); +XMLPUBFUN xmlAttrPtr + xmlCopyProp (xmlNodePtr target, + xmlAttrPtr cur); +XMLPUBFUN xmlAttrPtr + xmlCopyPropList (xmlNodePtr target, + xmlAttrPtr cur); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlDtdPtr + xmlCopyDtd (xmlDtdPtr dtd); +#endif /* LIBXML_TREE_ENABLED */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN xmlDocPtr + xmlCopyDoc (xmlDocPtr doc, + int recursive); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) */ +/* + * Creating new nodes. 
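+ *
+ * Illustrative sketch only (assumes BAD_CAST from xmlstring.h and a build
+ * with LIBXML_TREE_ENABLED; the element names and text are invented).
+ * Nodes created with the constructors below are owned by the document
+ * once linked into it, so a single xmlFreeDoc() releases the whole tree:
+ *
+ *     xmlDocPtr doc = xmlNewDoc(BAD_CAST "1.0");
+ *     xmlNodePtr book = xmlNewDocNode(doc, NULL, BAD_CAST "book", NULL);
+ *     xmlDocSetRootElement(doc, book);
+ *     xmlNewChild(book, NULL, BAD_CAST "title", BAD_CAST "Example");
+ *     xmlAddChild(book, xmlNewDocComment(doc, BAD_CAST "generated"));
+ *     xmlFreeDoc(doc);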
+ */ +XMLPUBFUN xmlNodePtr + xmlNewDocNode (xmlDocPtr doc, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocNodeEatName (xmlDocPtr doc, + xmlNsPtr ns, + xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewNode (xmlNsPtr ns, + const xmlChar *name); +XMLPUBFUN xmlNodePtr + xmlNewNodeEatName (xmlNsPtr ns, + xmlChar *name); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN xmlNodePtr + xmlNewChild (xmlNodePtr parent, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +#endif +XMLPUBFUN xmlNodePtr + xmlNewDocText (const xmlDoc *doc, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewText (const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocPI (xmlDocPtr doc, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewPI (const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocTextLen (xmlDocPtr doc, + const xmlChar *content, + int len); +XMLPUBFUN xmlNodePtr + xmlNewTextLen (const xmlChar *content, + int len); +XMLPUBFUN xmlNodePtr + xmlNewDocComment (xmlDocPtr doc, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewComment (const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewCDataBlock (xmlDocPtr doc, + const xmlChar *content, + int len); +XMLPUBFUN xmlNodePtr + xmlNewCharRef (xmlDocPtr doc, + const xmlChar *name); +XMLPUBFUN xmlNodePtr + xmlNewReference (const xmlDoc *doc, + const xmlChar *name); +XMLPUBFUN xmlNodePtr + xmlCopyNode (xmlNodePtr node, + int recursive); +XMLPUBFUN xmlNodePtr + xmlDocCopyNode (xmlNodePtr node, + xmlDocPtr doc, + int recursive); +XMLPUBFUN xmlNodePtr + xmlDocCopyNodeList (xmlDocPtr doc, + xmlNodePtr node); +XMLPUBFUN xmlNodePtr + xmlCopyNodeList (xmlNodePtr node); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlNodePtr + xmlNewTextChild (xmlNodePtr parent, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocRawNode (xmlDocPtr doc, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *content); +XMLPUBFUN xmlNodePtr + xmlNewDocFragment (xmlDocPtr doc); +#endif /* LIBXML_TREE_ENABLED */ + +/* + * Navigating. + */ +XMLPUBFUN long + xmlGetLineNo (const xmlNode *node); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_DEBUG_ENABLED) +XMLPUBFUN xmlChar * + xmlGetNodePath (const xmlNode *node); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_DEBUG_ENABLED) */ +XMLPUBFUN xmlNodePtr + xmlDocGetRootElement (const xmlDoc *doc); +XMLPUBFUN xmlNodePtr + xmlGetLastChild (const xmlNode *parent); +XMLPUBFUN int + xmlNodeIsText (const xmlNode *node); +XMLPUBFUN int + xmlIsBlankNode (const xmlNode *node); + +/* + * Changing the structure. 
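+ *
+ * Illustrative sketch only; "cur" and "anchor" stand for two nodes of the
+ * same document. xmlUnlinkNode() merely detaches a node from its parent,
+ * so a detached node must either be re-linked or freed explicitly:
+ *
+ *     xmlUnlinkNode(cur);
+ *     xmlAddNextSibling(anchor, cur);
+ *
+ * or, to discard the node instead of moving it:
+ *
+ *     xmlUnlinkNode(cur);
+ *     xmlFreeNode(cur);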
+ */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) +XMLPUBFUN xmlNodePtr + xmlDocSetRootElement (xmlDocPtr doc, + xmlNodePtr root); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) */ +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN void + xmlNodeSetName (xmlNodePtr cur, + const xmlChar *name); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN xmlNodePtr + xmlAddChild (xmlNodePtr parent, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr + xmlAddChildList (xmlNodePtr parent, + xmlNodePtr cur); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) +XMLPUBFUN xmlNodePtr + xmlReplaceNode (xmlNodePtr old, + xmlNodePtr cur); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_WRITER_ENABLED) */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_HTML_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) +XMLPUBFUN xmlNodePtr + xmlAddPrevSibling (xmlNodePtr cur, + xmlNodePtr elem); +#endif /* LIBXML_TREE_ENABLED || LIBXML_HTML_ENABLED || LIBXML_SCHEMAS_ENABLED */ +XMLPUBFUN xmlNodePtr + xmlAddSibling (xmlNodePtr cur, + xmlNodePtr elem); +XMLPUBFUN xmlNodePtr + xmlAddNextSibling (xmlNodePtr cur, + xmlNodePtr elem); +XMLPUBFUN void + xmlUnlinkNode (xmlNodePtr cur); +XMLPUBFUN xmlNodePtr + xmlTextMerge (xmlNodePtr first, + xmlNodePtr second); +XMLPUBFUN int + xmlTextConcat (xmlNodePtr node, + const xmlChar *content, + int len); +XMLPUBFUN void + xmlFreeNodeList (xmlNodePtr cur); +XMLPUBFUN void + xmlFreeNode (xmlNodePtr cur); +XMLPUBFUN void + xmlSetTreeDoc (xmlNodePtr tree, + xmlDocPtr doc); +XMLPUBFUN void + xmlSetListDoc (xmlNodePtr list, + xmlDocPtr doc); +/* + * Namespaces. + */ +XMLPUBFUN xmlNsPtr + xmlSearchNs (xmlDocPtr doc, + xmlNodePtr node, + const xmlChar *nameSpace); +XMLPUBFUN xmlNsPtr + xmlSearchNsByHref (xmlDocPtr doc, + xmlNodePtr node, + const xmlChar *href); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XPATH_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN xmlNsPtr * + xmlGetNsList (const xmlDoc *doc, + const xmlNode *node); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XPATH_ENABLED) */ + +XMLPUBFUN void + xmlSetNs (xmlNodePtr node, + xmlNsPtr ns); +XMLPUBFUN xmlNsPtr + xmlCopyNamespace (xmlNsPtr cur); +XMLPUBFUN xmlNsPtr + xmlCopyNamespaceList (xmlNsPtr cur); + +/* + * Changing the content. 
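+ *
+ * Illustrative sketch only ("node" stands for an existing element node,
+ * the attribute name and values are invented; assumes BAD_CAST from
+ * xmlstring.h). Strings returned by xmlGetProp() and xmlNodeGetContent()
+ * are freshly allocated copies and must be released with xmlFree():
+ *
+ *     xmlSetProp(node, BAD_CAST "lang", BAD_CAST "en");
+ *     xmlNodeSetContent(node, BAD_CAST "hello");
+ *     xmlChar *val = xmlGetProp(node, BAD_CAST "lang");
+ *     xmlFree(val);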
+ */ +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) || defined(LIBXML_HTML_ENABLED) +XMLPUBFUN xmlAttrPtr + xmlSetProp (xmlNodePtr node, + const xmlChar *name, + const xmlChar *value); +XMLPUBFUN xmlAttrPtr + xmlSetNsProp (xmlNodePtr node, + xmlNsPtr ns, + const xmlChar *name, + const xmlChar *value); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) || \ + defined(LIBXML_SCHEMAS_ENABLED) || defined(LIBXML_HTML_ENABLED) */ +XMLPUBFUN xmlChar * + xmlGetNoNsProp (const xmlNode *node, + const xmlChar *name); +XMLPUBFUN xmlChar * + xmlGetProp (const xmlNode *node, + const xmlChar *name); +XMLPUBFUN xmlAttrPtr + xmlHasProp (const xmlNode *node, + const xmlChar *name); +XMLPUBFUN xmlAttrPtr + xmlHasNsProp (const xmlNode *node, + const xmlChar *name, + const xmlChar *nameSpace); +XMLPUBFUN xmlChar * + xmlGetNsProp (const xmlNode *node, + const xmlChar *name, + const xmlChar *nameSpace); +XMLPUBFUN xmlNodePtr + xmlStringGetNodeList (const xmlDoc *doc, + const xmlChar *value); +XMLPUBFUN xmlNodePtr + xmlStringLenGetNodeList (const xmlDoc *doc, + const xmlChar *value, + int len); +XMLPUBFUN xmlChar * + xmlNodeListGetString (xmlDocPtr doc, + const xmlNode *list, + int inLine); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN xmlChar * + xmlNodeListGetRawString (const xmlDoc *doc, + const xmlNode *list, + int inLine); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN void + xmlNodeSetContent (xmlNodePtr cur, + const xmlChar *content); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN void + xmlNodeSetContentLen (xmlNodePtr cur, + const xmlChar *content, + int len); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN void + xmlNodeAddContent (xmlNodePtr cur, + const xmlChar *content); +XMLPUBFUN void + xmlNodeAddContentLen (xmlNodePtr cur, + const xmlChar *content, + int len); +XMLPUBFUN xmlChar * + xmlNodeGetContent (const xmlNode *cur); + +XMLPUBFUN int + xmlNodeBufGetContent (xmlBufferPtr buffer, + const xmlNode *cur); +XMLPUBFUN int + xmlBufGetNodeContent (xmlBufPtr buf, + const xmlNode *cur); + +XMLPUBFUN xmlChar * + xmlNodeGetLang (const xmlNode *cur); +XMLPUBFUN int + xmlNodeGetSpacePreserve (const xmlNode *cur); +#ifdef LIBXML_TREE_ENABLED +XMLPUBFUN void + xmlNodeSetLang (xmlNodePtr cur, + const xmlChar *lang); +XMLPUBFUN void + xmlNodeSetSpacePreserve (xmlNodePtr cur, + int val); +#endif /* LIBXML_TREE_ENABLED */ +XMLPUBFUN xmlChar * + xmlNodeGetBase (const xmlDoc *doc, + const xmlNode *cur); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_XINCLUDE_ENABLED) +XMLPUBFUN void + xmlNodeSetBase (xmlNodePtr cur, + const xmlChar *uri); +#endif + +/* + * Removing content. + */ +XMLPUBFUN int + xmlRemoveProp (xmlAttrPtr cur); +#if defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XMLPUBFUN int + xmlUnsetNsProp (xmlNodePtr node, + xmlNsPtr ns, + const xmlChar *name); +XMLPUBFUN int + xmlUnsetProp (xmlNodePtr node, + const xmlChar *name); +#endif /* defined(LIBXML_TREE_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) */ + +/* + * Internal, don't use. 
+ */ +XMLPUBFUN void + xmlBufferWriteCHAR (xmlBufferPtr buf, + const xmlChar *string); +XMLPUBFUN void + xmlBufferWriteChar (xmlBufferPtr buf, + const char *string); +XMLPUBFUN void + xmlBufferWriteQuotedString(xmlBufferPtr buf, + const xmlChar *string); + +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void xmlAttrSerializeTxtContent(xmlBufferPtr buf, + xmlDocPtr doc, + xmlAttrPtr attr, + const xmlChar *string); +#endif /* LIBXML_OUTPUT_ENABLED */ + +#ifdef LIBXML_TREE_ENABLED +/* + * Namespace handling. + */ +XMLPUBFUN int + xmlReconciliateNs (xmlDocPtr doc, + xmlNodePtr tree); +#endif + +#ifdef LIBXML_OUTPUT_ENABLED +/* + * Saving. + */ +XMLPUBFUN void + xmlDocDumpFormatMemory (xmlDocPtr cur, + xmlChar **mem, + int *size, + int format); +XMLPUBFUN void + xmlDocDumpMemory (xmlDocPtr cur, + xmlChar **mem, + int *size); +XMLPUBFUN void + xmlDocDumpMemoryEnc (xmlDocPtr out_doc, + xmlChar **doc_txt_ptr, + int * doc_txt_len, + const char *txt_encoding); +XMLPUBFUN void + xmlDocDumpFormatMemoryEnc(xmlDocPtr out_doc, + xmlChar **doc_txt_ptr, + int * doc_txt_len, + const char *txt_encoding, + int format); +XMLPUBFUN int + xmlDocFormatDump (FILE *f, + xmlDocPtr cur, + int format); +XMLPUBFUN int + xmlDocDump (FILE *f, + xmlDocPtr cur); +XMLPUBFUN void + xmlElemDump (FILE *f, + xmlDocPtr doc, + xmlNodePtr cur); +XMLPUBFUN int + xmlSaveFile (const char *filename, + xmlDocPtr cur); +XMLPUBFUN int + xmlSaveFormatFile (const char *filename, + xmlDocPtr cur, + int format); +XMLPUBFUN size_t + xmlBufNodeDump (xmlBufPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + int level, + int format); +XMLPUBFUN int + xmlNodeDump (xmlBufferPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + int level, + int format); + +XMLPUBFUN int + xmlSaveFileTo (xmlOutputBufferPtr buf, + xmlDocPtr cur, + const char *encoding); +XMLPUBFUN int + xmlSaveFormatFileTo (xmlOutputBufferPtr buf, + xmlDocPtr cur, + const char *encoding, + int format); +XMLPUBFUN void + xmlNodeDumpOutput (xmlOutputBufferPtr buf, + xmlDocPtr doc, + xmlNodePtr cur, + int level, + int format, + const char *encoding); + +XMLPUBFUN int + xmlSaveFormatFileEnc (const char *filename, + xmlDocPtr cur, + const char *encoding, + int format); + +XMLPUBFUN int + xmlSaveFileEnc (const char *filename, + xmlDocPtr cur, + const char *encoding); + +#endif /* LIBXML_OUTPUT_ENABLED */ +/* + * XHTML + */ +XMLPUBFUN int + xmlIsXHTML (const xmlChar *systemID, + const xmlChar *publicID); + +/* + * Compression. + */ +XMLPUBFUN int + xmlGetDocCompressMode (const xmlDoc *doc); +XMLPUBFUN void + xmlSetDocCompressMode (xmlDocPtr doc, + int mode); +XMLPUBFUN int + xmlGetCompressMode (void); +XMLPUBFUN void + xmlSetCompressMode (int mode); + +/* +* DOM-wrapper helper functions. +*/ +XMLPUBFUN xmlDOMWrapCtxtPtr + xmlDOMWrapNewCtxt (void); +XMLPUBFUN void + xmlDOMWrapFreeCtxt (xmlDOMWrapCtxtPtr ctxt); +XMLPUBFUN int + xmlDOMWrapReconcileNamespaces(xmlDOMWrapCtxtPtr ctxt, + xmlNodePtr elem, + int options); +XMLPUBFUN int + xmlDOMWrapAdoptNode (xmlDOMWrapCtxtPtr ctxt, + xmlDocPtr sourceDoc, + xmlNodePtr node, + xmlDocPtr destDoc, + xmlNodePtr destParent, + int options); +XMLPUBFUN int + xmlDOMWrapRemoveNode (xmlDOMWrapCtxtPtr ctxt, + xmlDocPtr doc, + xmlNodePtr node, + int options); +XMLPUBFUN int + xmlDOMWrapCloneNode (xmlDOMWrapCtxtPtr ctxt, + xmlDocPtr sourceDoc, + xmlNodePtr node, + xmlNodePtr *clonedNode, + xmlDocPtr destDoc, + xmlNodePtr destParent, + int deep, + int options); + +#ifdef LIBXML_TREE_ENABLED +/* + * 5 interfaces from DOM ElementTraversal, but different in entities + * traversal. 
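+ *
+ * Illustrative sketch only ("parent" stands for an existing element node):
+ * walking the element children while skipping text, comment and PI nodes.
+ * After the loop, "visited" matches what xmlChildElementCount(parent)
+ * reports:
+ *
+ *     unsigned long visited = 0;
+ *     xmlNodePtr child;
+ *     for (child = xmlFirstElementChild(parent); child != NULL;
+ *          child = xmlNextElementSibling(child))
+ *         visited++;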
+ */ +XMLPUBFUN unsigned long + xmlChildElementCount (xmlNodePtr parent); +XMLPUBFUN xmlNodePtr + xmlNextElementSibling (xmlNodePtr node); +XMLPUBFUN xmlNodePtr + xmlFirstElementChild (xmlNodePtr parent); +XMLPUBFUN xmlNodePtr + xmlLastElementChild (xmlNodePtr parent); +XMLPUBFUN xmlNodePtr + xmlPreviousElementSibling (xmlNodePtr node); +#endif + +XMLPUBFUN xmlRegisterNodeFunc + xmlRegisterNodeDefault (xmlRegisterNodeFunc func); +XMLPUBFUN xmlDeregisterNodeFunc + xmlDeregisterNodeDefault (xmlDeregisterNodeFunc func); +XMLPUBFUN xmlRegisterNodeFunc + xmlThrDefRegisterNodeDefault(xmlRegisterNodeFunc func); +XMLPUBFUN xmlDeregisterNodeFunc + xmlThrDefDeregisterNodeDefault(xmlDeregisterNodeFunc func); + +XML_DEPRECATED XMLPUBFUN xmlBufferAllocationScheme + xmlThrDefBufferAllocScheme (xmlBufferAllocationScheme v); +XML_DEPRECATED XMLPUBFUN int + xmlThrDefDefaultBufferSize (int v); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_TREE_H__ */ + +#endif /* XML_TREE_INTERNALS */ + diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/uri.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/uri.h new file mode 100644 index 0000000000000000000000000000000000000000..eb8631cf0811e37e9364373b62fd8b5cc72dbaee --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/uri.h @@ -0,0 +1,95 @@ +/** + * Summary: library of generic URI related routines + * Description: library of generic URI related routines + * Implements RFC 2396 + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_URI_H__ +#define __XML_URI_H__ + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlURI: + * + * A parsed URI reference. This is a struct containing the various fields + * as described in RFC 2396 but separated for further processing. + * + * Note: query is a deprecated field which is incorrectly unescaped. + * query_raw takes precedence over query if the former is set. 
+ * See: http://mail.gnome.org/archives/xml/2007-April/thread.html#00127 + */ +typedef struct _xmlURI xmlURI; +typedef xmlURI *xmlURIPtr; +struct _xmlURI { + char *scheme; /* the URI scheme */ + char *opaque; /* opaque part */ + char *authority; /* the authority part */ + char *server; /* the server part */ + char *user; /* the user part */ + int port; /* the port number */ + char *path; /* the path string */ + char *query; /* the query string (deprecated - use with caution) */ + char *fragment; /* the fragment identifier */ + int cleanup; /* parsing potentially unclean URI */ + char *query_raw; /* the query string (as it appears in the URI) */ +}; + +/* + * This function is in tree.h: + * xmlChar * xmlNodeGetBase (xmlDocPtr doc, + * xmlNodePtr cur); + */ +XMLPUBFUN xmlURIPtr + xmlCreateURI (void); +XMLPUBFUN xmlChar * + xmlBuildURI (const xmlChar *URI, + const xmlChar *base); +XMLPUBFUN xmlChar * + xmlBuildRelativeURI (const xmlChar *URI, + const xmlChar *base); +XMLPUBFUN xmlURIPtr + xmlParseURI (const char *str); +XMLPUBFUN xmlURIPtr + xmlParseURIRaw (const char *str, + int raw); +XMLPUBFUN int + xmlParseURIReference (xmlURIPtr uri, + const char *str); +XMLPUBFUN xmlChar * + xmlSaveUri (xmlURIPtr uri); +XMLPUBFUN void + xmlPrintURI (FILE *stream, + xmlURIPtr uri); +XMLPUBFUN xmlChar * + xmlURIEscapeStr (const xmlChar *str, + const xmlChar *list); +XMLPUBFUN char * + xmlURIUnescapeString (const char *str, + int len, + char *target); +XMLPUBFUN int + xmlNormalizeURIPath (char *path); +XMLPUBFUN xmlChar * + xmlURIEscape (const xmlChar *str); +XMLPUBFUN void + xmlFreeURI (xmlURIPtr uri); +XMLPUBFUN xmlChar* + xmlCanonicPath (const xmlChar *path); +XMLPUBFUN xmlChar* + xmlPathToURI (const xmlChar *path); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_URI_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xinclude.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xinclude.h new file mode 100644 index 0000000000000000000000000000000000000000..e1d135b3baed45e706e8f951eae8a673b30f8aa0 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xinclude.h @@ -0,0 +1,129 @@ +/* + * Summary: implementation of XInclude + * Description: API to handle XInclude processing, + * implements the + * World Wide Web Consortium Last Call Working Draft 10 November 2003 + * http://www.w3.org/TR/2003/WD-xinclude-20031110 + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_XINCLUDE_H__ +#define __XML_XINCLUDE_H__ + +#include +#include + +#ifdef LIBXML_XINCLUDE_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * XINCLUDE_NS: + * + * Macro defining the Xinclude namespace: http://www.w3.org/2003/XInclude + */ +#define XINCLUDE_NS (const xmlChar *) "http://www.w3.org/2003/XInclude" +/** + * XINCLUDE_OLD_NS: + * + * Macro defining the draft Xinclude namespace: http://www.w3.org/2001/XInclude + */ +#define XINCLUDE_OLD_NS (const xmlChar *) "http://www.w3.org/2001/XInclude" +/** + * XINCLUDE_NODE: + * + * Macro defining "include" + */ +#define XINCLUDE_NODE (const xmlChar *) "include" +/** + * XINCLUDE_FALLBACK: + * + * Macro defining "fallback" + */ +#define XINCLUDE_FALLBACK (const xmlChar *) "fallback" +/** + * XINCLUDE_HREF: + * + * Macro defining "href" + */ +#define XINCLUDE_HREF (const xmlChar *) "href" +/** + * XINCLUDE_PARSE: + * + * Macro defining "parse" + */ +#define XINCLUDE_PARSE (const xmlChar *) "parse" +/** + * XINCLUDE_PARSE_XML: + * + * Macro defining "xml" + */ +#define XINCLUDE_PARSE_XML (const xmlChar *) "xml" +/** + * XINCLUDE_PARSE_TEXT: + * + * Macro defining "text" + */ +#define XINCLUDE_PARSE_TEXT (const xmlChar *) "text" +/** + * XINCLUDE_PARSE_ENCODING: + * + * Macro defining "encoding" + */ +#define XINCLUDE_PARSE_ENCODING (const xmlChar *) "encoding" +/** + * XINCLUDE_PARSE_XPOINTER: + * + * Macro defining "xpointer" + */ +#define XINCLUDE_PARSE_XPOINTER (const xmlChar *) "xpointer" + +typedef struct _xmlXIncludeCtxt xmlXIncludeCtxt; +typedef xmlXIncludeCtxt *xmlXIncludeCtxtPtr; + +/* + * standalone processing + */ +XMLPUBFUN int + xmlXIncludeProcess (xmlDocPtr doc); +XMLPUBFUN int + xmlXIncludeProcessFlags (xmlDocPtr doc, + int flags); +XMLPUBFUN int + xmlXIncludeProcessFlagsData(xmlDocPtr doc, + int flags, + void *data); +XMLPUBFUN int + xmlXIncludeProcessTreeFlagsData(xmlNodePtr tree, + int flags, + void *data); +XMLPUBFUN int + xmlXIncludeProcessTree (xmlNodePtr tree); +XMLPUBFUN int + xmlXIncludeProcessTreeFlags(xmlNodePtr tree, + int flags); +/* + * contextual processing + */ +XMLPUBFUN xmlXIncludeCtxtPtr + xmlXIncludeNewContext (xmlDocPtr doc); +XMLPUBFUN int + xmlXIncludeSetFlags (xmlXIncludeCtxtPtr ctxt, + int flags); +XMLPUBFUN void + xmlXIncludeFreeContext (xmlXIncludeCtxtPtr ctxt); +XMLPUBFUN int + xmlXIncludeProcessNode (xmlXIncludeCtxtPtr ctxt, + xmlNodePtr tree); +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XINCLUDE_ENABLED */ + +#endif /* __XML_XINCLUDE_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xlink.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xlink.h new file mode 100644 index 0000000000000000000000000000000000000000..106573666ae462f2b4c1dafe86b39b1afb84530e --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xlink.h @@ -0,0 +1,189 @@ +/* + * Summary: unfinished XLink detection module + * Description: unfinished XLink detection module + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XLINK_H__ +#define __XML_XLINK_H__ + +#include +#include + +#ifdef LIBXML_XPTR_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * Various defines for the various Link properties. + * + * NOTE: the link detection layer will try to resolve QName expansion + * of namespaces. 
If "foo" is the prefix for "http://foo.com/" + * then the link detection layer will expand role="foo:myrole" + * to "http://foo.com/:myrole". + * NOTE: the link detection layer will expand URI-References found on + * href attributes by using the base mechanism if found. + */ +typedef xmlChar *xlinkHRef; +typedef xmlChar *xlinkRole; +typedef xmlChar *xlinkTitle; + +typedef enum { + XLINK_TYPE_NONE = 0, + XLINK_TYPE_SIMPLE, + XLINK_TYPE_EXTENDED, + XLINK_TYPE_EXTENDED_SET +} xlinkType; + +typedef enum { + XLINK_SHOW_NONE = 0, + XLINK_SHOW_NEW, + XLINK_SHOW_EMBED, + XLINK_SHOW_REPLACE +} xlinkShow; + +typedef enum { + XLINK_ACTUATE_NONE = 0, + XLINK_ACTUATE_AUTO, + XLINK_ACTUATE_ONREQUEST +} xlinkActuate; + +/** + * xlinkNodeDetectFunc: + * @ctx: user data pointer + * @node: the node to check + * + * This is the prototype for the link detection routine. + * It calls the default link detection callbacks upon link detection. + */ +typedef void (*xlinkNodeDetectFunc) (void *ctx, xmlNodePtr node); + +/* + * The link detection module interact with the upper layers using + * a set of callback registered at parsing time. + */ + +/** + * xlinkSimpleLinkFunk: + * @ctx: user data pointer + * @node: the node carrying the link + * @href: the target of the link + * @role: the role string + * @title: the link title + * + * This is the prototype for a simple link detection callback. + */ +typedef void +(*xlinkSimpleLinkFunk) (void *ctx, + xmlNodePtr node, + const xlinkHRef href, + const xlinkRole role, + const xlinkTitle title); + +/** + * xlinkExtendedLinkFunk: + * @ctx: user data pointer + * @node: the node carrying the link + * @nbLocators: the number of locators detected on the link + * @hrefs: pointer to the array of locator hrefs + * @roles: pointer to the array of locator roles + * @nbArcs: the number of arcs detected on the link + * @from: pointer to the array of source roles found on the arcs + * @to: pointer to the array of target roles found on the arcs + * @show: array of values for the show attributes found on the arcs + * @actuate: array of values for the actuate attributes found on the arcs + * @nbTitles: the number of titles detected on the link + * @title: array of titles detected on the link + * @langs: array of xml:lang values for the titles + * + * This is the prototype for a extended link detection callback. + */ +typedef void +(*xlinkExtendedLinkFunk)(void *ctx, + xmlNodePtr node, + int nbLocators, + const xlinkHRef *hrefs, + const xlinkRole *roles, + int nbArcs, + const xlinkRole *from, + const xlinkRole *to, + xlinkShow *show, + xlinkActuate *actuate, + int nbTitles, + const xlinkTitle *titles, + const xmlChar **langs); + +/** + * xlinkExtendedLinkSetFunk: + * @ctx: user data pointer + * @node: the node carrying the link + * @nbLocators: the number of locators detected on the link + * @hrefs: pointer to the array of locator hrefs + * @roles: pointer to the array of locator roles + * @nbTitles: the number of titles detected on the link + * @title: array of titles detected on the link + * @langs: array of xml:lang values for the titles + * + * This is the prototype for a extended link set detection callback. + */ +typedef void +(*xlinkExtendedLinkSetFunk) (void *ctx, + xmlNodePtr node, + int nbLocators, + const xlinkHRef *hrefs, + const xlinkRole *roles, + int nbTitles, + const xlinkTitle *titles, + const xmlChar **langs); + +/** + * This is the structure containing a set of Links detection callbacks. 
+ * + * There is no default xlink callbacks, if one want to get link + * recognition activated, those call backs must be provided before parsing. + */ +typedef struct _xlinkHandler xlinkHandler; +typedef xlinkHandler *xlinkHandlerPtr; +struct _xlinkHandler { + xlinkSimpleLinkFunk simple; + xlinkExtendedLinkFunk extended; + xlinkExtendedLinkSetFunk set; +}; + +/* + * The default detection routine, can be overridden, they call the default + * detection callbacks. + */ + +XMLPUBFUN xlinkNodeDetectFunc + xlinkGetDefaultDetect (void); +XMLPUBFUN void + xlinkSetDefaultDetect (xlinkNodeDetectFunc func); + +/* + * Routines to set/get the default handlers. + */ +XMLPUBFUN xlinkHandlerPtr + xlinkGetDefaultHandler (void); +XMLPUBFUN void + xlinkSetDefaultHandler (xlinkHandlerPtr handler); + +/* + * Link detection module itself. + */ +XMLPUBFUN xlinkType + xlinkIsLink (xmlDocPtr doc, + xmlNodePtr node); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPTR_ENABLED */ + +#endif /* __XML_XLINK_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlIO.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlIO.h new file mode 100644 index 0000000000000000000000000000000000000000..2487be3be0d09239811429f49535a8856f37e81b --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlIO.h @@ -0,0 +1,421 @@ +/* + * Summary: interface for the I/O interfaces used by the parser + * Description: interface for the I/O interfaces used by the parser + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_IO_H__ +#define __XML_IO_H__ + +#include +#include +#include +#define XML_TREE_INTERNALS +#include +#undef XML_TREE_INTERNALS + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * Those are the functions and datatypes for the parser input + * I/O structures. + */ + +/** + * xmlInputMatchCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Input API to detect if the current handler + * can provide input functionality for this resource. + * + * Returns 1 if yes and 0 if another Input module should be used + */ +typedef int (*xmlInputMatchCallback) (char const *filename); +/** + * xmlInputOpenCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Input API to open the resource + * + * Returns an Input context or NULL in case or error + */ +typedef void * (*xmlInputOpenCallback) (char const *filename); +/** + * xmlInputReadCallback: + * @context: an Input context + * @buffer: the buffer to store data read + * @len: the length of the buffer in bytes + * + * Callback used in the I/O Input API to read the resource + * + * Returns the number of bytes read or -1 in case of error + */ +typedef int (*xmlInputReadCallback) (void * context, char * buffer, int len); +/** + * xmlInputCloseCallback: + * @context: an Input context + * + * Callback used in the I/O Input API to close the resource + * + * Returns 0 or -1 in case of error + */ +typedef int (*xmlInputCloseCallback) (void * context); + +#ifdef LIBXML_OUTPUT_ENABLED +/* + * Those are the functions and datatypes for the library output + * I/O structures. + */ + +/** + * xmlOutputMatchCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Output API to detect if the current handler + * can provide output functionality for this resource. 
+ * + * Returns 1 if yes and 0 if another Output module should be used + */ +typedef int (*xmlOutputMatchCallback) (char const *filename); +/** + * xmlOutputOpenCallback: + * @filename: the filename or URI + * + * Callback used in the I/O Output API to open the resource + * + * Returns an Output context or NULL in case or error + */ +typedef void * (*xmlOutputOpenCallback) (char const *filename); +/** + * xmlOutputWriteCallback: + * @context: an Output context + * @buffer: the buffer of data to write + * @len: the length of the buffer in bytes + * + * Callback used in the I/O Output API to write to the resource + * + * Returns the number of bytes written or -1 in case of error + */ +typedef int (*xmlOutputWriteCallback) (void * context, const char * buffer, + int len); +/** + * xmlOutputCloseCallback: + * @context: an Output context + * + * Callback used in the I/O Output API to close the resource + * + * Returns 0 or -1 in case of error + */ +typedef int (*xmlOutputCloseCallback) (void * context); +#endif /* LIBXML_OUTPUT_ENABLED */ + +/** + * xmlParserInputBufferCreateFilenameFunc: + * @URI: the URI to read from + * @enc: the requested source encoding + * + * Signature for the function doing the lookup for a suitable input method + * corresponding to an URI. + * + * Returns the new xmlParserInputBufferPtr in case of success or NULL if no + * method was found. + */ +typedef xmlParserInputBufferPtr +(*xmlParserInputBufferCreateFilenameFunc)(const char *URI, xmlCharEncoding enc); + +/** + * xmlOutputBufferCreateFilenameFunc: + * @URI: the URI to write to + * @enc: the requested target encoding + * + * Signature for the function doing the lookup for a suitable output method + * corresponding to an URI. + * + * Returns the new xmlOutputBufferPtr in case of success or NULL if no + * method was found. 
+ */ +typedef xmlOutputBufferPtr +(*xmlOutputBufferCreateFilenameFunc)(const char *URI, + xmlCharEncodingHandlerPtr encoder, int compression); + +struct _xmlParserInputBuffer { + void* context; + xmlInputReadCallback readcallback; + xmlInputCloseCallback closecallback; + + xmlCharEncodingHandlerPtr encoder; /* I18N conversions to UTF-8 */ + + xmlBufPtr buffer; /* Local buffer encoded in UTF-8 */ + xmlBufPtr raw; /* if encoder != NULL buffer for raw input */ + int compressed; /* -1=unknown, 0=not compressed, 1=compressed */ + int error; + unsigned long rawconsumed;/* amount consumed from raw */ +}; + + +#ifdef LIBXML_OUTPUT_ENABLED +struct _xmlOutputBuffer { + void* context; + xmlOutputWriteCallback writecallback; + xmlOutputCloseCallback closecallback; + + xmlCharEncodingHandlerPtr encoder; /* I18N conversions to UTF-8 */ + + xmlBufPtr buffer; /* Local buffer encoded in UTF-8 or ISOLatin */ + xmlBufPtr conv; /* if encoder != NULL buffer for output */ + int written; /* total number of byte written */ + int error; +}; +#endif /* LIBXML_OUTPUT_ENABLED */ + +/** DOC_DISABLE */ +#define XML_GLOBALS_IO \ + XML_OP(xmlParserInputBufferCreateFilenameValue, \ + xmlParserInputBufferCreateFilenameFunc, XML_DEPRECATED) \ + XML_OP(xmlOutputBufferCreateFilenameValue, \ + xmlOutputBufferCreateFilenameFunc, XML_DEPRECATED) + +#define XML_OP XML_DECLARE_GLOBAL +XML_GLOBALS_IO +#undef XML_OP + +#if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlParserInputBufferCreateFilenameValue \ + XML_GLOBAL_MACRO(xmlParserInputBufferCreateFilenameValue) + #define xmlOutputBufferCreateFilenameValue \ + XML_GLOBAL_MACRO(xmlOutputBufferCreateFilenameValue) +#endif +/** DOC_ENABLE */ + +/* + * Interfaces for input + */ +XMLPUBFUN void + xmlCleanupInputCallbacks (void); + +XMLPUBFUN int + xmlPopInputCallbacks (void); + +XMLPUBFUN void + xmlRegisterDefaultInputCallbacks (void); +XMLPUBFUN xmlParserInputBufferPtr + xmlAllocParserInputBuffer (xmlCharEncoding enc); + +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateFilename (const char *URI, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateFile (FILE *file, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateFd (int fd, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateMem (const char *mem, int size, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateStatic (const char *mem, int size, + xmlCharEncoding enc); +XMLPUBFUN xmlParserInputBufferPtr + xmlParserInputBufferCreateIO (xmlInputReadCallback ioread, + xmlInputCloseCallback ioclose, + void *ioctx, + xmlCharEncoding enc); +XMLPUBFUN int + xmlParserInputBufferRead (xmlParserInputBufferPtr in, + int len); +XMLPUBFUN int + xmlParserInputBufferGrow (xmlParserInputBufferPtr in, + int len); +XMLPUBFUN int + xmlParserInputBufferPush (xmlParserInputBufferPtr in, + int len, + const char *buf); +XMLPUBFUN void + xmlFreeParserInputBuffer (xmlParserInputBufferPtr in); +XMLPUBFUN char * + xmlParserGetDirectory (const char *filename); + +XMLPUBFUN int + xmlRegisterInputCallbacks (xmlInputMatchCallback matchFunc, + xmlInputOpenCallback openFunc, + xmlInputReadCallback readFunc, + xmlInputCloseCallback closeFunc); + +xmlParserInputBufferPtr + __xmlParserInputBufferCreateFilename(const char *URI, + xmlCharEncoding enc); + +#ifdef LIBXML_OUTPUT_ENABLED +/* + * Interfaces for output + */ +XMLPUBFUN void + xmlCleanupOutputCallbacks (void); +XMLPUBFUN 
int + xmlPopOutputCallbacks (void); +XMLPUBFUN void + xmlRegisterDefaultOutputCallbacks(void); +XMLPUBFUN xmlOutputBufferPtr + xmlAllocOutputBuffer (xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateFilename (const char *URI, + xmlCharEncodingHandlerPtr encoder, + int compression); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateFile (FILE *file, + xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateBuffer (xmlBufferPtr buffer, + xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateFd (int fd, + xmlCharEncodingHandlerPtr encoder); + +XMLPUBFUN xmlOutputBufferPtr + xmlOutputBufferCreateIO (xmlOutputWriteCallback iowrite, + xmlOutputCloseCallback ioclose, + void *ioctx, + xmlCharEncodingHandlerPtr encoder); + +/* Couple of APIs to get the output without digging into the buffers */ +XMLPUBFUN const xmlChar * + xmlOutputBufferGetContent (xmlOutputBufferPtr out); +XMLPUBFUN size_t + xmlOutputBufferGetSize (xmlOutputBufferPtr out); + +XMLPUBFUN int + xmlOutputBufferWrite (xmlOutputBufferPtr out, + int len, + const char *buf); +XMLPUBFUN int + xmlOutputBufferWriteString (xmlOutputBufferPtr out, + const char *str); +XMLPUBFUN int + xmlOutputBufferWriteEscape (xmlOutputBufferPtr out, + const xmlChar *str, + xmlCharEncodingOutputFunc escaping); + +XMLPUBFUN int + xmlOutputBufferFlush (xmlOutputBufferPtr out); +XMLPUBFUN int + xmlOutputBufferClose (xmlOutputBufferPtr out); + +XMLPUBFUN int + xmlRegisterOutputCallbacks (xmlOutputMatchCallback matchFunc, + xmlOutputOpenCallback openFunc, + xmlOutputWriteCallback writeFunc, + xmlOutputCloseCallback closeFunc); + +xmlOutputBufferPtr + __xmlOutputBufferCreateFilename(const char *URI, + xmlCharEncodingHandlerPtr encoder, + int compression); + +#ifdef LIBXML_HTTP_ENABLED +/* This function only exists if HTTP support built into the library */ +XMLPUBFUN void + xmlRegisterHTTPPostCallbacks (void ); +#endif /* LIBXML_HTTP_ENABLED */ + +#endif /* LIBXML_OUTPUT_ENABLED */ + +XMLPUBFUN xmlParserInputPtr + xmlCheckHTTPInput (xmlParserCtxtPtr ctxt, + xmlParserInputPtr ret); + +/* + * A predefined entity loader disabling network accesses + */ +XMLPUBFUN xmlParserInputPtr + xmlNoNetExternalEntityLoader (const char *URL, + const char *ID, + xmlParserCtxtPtr ctxt); + +/* + * xmlNormalizeWindowsPath is obsolete, don't use it. + * Check xmlCanonicPath in uri.h for a better alternative. 
+ */ +XMLPUBFUN xmlChar * + xmlNormalizeWindowsPath (const xmlChar *path); + +XMLPUBFUN int + xmlCheckFilename (const char *path); +/** + * Default 'file://' protocol callbacks + */ +XMLPUBFUN int + xmlFileMatch (const char *filename); +XMLPUBFUN void * + xmlFileOpen (const char *filename); +XMLPUBFUN int + xmlFileRead (void * context, + char * buffer, + int len); +XMLPUBFUN int + xmlFileClose (void * context); + +/** + * Default 'http://' protocol callbacks + */ +#ifdef LIBXML_HTTP_ENABLED +XMLPUBFUN int + xmlIOHTTPMatch (const char *filename); +XMLPUBFUN void * + xmlIOHTTPOpen (const char *filename); +#ifdef LIBXML_OUTPUT_ENABLED +XMLPUBFUN void * + xmlIOHTTPOpenW (const char * post_uri, + int compression ); +#endif /* LIBXML_OUTPUT_ENABLED */ +XMLPUBFUN int + xmlIOHTTPRead (void * context, + char * buffer, + int len); +XMLPUBFUN int + xmlIOHTTPClose (void * context); +#endif /* LIBXML_HTTP_ENABLED */ + +/** + * Default 'ftp://' protocol callbacks + */ +#if defined(LIBXML_FTP_ENABLED) +XMLPUBFUN int + xmlIOFTPMatch (const char *filename); +XMLPUBFUN void * + xmlIOFTPOpen (const char *filename); +XMLPUBFUN int + xmlIOFTPRead (void * context, + char * buffer, + int len); +XMLPUBFUN int + xmlIOFTPClose (void * context); +#endif /* defined(LIBXML_FTP_ENABLED) */ + +XMLPUBFUN xmlParserInputBufferCreateFilenameFunc + xmlParserInputBufferCreateFilenameDefault( + xmlParserInputBufferCreateFilenameFunc func); +XMLPUBFUN xmlOutputBufferCreateFilenameFunc + xmlOutputBufferCreateFilenameDefault( + xmlOutputBufferCreateFilenameFunc func); +XMLPUBFUN xmlOutputBufferCreateFilenameFunc + xmlThrDefOutputBufferCreateFilenameDefault( + xmlOutputBufferCreateFilenameFunc func); +XMLPUBFUN xmlParserInputBufferCreateFilenameFunc + xmlThrDefParserInputBufferCreateFilenameDefault( + xmlParserInputBufferCreateFilenameFunc func); + +#ifdef __cplusplus +} +#endif + +#endif /* __XML_IO_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlautomata.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlautomata.h new file mode 100644 index 0000000000000000000000000000000000000000..ea38eb37f09fe43a34d0d3f6fedbb8313d9b839f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlautomata.h @@ -0,0 +1,146 @@ +/* + * Summary: API to build regexp automata + * Description: the API to build regexp automata + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_AUTOMATA_H__ +#define __XML_AUTOMATA_H__ + +#include + +#ifdef LIBXML_REGEXP_ENABLED +#ifdef LIBXML_AUTOMATA_ENABLED + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlAutomataPtr: + * + * A libxml automata description, It can be compiled into a regexp + */ +typedef struct _xmlAutomata xmlAutomata; +typedef xmlAutomata *xmlAutomataPtr; + +/** + * xmlAutomataStatePtr: + * + * A state int the automata description, + */ +typedef struct _xmlAutomataState xmlAutomataState; +typedef xmlAutomataState *xmlAutomataStatePtr; + +/* + * Building API + */ +XMLPUBFUN xmlAutomataPtr + xmlNewAutomata (void); +XMLPUBFUN void + xmlFreeAutomata (xmlAutomataPtr am); + +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataGetInitState (xmlAutomataPtr am); +XMLPUBFUN int + xmlAutomataSetFinalState (xmlAutomataPtr am, + xmlAutomataStatePtr state); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewState (xmlAutomataPtr am); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewTransition (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewTransition2 (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewNegTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + void *data); + +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCountTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + int min, + int max, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCountTrans2 (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + int min, + int max, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewOnceTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + int min, + int max, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewOnceTrans2 (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + const xmlChar *token, + const xmlChar *token2, + int min, + int max, + void *data); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewAllTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + int lax); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewEpsilon (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCountedTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + int counter); +XMLPUBFUN xmlAutomataStatePtr + xmlAutomataNewCounterTrans (xmlAutomataPtr am, + xmlAutomataStatePtr from, + xmlAutomataStatePtr to, + int counter); +XMLPUBFUN int + xmlAutomataNewCounter (xmlAutomataPtr am, + int min, + int max); + +XMLPUBFUN struct _xmlRegexp * + xmlAutomataCompile (xmlAutomataPtr am); +XMLPUBFUN int + xmlAutomataIsDeterminist (xmlAutomataPtr am); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_AUTOMATA_ENABLED */ +#endif /* LIBXML_REGEXP_ENABLED */ + +#endif /* __XML_AUTOMATA_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlerror.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlerror.h new file mode 100644 index 
0000000000000000000000000000000000000000..1f0ab4a3a2fa6c08457dd99c050c653eefdbcb27 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlerror.h @@ -0,0 +1,948 @@ +/* + * Summary: error handling + * Description: the API used to report errors + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_ERROR_H__ +#define __XML_ERROR_H__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlErrorLevel: + * + * Indicates the level of an error + */ +typedef enum { + XML_ERR_NONE = 0, + XML_ERR_WARNING = 1, /* A simple warning */ + XML_ERR_ERROR = 2, /* A recoverable error */ + XML_ERR_FATAL = 3 /* A fatal error */ +} xmlErrorLevel; + +/** + * xmlErrorDomain: + * + * Indicates where an error may have come from + */ +typedef enum { + XML_FROM_NONE = 0, + XML_FROM_PARSER, /* The XML parser */ + XML_FROM_TREE, /* The tree module */ + XML_FROM_NAMESPACE, /* The XML Namespace module */ + XML_FROM_DTD, /* The XML DTD validation with parser context*/ + XML_FROM_HTML, /* The HTML parser */ + XML_FROM_MEMORY, /* The memory allocator */ + XML_FROM_OUTPUT, /* The serialization code */ + XML_FROM_IO, /* The Input/Output stack */ + XML_FROM_FTP, /* The FTP module */ + XML_FROM_HTTP, /* The HTTP module */ + XML_FROM_XINCLUDE, /* The XInclude processing */ + XML_FROM_XPATH, /* The XPath module */ + XML_FROM_XPOINTER, /* The XPointer module */ + XML_FROM_REGEXP, /* The regular expressions module */ + XML_FROM_DATATYPE, /* The W3C XML Schemas Datatype module */ + XML_FROM_SCHEMASP, /* The W3C XML Schemas parser module */ + XML_FROM_SCHEMASV, /* The W3C XML Schemas validation module */ + XML_FROM_RELAXNGP, /* The Relax-NG parser module */ + XML_FROM_RELAXNGV, /* The Relax-NG validator module */ + XML_FROM_CATALOG, /* The Catalog module */ + XML_FROM_C14N, /* The Canonicalization module */ + XML_FROM_XSLT, /* The XSLT engine from libxslt */ + XML_FROM_VALID, /* The XML DTD validation with valid context */ + XML_FROM_CHECK, /* The error checking module */ + XML_FROM_WRITER, /* The xmlwriter module */ + XML_FROM_MODULE, /* The dynamically loaded module module*/ + XML_FROM_I18N, /* The module handling character conversion */ + XML_FROM_SCHEMATRONV,/* The Schematron validator module */ + XML_FROM_BUFFER, /* The buffers module */ + XML_FROM_URI /* The URI module */ +} xmlErrorDomain; + +/** + * xmlError: + * + * An XML Error instance. + */ + +typedef struct _xmlError xmlError; +typedef xmlError *xmlErrorPtr; +struct _xmlError { + int domain; /* What part of the library raised this error */ + int code; /* The error code, e.g. 
an xmlParserError */ + char *message;/* human-readable informative error message */ + xmlErrorLevel level;/* how consequent is the error */ + char *file; /* the filename */ + int line; /* the line number if available */ + char *str1; /* extra string information */ + char *str2; /* extra string information */ + char *str3; /* extra string information */ + int int1; /* extra number information */ + int int2; /* error column # or 0 if N/A (todo: rename field when we would brk ABI) */ + void *ctxt; /* the parser context if available */ + void *node; /* the node in the tree */ +}; + +/** + * xmlParserError: + * + * This is an error that the XML (or HTML) parser can generate + */ +typedef enum { + XML_ERR_OK = 0, + XML_ERR_INTERNAL_ERROR, /* 1 */ + XML_ERR_NO_MEMORY, /* 2 */ + XML_ERR_DOCUMENT_START, /* 3 */ + XML_ERR_DOCUMENT_EMPTY, /* 4 */ + XML_ERR_DOCUMENT_END, /* 5 */ + XML_ERR_INVALID_HEX_CHARREF, /* 6 */ + XML_ERR_INVALID_DEC_CHARREF, /* 7 */ + XML_ERR_INVALID_CHARREF, /* 8 */ + XML_ERR_INVALID_CHAR, /* 9 */ + XML_ERR_CHARREF_AT_EOF, /* 10 */ + XML_ERR_CHARREF_IN_PROLOG, /* 11 */ + XML_ERR_CHARREF_IN_EPILOG, /* 12 */ + XML_ERR_CHARREF_IN_DTD, /* 13 */ + XML_ERR_ENTITYREF_AT_EOF, /* 14 */ + XML_ERR_ENTITYREF_IN_PROLOG, /* 15 */ + XML_ERR_ENTITYREF_IN_EPILOG, /* 16 */ + XML_ERR_ENTITYREF_IN_DTD, /* 17 */ + XML_ERR_PEREF_AT_EOF, /* 18 */ + XML_ERR_PEREF_IN_PROLOG, /* 19 */ + XML_ERR_PEREF_IN_EPILOG, /* 20 */ + XML_ERR_PEREF_IN_INT_SUBSET, /* 21 */ + XML_ERR_ENTITYREF_NO_NAME, /* 22 */ + XML_ERR_ENTITYREF_SEMICOL_MISSING, /* 23 */ + XML_ERR_PEREF_NO_NAME, /* 24 */ + XML_ERR_PEREF_SEMICOL_MISSING, /* 25 */ + XML_ERR_UNDECLARED_ENTITY, /* 26 */ + XML_WAR_UNDECLARED_ENTITY, /* 27 */ + XML_ERR_UNPARSED_ENTITY, /* 28 */ + XML_ERR_ENTITY_IS_EXTERNAL, /* 29 */ + XML_ERR_ENTITY_IS_PARAMETER, /* 30 */ + XML_ERR_UNKNOWN_ENCODING, /* 31 */ + XML_ERR_UNSUPPORTED_ENCODING, /* 32 */ + XML_ERR_STRING_NOT_STARTED, /* 33 */ + XML_ERR_STRING_NOT_CLOSED, /* 34 */ + XML_ERR_NS_DECL_ERROR, /* 35 */ + XML_ERR_ENTITY_NOT_STARTED, /* 36 */ + XML_ERR_ENTITY_NOT_FINISHED, /* 37 */ + XML_ERR_LT_IN_ATTRIBUTE, /* 38 */ + XML_ERR_ATTRIBUTE_NOT_STARTED, /* 39 */ + XML_ERR_ATTRIBUTE_NOT_FINISHED, /* 40 */ + XML_ERR_ATTRIBUTE_WITHOUT_VALUE, /* 41 */ + XML_ERR_ATTRIBUTE_REDEFINED, /* 42 */ + XML_ERR_LITERAL_NOT_STARTED, /* 43 */ + XML_ERR_LITERAL_NOT_FINISHED, /* 44 */ + XML_ERR_COMMENT_NOT_FINISHED, /* 45 */ + XML_ERR_PI_NOT_STARTED, /* 46 */ + XML_ERR_PI_NOT_FINISHED, /* 47 */ + XML_ERR_NOTATION_NOT_STARTED, /* 48 */ + XML_ERR_NOTATION_NOT_FINISHED, /* 49 */ + XML_ERR_ATTLIST_NOT_STARTED, /* 50 */ + XML_ERR_ATTLIST_NOT_FINISHED, /* 51 */ + XML_ERR_MIXED_NOT_STARTED, /* 52 */ + XML_ERR_MIXED_NOT_FINISHED, /* 53 */ + XML_ERR_ELEMCONTENT_NOT_STARTED, /* 54 */ + XML_ERR_ELEMCONTENT_NOT_FINISHED, /* 55 */ + XML_ERR_XMLDECL_NOT_STARTED, /* 56 */ + XML_ERR_XMLDECL_NOT_FINISHED, /* 57 */ + XML_ERR_CONDSEC_NOT_STARTED, /* 58 */ + XML_ERR_CONDSEC_NOT_FINISHED, /* 59 */ + XML_ERR_EXT_SUBSET_NOT_FINISHED, /* 60 */ + XML_ERR_DOCTYPE_NOT_FINISHED, /* 61 */ + XML_ERR_MISPLACED_CDATA_END, /* 62 */ + XML_ERR_CDATA_NOT_FINISHED, /* 63 */ + XML_ERR_RESERVED_XML_NAME, /* 64 */ + XML_ERR_SPACE_REQUIRED, /* 65 */ + XML_ERR_SEPARATOR_REQUIRED, /* 66 */ + XML_ERR_NMTOKEN_REQUIRED, /* 67 */ + XML_ERR_NAME_REQUIRED, /* 68 */ + XML_ERR_PCDATA_REQUIRED, /* 69 */ + XML_ERR_URI_REQUIRED, /* 70 */ + XML_ERR_PUBID_REQUIRED, /* 71 */ + XML_ERR_LT_REQUIRED, /* 72 */ + XML_ERR_GT_REQUIRED, /* 73 */ + XML_ERR_LTSLASH_REQUIRED, /* 74 */ + 
XML_ERR_EQUAL_REQUIRED, /* 75 */ + XML_ERR_TAG_NAME_MISMATCH, /* 76 */ + XML_ERR_TAG_NOT_FINISHED, /* 77 */ + XML_ERR_STANDALONE_VALUE, /* 78 */ + XML_ERR_ENCODING_NAME, /* 79 */ + XML_ERR_HYPHEN_IN_COMMENT, /* 80 */ + XML_ERR_INVALID_ENCODING, /* 81 */ + XML_ERR_EXT_ENTITY_STANDALONE, /* 82 */ + XML_ERR_CONDSEC_INVALID, /* 83 */ + XML_ERR_VALUE_REQUIRED, /* 84 */ + XML_ERR_NOT_WELL_BALANCED, /* 85 */ + XML_ERR_EXTRA_CONTENT, /* 86 */ + XML_ERR_ENTITY_CHAR_ERROR, /* 87 */ + XML_ERR_ENTITY_PE_INTERNAL, /* 88 */ + XML_ERR_ENTITY_LOOP, /* 89 */ + XML_ERR_ENTITY_BOUNDARY, /* 90 */ + XML_ERR_INVALID_URI, /* 91 */ + XML_ERR_URI_FRAGMENT, /* 92 */ + XML_WAR_CATALOG_PI, /* 93 */ + XML_ERR_NO_DTD, /* 94 */ + XML_ERR_CONDSEC_INVALID_KEYWORD, /* 95 */ + XML_ERR_VERSION_MISSING, /* 96 */ + XML_WAR_UNKNOWN_VERSION, /* 97 */ + XML_WAR_LANG_VALUE, /* 98 */ + XML_WAR_NS_URI, /* 99 */ + XML_WAR_NS_URI_RELATIVE, /* 100 */ + XML_ERR_MISSING_ENCODING, /* 101 */ + XML_WAR_SPACE_VALUE, /* 102 */ + XML_ERR_NOT_STANDALONE, /* 103 */ + XML_ERR_ENTITY_PROCESSING, /* 104 */ + XML_ERR_NOTATION_PROCESSING, /* 105 */ + XML_WAR_NS_COLUMN, /* 106 */ + XML_WAR_ENTITY_REDEFINED, /* 107 */ + XML_ERR_UNKNOWN_VERSION, /* 108 */ + XML_ERR_VERSION_MISMATCH, /* 109 */ + XML_ERR_NAME_TOO_LONG, /* 110 */ + XML_ERR_USER_STOP, /* 111 */ + XML_ERR_COMMENT_ABRUPTLY_ENDED, /* 112 */ + XML_WAR_ENCODING_MISMATCH, /* 113 */ + XML_NS_ERR_XML_NAMESPACE = 200, + XML_NS_ERR_UNDEFINED_NAMESPACE, /* 201 */ + XML_NS_ERR_QNAME, /* 202 */ + XML_NS_ERR_ATTRIBUTE_REDEFINED, /* 203 */ + XML_NS_ERR_EMPTY, /* 204 */ + XML_NS_ERR_COLON, /* 205 */ + XML_DTD_ATTRIBUTE_DEFAULT = 500, + XML_DTD_ATTRIBUTE_REDEFINED, /* 501 */ + XML_DTD_ATTRIBUTE_VALUE, /* 502 */ + XML_DTD_CONTENT_ERROR, /* 503 */ + XML_DTD_CONTENT_MODEL, /* 504 */ + XML_DTD_CONTENT_NOT_DETERMINIST, /* 505 */ + XML_DTD_DIFFERENT_PREFIX, /* 506 */ + XML_DTD_ELEM_DEFAULT_NAMESPACE, /* 507 */ + XML_DTD_ELEM_NAMESPACE, /* 508 */ + XML_DTD_ELEM_REDEFINED, /* 509 */ + XML_DTD_EMPTY_NOTATION, /* 510 */ + XML_DTD_ENTITY_TYPE, /* 511 */ + XML_DTD_ID_FIXED, /* 512 */ + XML_DTD_ID_REDEFINED, /* 513 */ + XML_DTD_ID_SUBSET, /* 514 */ + XML_DTD_INVALID_CHILD, /* 515 */ + XML_DTD_INVALID_DEFAULT, /* 516 */ + XML_DTD_LOAD_ERROR, /* 517 */ + XML_DTD_MISSING_ATTRIBUTE, /* 518 */ + XML_DTD_MIXED_CORRUPT, /* 519 */ + XML_DTD_MULTIPLE_ID, /* 520 */ + XML_DTD_NO_DOC, /* 521 */ + XML_DTD_NO_DTD, /* 522 */ + XML_DTD_NO_ELEM_NAME, /* 523 */ + XML_DTD_NO_PREFIX, /* 524 */ + XML_DTD_NO_ROOT, /* 525 */ + XML_DTD_NOTATION_REDEFINED, /* 526 */ + XML_DTD_NOTATION_VALUE, /* 527 */ + XML_DTD_NOT_EMPTY, /* 528 */ + XML_DTD_NOT_PCDATA, /* 529 */ + XML_DTD_NOT_STANDALONE, /* 530 */ + XML_DTD_ROOT_NAME, /* 531 */ + XML_DTD_STANDALONE_WHITE_SPACE, /* 532 */ + XML_DTD_UNKNOWN_ATTRIBUTE, /* 533 */ + XML_DTD_UNKNOWN_ELEM, /* 534 */ + XML_DTD_UNKNOWN_ENTITY, /* 535 */ + XML_DTD_UNKNOWN_ID, /* 536 */ + XML_DTD_UNKNOWN_NOTATION, /* 537 */ + XML_DTD_STANDALONE_DEFAULTED, /* 538 */ + XML_DTD_XMLID_VALUE, /* 539 */ + XML_DTD_XMLID_TYPE, /* 540 */ + XML_DTD_DUP_TOKEN, /* 541 */ + XML_HTML_STRUCURE_ERROR = 800, + XML_HTML_UNKNOWN_TAG, /* 801 */ + XML_HTML_INCORRECTLY_OPENED_COMMENT, /* 802 */ + XML_RNGP_ANYNAME_ATTR_ANCESTOR = 1000, + XML_RNGP_ATTR_CONFLICT, /* 1001 */ + XML_RNGP_ATTRIBUTE_CHILDREN, /* 1002 */ + XML_RNGP_ATTRIBUTE_CONTENT, /* 1003 */ + XML_RNGP_ATTRIBUTE_EMPTY, /* 1004 */ + XML_RNGP_ATTRIBUTE_NOOP, /* 1005 */ + XML_RNGP_CHOICE_CONTENT, /* 1006 */ + XML_RNGP_CHOICE_EMPTY, /* 1007 */ + XML_RNGP_CREATE_FAILURE, /* 1008 */ + 
XML_RNGP_DATA_CONTENT, /* 1009 */ + XML_RNGP_DEF_CHOICE_AND_INTERLEAVE, /* 1010 */ + XML_RNGP_DEFINE_CREATE_FAILED, /* 1011 */ + XML_RNGP_DEFINE_EMPTY, /* 1012 */ + XML_RNGP_DEFINE_MISSING, /* 1013 */ + XML_RNGP_DEFINE_NAME_MISSING, /* 1014 */ + XML_RNGP_ELEM_CONTENT_EMPTY, /* 1015 */ + XML_RNGP_ELEM_CONTENT_ERROR, /* 1016 */ + XML_RNGP_ELEMENT_EMPTY, /* 1017 */ + XML_RNGP_ELEMENT_CONTENT, /* 1018 */ + XML_RNGP_ELEMENT_NAME, /* 1019 */ + XML_RNGP_ELEMENT_NO_CONTENT, /* 1020 */ + XML_RNGP_ELEM_TEXT_CONFLICT, /* 1021 */ + XML_RNGP_EMPTY, /* 1022 */ + XML_RNGP_EMPTY_CONSTRUCT, /* 1023 */ + XML_RNGP_EMPTY_CONTENT, /* 1024 */ + XML_RNGP_EMPTY_NOT_EMPTY, /* 1025 */ + XML_RNGP_ERROR_TYPE_LIB, /* 1026 */ + XML_RNGP_EXCEPT_EMPTY, /* 1027 */ + XML_RNGP_EXCEPT_MISSING, /* 1028 */ + XML_RNGP_EXCEPT_MULTIPLE, /* 1029 */ + XML_RNGP_EXCEPT_NO_CONTENT, /* 1030 */ + XML_RNGP_EXTERNALREF_EMTPY, /* 1031 */ + XML_RNGP_EXTERNAL_REF_FAILURE, /* 1032 */ + XML_RNGP_EXTERNALREF_RECURSE, /* 1033 */ + XML_RNGP_FORBIDDEN_ATTRIBUTE, /* 1034 */ + XML_RNGP_FOREIGN_ELEMENT, /* 1035 */ + XML_RNGP_GRAMMAR_CONTENT, /* 1036 */ + XML_RNGP_GRAMMAR_EMPTY, /* 1037 */ + XML_RNGP_GRAMMAR_MISSING, /* 1038 */ + XML_RNGP_GRAMMAR_NO_START, /* 1039 */ + XML_RNGP_GROUP_ATTR_CONFLICT, /* 1040 */ + XML_RNGP_HREF_ERROR, /* 1041 */ + XML_RNGP_INCLUDE_EMPTY, /* 1042 */ + XML_RNGP_INCLUDE_FAILURE, /* 1043 */ + XML_RNGP_INCLUDE_RECURSE, /* 1044 */ + XML_RNGP_INTERLEAVE_ADD, /* 1045 */ + XML_RNGP_INTERLEAVE_CREATE_FAILED, /* 1046 */ + XML_RNGP_INTERLEAVE_EMPTY, /* 1047 */ + XML_RNGP_INTERLEAVE_NO_CONTENT, /* 1048 */ + XML_RNGP_INVALID_DEFINE_NAME, /* 1049 */ + XML_RNGP_INVALID_URI, /* 1050 */ + XML_RNGP_INVALID_VALUE, /* 1051 */ + XML_RNGP_MISSING_HREF, /* 1052 */ + XML_RNGP_NAME_MISSING, /* 1053 */ + XML_RNGP_NEED_COMBINE, /* 1054 */ + XML_RNGP_NOTALLOWED_NOT_EMPTY, /* 1055 */ + XML_RNGP_NSNAME_ATTR_ANCESTOR, /* 1056 */ + XML_RNGP_NSNAME_NO_NS, /* 1057 */ + XML_RNGP_PARAM_FORBIDDEN, /* 1058 */ + XML_RNGP_PARAM_NAME_MISSING, /* 1059 */ + XML_RNGP_PARENTREF_CREATE_FAILED, /* 1060 */ + XML_RNGP_PARENTREF_NAME_INVALID, /* 1061 */ + XML_RNGP_PARENTREF_NO_NAME, /* 1062 */ + XML_RNGP_PARENTREF_NO_PARENT, /* 1063 */ + XML_RNGP_PARENTREF_NOT_EMPTY, /* 1064 */ + XML_RNGP_PARSE_ERROR, /* 1065 */ + XML_RNGP_PAT_ANYNAME_EXCEPT_ANYNAME, /* 1066 */ + XML_RNGP_PAT_ATTR_ATTR, /* 1067 */ + XML_RNGP_PAT_ATTR_ELEM, /* 1068 */ + XML_RNGP_PAT_DATA_EXCEPT_ATTR, /* 1069 */ + XML_RNGP_PAT_DATA_EXCEPT_ELEM, /* 1070 */ + XML_RNGP_PAT_DATA_EXCEPT_EMPTY, /* 1071 */ + XML_RNGP_PAT_DATA_EXCEPT_GROUP, /* 1072 */ + XML_RNGP_PAT_DATA_EXCEPT_INTERLEAVE, /* 1073 */ + XML_RNGP_PAT_DATA_EXCEPT_LIST, /* 1074 */ + XML_RNGP_PAT_DATA_EXCEPT_ONEMORE, /* 1075 */ + XML_RNGP_PAT_DATA_EXCEPT_REF, /* 1076 */ + XML_RNGP_PAT_DATA_EXCEPT_TEXT, /* 1077 */ + XML_RNGP_PAT_LIST_ATTR, /* 1078 */ + XML_RNGP_PAT_LIST_ELEM, /* 1079 */ + XML_RNGP_PAT_LIST_INTERLEAVE, /* 1080 */ + XML_RNGP_PAT_LIST_LIST, /* 1081 */ + XML_RNGP_PAT_LIST_REF, /* 1082 */ + XML_RNGP_PAT_LIST_TEXT, /* 1083 */ + XML_RNGP_PAT_NSNAME_EXCEPT_ANYNAME, /* 1084 */ + XML_RNGP_PAT_NSNAME_EXCEPT_NSNAME, /* 1085 */ + XML_RNGP_PAT_ONEMORE_GROUP_ATTR, /* 1086 */ + XML_RNGP_PAT_ONEMORE_INTERLEAVE_ATTR, /* 1087 */ + XML_RNGP_PAT_START_ATTR, /* 1088 */ + XML_RNGP_PAT_START_DATA, /* 1089 */ + XML_RNGP_PAT_START_EMPTY, /* 1090 */ + XML_RNGP_PAT_START_GROUP, /* 1091 */ + XML_RNGP_PAT_START_INTERLEAVE, /* 1092 */ + XML_RNGP_PAT_START_LIST, /* 1093 */ + XML_RNGP_PAT_START_ONEMORE, /* 1094 */ + XML_RNGP_PAT_START_TEXT, /* 1095 */ + 
XML_RNGP_PAT_START_VALUE, /* 1096 */ + XML_RNGP_PREFIX_UNDEFINED, /* 1097 */ + XML_RNGP_REF_CREATE_FAILED, /* 1098 */ + XML_RNGP_REF_CYCLE, /* 1099 */ + XML_RNGP_REF_NAME_INVALID, /* 1100 */ + XML_RNGP_REF_NO_DEF, /* 1101 */ + XML_RNGP_REF_NO_NAME, /* 1102 */ + XML_RNGP_REF_NOT_EMPTY, /* 1103 */ + XML_RNGP_START_CHOICE_AND_INTERLEAVE, /* 1104 */ + XML_RNGP_START_CONTENT, /* 1105 */ + XML_RNGP_START_EMPTY, /* 1106 */ + XML_RNGP_START_MISSING, /* 1107 */ + XML_RNGP_TEXT_EXPECTED, /* 1108 */ + XML_RNGP_TEXT_HAS_CHILD, /* 1109 */ + XML_RNGP_TYPE_MISSING, /* 1110 */ + XML_RNGP_TYPE_NOT_FOUND, /* 1111 */ + XML_RNGP_TYPE_VALUE, /* 1112 */ + XML_RNGP_UNKNOWN_ATTRIBUTE, /* 1113 */ + XML_RNGP_UNKNOWN_COMBINE, /* 1114 */ + XML_RNGP_UNKNOWN_CONSTRUCT, /* 1115 */ + XML_RNGP_UNKNOWN_TYPE_LIB, /* 1116 */ + XML_RNGP_URI_FRAGMENT, /* 1117 */ + XML_RNGP_URI_NOT_ABSOLUTE, /* 1118 */ + XML_RNGP_VALUE_EMPTY, /* 1119 */ + XML_RNGP_VALUE_NO_CONTENT, /* 1120 */ + XML_RNGP_XMLNS_NAME, /* 1121 */ + XML_RNGP_XML_NS, /* 1122 */ + XML_XPATH_EXPRESSION_OK = 1200, + XML_XPATH_NUMBER_ERROR, /* 1201 */ + XML_XPATH_UNFINISHED_LITERAL_ERROR, /* 1202 */ + XML_XPATH_START_LITERAL_ERROR, /* 1203 */ + XML_XPATH_VARIABLE_REF_ERROR, /* 1204 */ + XML_XPATH_UNDEF_VARIABLE_ERROR, /* 1205 */ + XML_XPATH_INVALID_PREDICATE_ERROR, /* 1206 */ + XML_XPATH_EXPR_ERROR, /* 1207 */ + XML_XPATH_UNCLOSED_ERROR, /* 1208 */ + XML_XPATH_UNKNOWN_FUNC_ERROR, /* 1209 */ + XML_XPATH_INVALID_OPERAND, /* 1210 */ + XML_XPATH_INVALID_TYPE, /* 1211 */ + XML_XPATH_INVALID_ARITY, /* 1212 */ + XML_XPATH_INVALID_CTXT_SIZE, /* 1213 */ + XML_XPATH_INVALID_CTXT_POSITION, /* 1214 */ + XML_XPATH_MEMORY_ERROR, /* 1215 */ + XML_XPTR_SYNTAX_ERROR, /* 1216 */ + XML_XPTR_RESOURCE_ERROR, /* 1217 */ + XML_XPTR_SUB_RESOURCE_ERROR, /* 1218 */ + XML_XPATH_UNDEF_PREFIX_ERROR, /* 1219 */ + XML_XPATH_ENCODING_ERROR, /* 1220 */ + XML_XPATH_INVALID_CHAR_ERROR, /* 1221 */ + XML_TREE_INVALID_HEX = 1300, + XML_TREE_INVALID_DEC, /* 1301 */ + XML_TREE_UNTERMINATED_ENTITY, /* 1302 */ + XML_TREE_NOT_UTF8, /* 1303 */ + XML_SAVE_NOT_UTF8 = 1400, + XML_SAVE_CHAR_INVALID, /* 1401 */ + XML_SAVE_NO_DOCTYPE, /* 1402 */ + XML_SAVE_UNKNOWN_ENCODING, /* 1403 */ + XML_REGEXP_COMPILE_ERROR = 1450, + XML_IO_UNKNOWN = 1500, + XML_IO_EACCES, /* 1501 */ + XML_IO_EAGAIN, /* 1502 */ + XML_IO_EBADF, /* 1503 */ + XML_IO_EBADMSG, /* 1504 */ + XML_IO_EBUSY, /* 1505 */ + XML_IO_ECANCELED, /* 1506 */ + XML_IO_ECHILD, /* 1507 */ + XML_IO_EDEADLK, /* 1508 */ + XML_IO_EDOM, /* 1509 */ + XML_IO_EEXIST, /* 1510 */ + XML_IO_EFAULT, /* 1511 */ + XML_IO_EFBIG, /* 1512 */ + XML_IO_EINPROGRESS, /* 1513 */ + XML_IO_EINTR, /* 1514 */ + XML_IO_EINVAL, /* 1515 */ + XML_IO_EIO, /* 1516 */ + XML_IO_EISDIR, /* 1517 */ + XML_IO_EMFILE, /* 1518 */ + XML_IO_EMLINK, /* 1519 */ + XML_IO_EMSGSIZE, /* 1520 */ + XML_IO_ENAMETOOLONG, /* 1521 */ + XML_IO_ENFILE, /* 1522 */ + XML_IO_ENODEV, /* 1523 */ + XML_IO_ENOENT, /* 1524 */ + XML_IO_ENOEXEC, /* 1525 */ + XML_IO_ENOLCK, /* 1526 */ + XML_IO_ENOMEM, /* 1527 */ + XML_IO_ENOSPC, /* 1528 */ + XML_IO_ENOSYS, /* 1529 */ + XML_IO_ENOTDIR, /* 1530 */ + XML_IO_ENOTEMPTY, /* 1531 */ + XML_IO_ENOTSUP, /* 1532 */ + XML_IO_ENOTTY, /* 1533 */ + XML_IO_ENXIO, /* 1534 */ + XML_IO_EPERM, /* 1535 */ + XML_IO_EPIPE, /* 1536 */ + XML_IO_ERANGE, /* 1537 */ + XML_IO_EROFS, /* 1538 */ + XML_IO_ESPIPE, /* 1539 */ + XML_IO_ESRCH, /* 1540 */ + XML_IO_ETIMEDOUT, /* 1541 */ + XML_IO_EXDEV, /* 1542 */ + XML_IO_NETWORK_ATTEMPT, /* 1543 */ + XML_IO_ENCODER, /* 1544 */ + XML_IO_FLUSH, /* 1545 */ + XML_IO_WRITE, /* 
1546 */ + XML_IO_NO_INPUT, /* 1547 */ + XML_IO_BUFFER_FULL, /* 1548 */ + XML_IO_LOAD_ERROR, /* 1549 */ + XML_IO_ENOTSOCK, /* 1550 */ + XML_IO_EISCONN, /* 1551 */ + XML_IO_ECONNREFUSED, /* 1552 */ + XML_IO_ENETUNREACH, /* 1553 */ + XML_IO_EADDRINUSE, /* 1554 */ + XML_IO_EALREADY, /* 1555 */ + XML_IO_EAFNOSUPPORT, /* 1556 */ + XML_XINCLUDE_RECURSION=1600, + XML_XINCLUDE_PARSE_VALUE, /* 1601 */ + XML_XINCLUDE_ENTITY_DEF_MISMATCH, /* 1602 */ + XML_XINCLUDE_NO_HREF, /* 1603 */ + XML_XINCLUDE_NO_FALLBACK, /* 1604 */ + XML_XINCLUDE_HREF_URI, /* 1605 */ + XML_XINCLUDE_TEXT_FRAGMENT, /* 1606 */ + XML_XINCLUDE_TEXT_DOCUMENT, /* 1607 */ + XML_XINCLUDE_INVALID_CHAR, /* 1608 */ + XML_XINCLUDE_BUILD_FAILED, /* 1609 */ + XML_XINCLUDE_UNKNOWN_ENCODING, /* 1610 */ + XML_XINCLUDE_MULTIPLE_ROOT, /* 1611 */ + XML_XINCLUDE_XPTR_FAILED, /* 1612 */ + XML_XINCLUDE_XPTR_RESULT, /* 1613 */ + XML_XINCLUDE_INCLUDE_IN_INCLUDE, /* 1614 */ + XML_XINCLUDE_FALLBACKS_IN_INCLUDE, /* 1615 */ + XML_XINCLUDE_FALLBACK_NOT_IN_INCLUDE, /* 1616 */ + XML_XINCLUDE_DEPRECATED_NS, /* 1617 */ + XML_XINCLUDE_FRAGMENT_ID, /* 1618 */ + XML_CATALOG_MISSING_ATTR = 1650, + XML_CATALOG_ENTRY_BROKEN, /* 1651 */ + XML_CATALOG_PREFER_VALUE, /* 1652 */ + XML_CATALOG_NOT_CATALOG, /* 1653 */ + XML_CATALOG_RECURSION, /* 1654 */ + XML_SCHEMAP_PREFIX_UNDEFINED = 1700, + XML_SCHEMAP_ATTRFORMDEFAULT_VALUE, /* 1701 */ + XML_SCHEMAP_ATTRGRP_NONAME_NOREF, /* 1702 */ + XML_SCHEMAP_ATTR_NONAME_NOREF, /* 1703 */ + XML_SCHEMAP_COMPLEXTYPE_NONAME_NOREF, /* 1704 */ + XML_SCHEMAP_ELEMFORMDEFAULT_VALUE, /* 1705 */ + XML_SCHEMAP_ELEM_NONAME_NOREF, /* 1706 */ + XML_SCHEMAP_EXTENSION_NO_BASE, /* 1707 */ + XML_SCHEMAP_FACET_NO_VALUE, /* 1708 */ + XML_SCHEMAP_FAILED_BUILD_IMPORT, /* 1709 */ + XML_SCHEMAP_GROUP_NONAME_NOREF, /* 1710 */ + XML_SCHEMAP_IMPORT_NAMESPACE_NOT_URI, /* 1711 */ + XML_SCHEMAP_IMPORT_REDEFINE_NSNAME, /* 1712 */ + XML_SCHEMAP_IMPORT_SCHEMA_NOT_URI, /* 1713 */ + XML_SCHEMAP_INVALID_BOOLEAN, /* 1714 */ + XML_SCHEMAP_INVALID_ENUM, /* 1715 */ + XML_SCHEMAP_INVALID_FACET, /* 1716 */ + XML_SCHEMAP_INVALID_FACET_VALUE, /* 1717 */ + XML_SCHEMAP_INVALID_MAXOCCURS, /* 1718 */ + XML_SCHEMAP_INVALID_MINOCCURS, /* 1719 */ + XML_SCHEMAP_INVALID_REF_AND_SUBTYPE, /* 1720 */ + XML_SCHEMAP_INVALID_WHITE_SPACE, /* 1721 */ + XML_SCHEMAP_NOATTR_NOREF, /* 1722 */ + XML_SCHEMAP_NOTATION_NO_NAME, /* 1723 */ + XML_SCHEMAP_NOTYPE_NOREF, /* 1724 */ + XML_SCHEMAP_REF_AND_SUBTYPE, /* 1725 */ + XML_SCHEMAP_RESTRICTION_NONAME_NOREF, /* 1726 */ + XML_SCHEMAP_SIMPLETYPE_NONAME, /* 1727 */ + XML_SCHEMAP_TYPE_AND_SUBTYPE, /* 1728 */ + XML_SCHEMAP_UNKNOWN_ALL_CHILD, /* 1729 */ + XML_SCHEMAP_UNKNOWN_ANYATTRIBUTE_CHILD, /* 1730 */ + XML_SCHEMAP_UNKNOWN_ATTR_CHILD, /* 1731 */ + XML_SCHEMAP_UNKNOWN_ATTRGRP_CHILD, /* 1732 */ + XML_SCHEMAP_UNKNOWN_ATTRIBUTE_GROUP, /* 1733 */ + XML_SCHEMAP_UNKNOWN_BASE_TYPE, /* 1734 */ + XML_SCHEMAP_UNKNOWN_CHOICE_CHILD, /* 1735 */ + XML_SCHEMAP_UNKNOWN_COMPLEXCONTENT_CHILD, /* 1736 */ + XML_SCHEMAP_UNKNOWN_COMPLEXTYPE_CHILD, /* 1737 */ + XML_SCHEMAP_UNKNOWN_ELEM_CHILD, /* 1738 */ + XML_SCHEMAP_UNKNOWN_EXTENSION_CHILD, /* 1739 */ + XML_SCHEMAP_UNKNOWN_FACET_CHILD, /* 1740 */ + XML_SCHEMAP_UNKNOWN_FACET_TYPE, /* 1741 */ + XML_SCHEMAP_UNKNOWN_GROUP_CHILD, /* 1742 */ + XML_SCHEMAP_UNKNOWN_IMPORT_CHILD, /* 1743 */ + XML_SCHEMAP_UNKNOWN_LIST_CHILD, /* 1744 */ + XML_SCHEMAP_UNKNOWN_NOTATION_CHILD, /* 1745 */ + XML_SCHEMAP_UNKNOWN_PROCESSCONTENT_CHILD, /* 1746 */ + XML_SCHEMAP_UNKNOWN_REF, /* 1747 */ + XML_SCHEMAP_UNKNOWN_RESTRICTION_CHILD, /* 1748 */ + 
XML_SCHEMAP_UNKNOWN_SCHEMAS_CHILD, /* 1749 */ + XML_SCHEMAP_UNKNOWN_SEQUENCE_CHILD, /* 1750 */ + XML_SCHEMAP_UNKNOWN_SIMPLECONTENT_CHILD, /* 1751 */ + XML_SCHEMAP_UNKNOWN_SIMPLETYPE_CHILD, /* 1752 */ + XML_SCHEMAP_UNKNOWN_TYPE, /* 1753 */ + XML_SCHEMAP_UNKNOWN_UNION_CHILD, /* 1754 */ + XML_SCHEMAP_ELEM_DEFAULT_FIXED, /* 1755 */ + XML_SCHEMAP_REGEXP_INVALID, /* 1756 */ + XML_SCHEMAP_FAILED_LOAD, /* 1757 */ + XML_SCHEMAP_NOTHING_TO_PARSE, /* 1758 */ + XML_SCHEMAP_NOROOT, /* 1759 */ + XML_SCHEMAP_REDEFINED_GROUP, /* 1760 */ + XML_SCHEMAP_REDEFINED_TYPE, /* 1761 */ + XML_SCHEMAP_REDEFINED_ELEMENT, /* 1762 */ + XML_SCHEMAP_REDEFINED_ATTRGROUP, /* 1763 */ + XML_SCHEMAP_REDEFINED_ATTR, /* 1764 */ + XML_SCHEMAP_REDEFINED_NOTATION, /* 1765 */ + XML_SCHEMAP_FAILED_PARSE, /* 1766 */ + XML_SCHEMAP_UNKNOWN_PREFIX, /* 1767 */ + XML_SCHEMAP_DEF_AND_PREFIX, /* 1768 */ + XML_SCHEMAP_UNKNOWN_INCLUDE_CHILD, /* 1769 */ + XML_SCHEMAP_INCLUDE_SCHEMA_NOT_URI, /* 1770 */ + XML_SCHEMAP_INCLUDE_SCHEMA_NO_URI, /* 1771 */ + XML_SCHEMAP_NOT_SCHEMA, /* 1772 */ + XML_SCHEMAP_UNKNOWN_MEMBER_TYPE, /* 1773 */ + XML_SCHEMAP_INVALID_ATTR_USE, /* 1774 */ + XML_SCHEMAP_RECURSIVE, /* 1775 */ + XML_SCHEMAP_SUPERNUMEROUS_LIST_ITEM_TYPE, /* 1776 */ + XML_SCHEMAP_INVALID_ATTR_COMBINATION, /* 1777 */ + XML_SCHEMAP_INVALID_ATTR_INLINE_COMBINATION, /* 1778 */ + XML_SCHEMAP_MISSING_SIMPLETYPE_CHILD, /* 1779 */ + XML_SCHEMAP_INVALID_ATTR_NAME, /* 1780 */ + XML_SCHEMAP_REF_AND_CONTENT, /* 1781 */ + XML_SCHEMAP_CT_PROPS_CORRECT_1, /* 1782 */ + XML_SCHEMAP_CT_PROPS_CORRECT_2, /* 1783 */ + XML_SCHEMAP_CT_PROPS_CORRECT_3, /* 1784 */ + XML_SCHEMAP_CT_PROPS_CORRECT_4, /* 1785 */ + XML_SCHEMAP_CT_PROPS_CORRECT_5, /* 1786 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_1, /* 1787 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_1_1, /* 1788 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_1_2, /* 1789 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_2, /* 1790 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_3, /* 1791 */ + XML_SCHEMAP_WILDCARD_INVALID_NS_MEMBER, /* 1792 */ + XML_SCHEMAP_INTERSECTION_NOT_EXPRESSIBLE, /* 1793 */ + XML_SCHEMAP_UNION_NOT_EXPRESSIBLE, /* 1794 */ + XML_SCHEMAP_SRC_IMPORT_3_1, /* 1795 */ + XML_SCHEMAP_SRC_IMPORT_3_2, /* 1796 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_4_1, /* 1797 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_4_2, /* 1798 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_4_3, /* 1799 */ + XML_SCHEMAP_COS_CT_EXTENDS_1_3, /* 1800 */ + XML_SCHEMAV_NOROOT = 1801, + XML_SCHEMAV_UNDECLAREDELEM, /* 1802 */ + XML_SCHEMAV_NOTTOPLEVEL, /* 1803 */ + XML_SCHEMAV_MISSING, /* 1804 */ + XML_SCHEMAV_WRONGELEM, /* 1805 */ + XML_SCHEMAV_NOTYPE, /* 1806 */ + XML_SCHEMAV_NOROLLBACK, /* 1807 */ + XML_SCHEMAV_ISABSTRACT, /* 1808 */ + XML_SCHEMAV_NOTEMPTY, /* 1809 */ + XML_SCHEMAV_ELEMCONT, /* 1810 */ + XML_SCHEMAV_HAVEDEFAULT, /* 1811 */ + XML_SCHEMAV_NOTNILLABLE, /* 1812 */ + XML_SCHEMAV_EXTRACONTENT, /* 1813 */ + XML_SCHEMAV_INVALIDATTR, /* 1814 */ + XML_SCHEMAV_INVALIDELEM, /* 1815 */ + XML_SCHEMAV_NOTDETERMINIST, /* 1816 */ + XML_SCHEMAV_CONSTRUCT, /* 1817 */ + XML_SCHEMAV_INTERNAL, /* 1818 */ + XML_SCHEMAV_NOTSIMPLE, /* 1819 */ + XML_SCHEMAV_ATTRUNKNOWN, /* 1820 */ + XML_SCHEMAV_ATTRINVALID, /* 1821 */ + XML_SCHEMAV_VALUE, /* 1822 */ + XML_SCHEMAV_FACET, /* 1823 */ + XML_SCHEMAV_CVC_DATATYPE_VALID_1_2_1, /* 1824 */ + XML_SCHEMAV_CVC_DATATYPE_VALID_1_2_2, /* 1825 */ + XML_SCHEMAV_CVC_DATATYPE_VALID_1_2_3, /* 1826 */ + XML_SCHEMAV_CVC_TYPE_3_1_1, /* 1827 */ + XML_SCHEMAV_CVC_TYPE_3_1_2, /* 1828 */ + XML_SCHEMAV_CVC_FACET_VALID, /* 1829 */ + 
XML_SCHEMAV_CVC_LENGTH_VALID, /* 1830 */ + XML_SCHEMAV_CVC_MINLENGTH_VALID, /* 1831 */ + XML_SCHEMAV_CVC_MAXLENGTH_VALID, /* 1832 */ + XML_SCHEMAV_CVC_MININCLUSIVE_VALID, /* 1833 */ + XML_SCHEMAV_CVC_MAXINCLUSIVE_VALID, /* 1834 */ + XML_SCHEMAV_CVC_MINEXCLUSIVE_VALID, /* 1835 */ + XML_SCHEMAV_CVC_MAXEXCLUSIVE_VALID, /* 1836 */ + XML_SCHEMAV_CVC_TOTALDIGITS_VALID, /* 1837 */ + XML_SCHEMAV_CVC_FRACTIONDIGITS_VALID, /* 1838 */ + XML_SCHEMAV_CVC_PATTERN_VALID, /* 1839 */ + XML_SCHEMAV_CVC_ENUMERATION_VALID, /* 1840 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_1, /* 1841 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_2, /* 1842 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_3, /* 1843 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_2_4, /* 1844 */ + XML_SCHEMAV_CVC_ELT_1, /* 1845 */ + XML_SCHEMAV_CVC_ELT_2, /* 1846 */ + XML_SCHEMAV_CVC_ELT_3_1, /* 1847 */ + XML_SCHEMAV_CVC_ELT_3_2_1, /* 1848 */ + XML_SCHEMAV_CVC_ELT_3_2_2, /* 1849 */ + XML_SCHEMAV_CVC_ELT_4_1, /* 1850 */ + XML_SCHEMAV_CVC_ELT_4_2, /* 1851 */ + XML_SCHEMAV_CVC_ELT_4_3, /* 1852 */ + XML_SCHEMAV_CVC_ELT_5_1_1, /* 1853 */ + XML_SCHEMAV_CVC_ELT_5_1_2, /* 1854 */ + XML_SCHEMAV_CVC_ELT_5_2_1, /* 1855 */ + XML_SCHEMAV_CVC_ELT_5_2_2_1, /* 1856 */ + XML_SCHEMAV_CVC_ELT_5_2_2_2_1, /* 1857 */ + XML_SCHEMAV_CVC_ELT_5_2_2_2_2, /* 1858 */ + XML_SCHEMAV_CVC_ELT_6, /* 1859 */ + XML_SCHEMAV_CVC_ELT_7, /* 1860 */ + XML_SCHEMAV_CVC_ATTRIBUTE_1, /* 1861 */ + XML_SCHEMAV_CVC_ATTRIBUTE_2, /* 1862 */ + XML_SCHEMAV_CVC_ATTRIBUTE_3, /* 1863 */ + XML_SCHEMAV_CVC_ATTRIBUTE_4, /* 1864 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_3_1, /* 1865 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_3_2_1, /* 1866 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_3_2_2, /* 1867 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_4, /* 1868 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_5_1, /* 1869 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_5_2, /* 1870 */ + XML_SCHEMAV_ELEMENT_CONTENT, /* 1871 */ + XML_SCHEMAV_DOCUMENT_ELEMENT_MISSING, /* 1872 */ + XML_SCHEMAV_CVC_COMPLEX_TYPE_1, /* 1873 */ + XML_SCHEMAV_CVC_AU, /* 1874 */ + XML_SCHEMAV_CVC_TYPE_1, /* 1875 */ + XML_SCHEMAV_CVC_TYPE_2, /* 1876 */ + XML_SCHEMAV_CVC_IDC, /* 1877 */ + XML_SCHEMAV_CVC_WILDCARD, /* 1878 */ + XML_SCHEMAV_MISC, /* 1879 */ + XML_XPTR_UNKNOWN_SCHEME = 1900, + XML_XPTR_CHILDSEQ_START, /* 1901 */ + XML_XPTR_EVAL_FAILED, /* 1902 */ + XML_XPTR_EXTRA_OBJECTS, /* 1903 */ + XML_C14N_CREATE_CTXT = 1950, + XML_C14N_REQUIRES_UTF8, /* 1951 */ + XML_C14N_CREATE_STACK, /* 1952 */ + XML_C14N_INVALID_NODE, /* 1953 */ + XML_C14N_UNKNOW_NODE, /* 1954 */ + XML_C14N_RELATIVE_NAMESPACE, /* 1955 */ + XML_FTP_PASV_ANSWER = 2000, + XML_FTP_EPSV_ANSWER, /* 2001 */ + XML_FTP_ACCNT, /* 2002 */ + XML_FTP_URL_SYNTAX, /* 2003 */ + XML_HTTP_URL_SYNTAX = 2020, + XML_HTTP_USE_IP, /* 2021 */ + XML_HTTP_UNKNOWN_HOST, /* 2022 */ + XML_SCHEMAP_SRC_SIMPLE_TYPE_1 = 3000, + XML_SCHEMAP_SRC_SIMPLE_TYPE_2, /* 3001 */ + XML_SCHEMAP_SRC_SIMPLE_TYPE_3, /* 3002 */ + XML_SCHEMAP_SRC_SIMPLE_TYPE_4, /* 3003 */ + XML_SCHEMAP_SRC_RESOLVE, /* 3004 */ + XML_SCHEMAP_SRC_RESTRICTION_BASE_OR_SIMPLETYPE, /* 3005 */ + XML_SCHEMAP_SRC_LIST_ITEMTYPE_OR_SIMPLETYPE, /* 3006 */ + XML_SCHEMAP_SRC_UNION_MEMBERTYPES_OR_SIMPLETYPES, /* 3007 */ + XML_SCHEMAP_ST_PROPS_CORRECT_1, /* 3008 */ + XML_SCHEMAP_ST_PROPS_CORRECT_2, /* 3009 */ + XML_SCHEMAP_ST_PROPS_CORRECT_3, /* 3010 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_1, /* 3011 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_2, /* 3012 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_3_1, /* 3013 */ + XML_SCHEMAP_COS_ST_RESTRICTS_1_3_2, /* 3014 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_1, /* 3015 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_1_1, /* 3016 */ + 
XML_SCHEMAP_COS_ST_RESTRICTS_2_3_1_2, /* 3017 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_1, /* 3018 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_2, /* 3019 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_3, /* 3020 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_4, /* 3021 */ + XML_SCHEMAP_COS_ST_RESTRICTS_2_3_2_5, /* 3022 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_1, /* 3023 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_1, /* 3024 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_1_2, /* 3025 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_2, /* 3026 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_1, /* 3027 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_3, /* 3028 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_4, /* 3029 */ + XML_SCHEMAP_COS_ST_RESTRICTS_3_3_2_5, /* 3030 */ + XML_SCHEMAP_COS_ST_DERIVED_OK_2_1, /* 3031 */ + XML_SCHEMAP_COS_ST_DERIVED_OK_2_2, /* 3032 */ + XML_SCHEMAP_S4S_ELEM_NOT_ALLOWED, /* 3033 */ + XML_SCHEMAP_S4S_ELEM_MISSING, /* 3034 */ + XML_SCHEMAP_S4S_ATTR_NOT_ALLOWED, /* 3035 */ + XML_SCHEMAP_S4S_ATTR_MISSING, /* 3036 */ + XML_SCHEMAP_S4S_ATTR_INVALID_VALUE, /* 3037 */ + XML_SCHEMAP_SRC_ELEMENT_1, /* 3038 */ + XML_SCHEMAP_SRC_ELEMENT_2_1, /* 3039 */ + XML_SCHEMAP_SRC_ELEMENT_2_2, /* 3040 */ + XML_SCHEMAP_SRC_ELEMENT_3, /* 3041 */ + XML_SCHEMAP_P_PROPS_CORRECT_1, /* 3042 */ + XML_SCHEMAP_P_PROPS_CORRECT_2_1, /* 3043 */ + XML_SCHEMAP_P_PROPS_CORRECT_2_2, /* 3044 */ + XML_SCHEMAP_E_PROPS_CORRECT_2, /* 3045 */ + XML_SCHEMAP_E_PROPS_CORRECT_3, /* 3046 */ + XML_SCHEMAP_E_PROPS_CORRECT_4, /* 3047 */ + XML_SCHEMAP_E_PROPS_CORRECT_5, /* 3048 */ + XML_SCHEMAP_E_PROPS_CORRECT_6, /* 3049 */ + XML_SCHEMAP_SRC_INCLUDE, /* 3050 */ + XML_SCHEMAP_SRC_ATTRIBUTE_1, /* 3051 */ + XML_SCHEMAP_SRC_ATTRIBUTE_2, /* 3052 */ + XML_SCHEMAP_SRC_ATTRIBUTE_3_1, /* 3053 */ + XML_SCHEMAP_SRC_ATTRIBUTE_3_2, /* 3054 */ + XML_SCHEMAP_SRC_ATTRIBUTE_4, /* 3055 */ + XML_SCHEMAP_NO_XMLNS, /* 3056 */ + XML_SCHEMAP_NO_XSI, /* 3057 */ + XML_SCHEMAP_COS_VALID_DEFAULT_1, /* 3058 */ + XML_SCHEMAP_COS_VALID_DEFAULT_2_1, /* 3059 */ + XML_SCHEMAP_COS_VALID_DEFAULT_2_2_1, /* 3060 */ + XML_SCHEMAP_COS_VALID_DEFAULT_2_2_2, /* 3061 */ + XML_SCHEMAP_CVC_SIMPLE_TYPE, /* 3062 */ + XML_SCHEMAP_COS_CT_EXTENDS_1_1, /* 3063 */ + XML_SCHEMAP_SRC_IMPORT_1_1, /* 3064 */ + XML_SCHEMAP_SRC_IMPORT_1_2, /* 3065 */ + XML_SCHEMAP_SRC_IMPORT_2, /* 3066 */ + XML_SCHEMAP_SRC_IMPORT_2_1, /* 3067 */ + XML_SCHEMAP_SRC_IMPORT_2_2, /* 3068 */ + XML_SCHEMAP_INTERNAL, /* 3069 non-W3C */ + XML_SCHEMAP_NOT_DETERMINISTIC, /* 3070 non-W3C */ + XML_SCHEMAP_SRC_ATTRIBUTE_GROUP_1, /* 3071 */ + XML_SCHEMAP_SRC_ATTRIBUTE_GROUP_2, /* 3072 */ + XML_SCHEMAP_SRC_ATTRIBUTE_GROUP_3, /* 3073 */ + XML_SCHEMAP_MG_PROPS_CORRECT_1, /* 3074 */ + XML_SCHEMAP_MG_PROPS_CORRECT_2, /* 3075 */ + XML_SCHEMAP_SRC_CT_1, /* 3076 */ + XML_SCHEMAP_DERIVATION_OK_RESTRICTION_2_1_3, /* 3077 */ + XML_SCHEMAP_AU_PROPS_CORRECT_2, /* 3078 */ + XML_SCHEMAP_A_PROPS_CORRECT_2, /* 3079 */ + XML_SCHEMAP_C_PROPS_CORRECT, /* 3080 */ + XML_SCHEMAP_SRC_REDEFINE, /* 3081 */ + XML_SCHEMAP_SRC_IMPORT, /* 3082 */ + XML_SCHEMAP_WARN_SKIP_SCHEMA, /* 3083 */ + XML_SCHEMAP_WARN_UNLOCATED_SCHEMA, /* 3084 */ + XML_SCHEMAP_WARN_ATTR_REDECL_PROH, /* 3085 */ + XML_SCHEMAP_WARN_ATTR_POINTLESS_PROH, /* 3085 */ + XML_SCHEMAP_AG_PROPS_CORRECT, /* 3086 */ + XML_SCHEMAP_COS_CT_EXTENDS_1_2, /* 3087 */ + XML_SCHEMAP_AU_PROPS_CORRECT, /* 3088 */ + XML_SCHEMAP_A_PROPS_CORRECT_3, /* 3089 */ + XML_SCHEMAP_COS_ALL_LIMITED, /* 3090 */ + XML_SCHEMATRONV_ASSERT = 4000, /* 4000 */ + XML_SCHEMATRONV_REPORT, + XML_MODULE_OPEN = 4900, /* 4900 */ + XML_MODULE_CLOSE, /* 4901 */ + XML_CHECK_FOUND_ELEMENT 
= 5000, + XML_CHECK_FOUND_ATTRIBUTE, /* 5001 */ + XML_CHECK_FOUND_TEXT, /* 5002 */ + XML_CHECK_FOUND_CDATA, /* 5003 */ + XML_CHECK_FOUND_ENTITYREF, /* 5004 */ + XML_CHECK_FOUND_ENTITY, /* 5005 */ + XML_CHECK_FOUND_PI, /* 5006 */ + XML_CHECK_FOUND_COMMENT, /* 5007 */ + XML_CHECK_FOUND_DOCTYPE, /* 5008 */ + XML_CHECK_FOUND_FRAGMENT, /* 5009 */ + XML_CHECK_FOUND_NOTATION, /* 5010 */ + XML_CHECK_UNKNOWN_NODE, /* 5011 */ + XML_CHECK_ENTITY_TYPE, /* 5012 */ + XML_CHECK_NO_PARENT, /* 5013 */ + XML_CHECK_NO_DOC, /* 5014 */ + XML_CHECK_NO_NAME, /* 5015 */ + XML_CHECK_NO_ELEM, /* 5016 */ + XML_CHECK_WRONG_DOC, /* 5017 */ + XML_CHECK_NO_PREV, /* 5018 */ + XML_CHECK_WRONG_PREV, /* 5019 */ + XML_CHECK_NO_NEXT, /* 5020 */ + XML_CHECK_WRONG_NEXT, /* 5021 */ + XML_CHECK_NOT_DTD, /* 5022 */ + XML_CHECK_NOT_ATTR, /* 5023 */ + XML_CHECK_NOT_ATTR_DECL, /* 5024 */ + XML_CHECK_NOT_ELEM_DECL, /* 5025 */ + XML_CHECK_NOT_ENTITY_DECL, /* 5026 */ + XML_CHECK_NOT_NS_DECL, /* 5027 */ + XML_CHECK_NO_HREF, /* 5028 */ + XML_CHECK_WRONG_PARENT,/* 5029 */ + XML_CHECK_NS_SCOPE, /* 5030 */ + XML_CHECK_NS_ANCESTOR, /* 5031 */ + XML_CHECK_NOT_UTF8, /* 5032 */ + XML_CHECK_NO_DICT, /* 5033 */ + XML_CHECK_NOT_NCNAME, /* 5034 */ + XML_CHECK_OUTSIDE_DICT, /* 5035 */ + XML_CHECK_WRONG_NAME, /* 5036 */ + XML_CHECK_NAME_NOT_NULL, /* 5037 */ + XML_I18N_NO_NAME = 6000, + XML_I18N_NO_HANDLER, /* 6001 */ + XML_I18N_EXCESS_HANDLER, /* 6002 */ + XML_I18N_CONV_FAILED, /* 6003 */ + XML_I18N_NO_OUTPUT, /* 6004 */ + XML_BUF_OVERFLOW = 7000 +} xmlParserErrors; + +/** + * xmlGenericErrorFunc: + * @ctx: a parsing context + * @msg: the message + * @...: the extra arguments of the varargs to format the message + * + * Signature of the function to use when there is an error and + * no parsing or validity context available . + */ +typedef void (*xmlGenericErrorFunc) (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +/** + * xmlStructuredErrorFunc: + * @userData: user provided data for the error callback + * @error: the error being raised. + * + * Signature of the function to use when there is an error and + * the module handles the new error reporting mechanism. + */ +typedef void (*xmlStructuredErrorFunc) (void *userData, const xmlError *error); + +/** DOC_DISABLE */ +#define XML_GLOBALS_ERROR \ + XML_OP(xmlLastError, xmlError, XML_DEPRECATED) \ + XML_OP(xmlGenericError, xmlGenericErrorFunc, XML_NO_ATTR) \ + XML_OP(xmlGenericErrorContext, void *, XML_NO_ATTR) \ + XML_OP(xmlStructuredError, xmlStructuredErrorFunc, XML_NO_ATTR) \ + XML_OP(xmlStructuredErrorContext, void *, XML_NO_ATTR) + +#define XML_OP XML_DECLARE_GLOBAL +XML_GLOBALS_ERROR +#undef XML_OP + +#if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlLastError XML_GLOBAL_MACRO(xmlLastError) + #define xmlGenericError XML_GLOBAL_MACRO(xmlGenericError) + #define xmlGenericErrorContext XML_GLOBAL_MACRO(xmlGenericErrorContext) + #define xmlStructuredError XML_GLOBAL_MACRO(xmlStructuredError) + #define xmlStructuredErrorContext XML_GLOBAL_MACRO(xmlStructuredErrorContext) +#endif +/** DOC_ENABLE */ + +/* + * Use the following function to reset the two global variables + * xmlGenericError and xmlGenericErrorContext. 
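+ *
+ * Editorial illustration (not part of the upstream header): a typical
+ * application installs an error callback once at startup, either the
+ * generic printf-style handler set here or the structured variant
+ * declared just below. A minimal sketch, assuming <stdio.h> is included:
+ *
+ *     static void my_structured_handler(void *userData, const xmlError *err) {
+ *         fprintf(stderr, "libxml2 [%d/%d] %s:%d: %s",
+ *                 err->domain, err->code,
+ *                 err->file ? err->file : "<memory>", err->line,
+ *                 err->message ? err->message : "unknown error\n");
+ *     }
+ *
+ *     xmlSetStructuredErrorFunc(NULL, my_structured_handler);
+ *
+ * my_structured_handler is a hypothetical name; its signature follows the
+ * xmlStructuredErrorFunc typedef above.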
+ */ +XMLPUBFUN void + xmlSetGenericErrorFunc (void *ctx, + xmlGenericErrorFunc handler); +XMLPUBFUN void + xmlThrDefSetGenericErrorFunc(void *ctx, + xmlGenericErrorFunc handler); +XML_DEPRECATED +XMLPUBFUN void + initGenericErrorDefaultFunc (xmlGenericErrorFunc *handler); + +XMLPUBFUN void + xmlSetStructuredErrorFunc (void *ctx, + xmlStructuredErrorFunc handler); +XMLPUBFUN void + xmlThrDefSetStructuredErrorFunc(void *ctx, + xmlStructuredErrorFunc handler); +/* + * Default message routines used by SAX and Valid context for error + * and warning reporting. + */ +XMLPUBFUN void + xmlParserError (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +XMLPUBFUN void + xmlParserWarning (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +XMLPUBFUN void + xmlParserValidityError (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +XMLPUBFUN void + xmlParserValidityWarning (void *ctx, + const char *msg, + ...) LIBXML_ATTR_FORMAT(2,3); +struct _xmlParserInput; +XMLPUBFUN void + xmlParserPrintFileInfo (struct _xmlParserInput *input); +XMLPUBFUN void + xmlParserPrintFileContext (struct _xmlParserInput *input); + +/* + * Extended error information routines + */ +XMLPUBFUN const xmlError * + xmlGetLastError (void); +XMLPUBFUN void + xmlResetLastError (void); +XMLPUBFUN const xmlError * + xmlCtxtGetLastError (void *ctx); +XMLPUBFUN void + xmlCtxtResetLastError (void *ctx); +XMLPUBFUN void + xmlResetError (xmlErrorPtr err); +XMLPUBFUN int + xmlCopyError (const xmlError *from, + xmlErrorPtr to); + +#ifdef __cplusplus +} +#endif +#endif /* __XML_ERROR_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlmemory.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlmemory.h new file mode 100644 index 0000000000000000000000000000000000000000..097e3c8f5f6386423c8cf00daf47477a43847150 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlmemory.h @@ -0,0 +1,225 @@ +/* + * Summary: interface for the memory allocator + * Description: provides interfaces for the memory allocator, + * including debugging capabilities. + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + + +#ifndef __DEBUG_MEMORY_ALLOC__ +#define __DEBUG_MEMORY_ALLOC__ + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * The XML memory wrapper support 4 basic overloadable functions. + */ +/** + * xmlFreeFunc: + * @mem: an already allocated block of memory + * + * Signature for a free() implementation. + */ +typedef void (*xmlFreeFunc)(void *mem); +/** + * xmlMallocFunc: + * @size: the size requested in bytes + * + * Signature for a malloc() implementation. + * + * Returns a pointer to the newly allocated block or NULL in case of error. + */ +typedef void *(LIBXML_ATTR_ALLOC_SIZE(1) *xmlMallocFunc)(size_t size); + +/** + * xmlReallocFunc: + * @mem: an already allocated block of memory + * @size: the new size requested in bytes + * + * Signature for a realloc() implementation. + * + * Returns a pointer to the newly reallocated block or NULL in case of error. + */ +typedef void *(*xmlReallocFunc)(void *mem, size_t size); + +/** + * xmlStrdupFunc: + * @str: a zero terminated string + * + * Signature for an strdup() implementation. + * + * Returns the copy of the string or NULL in case of error. 
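+ *
+ * Editorial illustration (not part of the upstream header): together with
+ * xmlFreeFunc, xmlMallocFunc and xmlReallocFunc above, this typedef is the
+ * shape xmlMemSetup(), declared further down, expects when an application
+ * swaps in its own allocator, e.g.
+ *
+ *     xmlMemSetup(my_free, my_malloc, my_realloc, my_strdup);
+ *
+ * where the four my_* callbacks are hypothetical functions matching these
+ * typedefs; the call is normally made before any other libxml2 API is used.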
+ */ +typedef char *(*xmlStrdupFunc)(const char *str); + +/* + * In general the memory allocation entry points are not kept + * thread specific but this can be overridden by LIBXML_THREAD_ALLOC_ENABLED + * - xmlMalloc + * - xmlMallocAtomic + * - xmlRealloc + * - xmlMemStrdup + * - xmlFree + */ +/** DOC_DISABLE */ +#ifdef LIBXML_THREAD_ALLOC_ENABLED + #define XML_GLOBALS_ALLOC \ + XML_OP(xmlMalloc, xmlMallocFunc, XML_NO_ATTR) \ + XML_OP(xmlMallocAtomic, xmlMallocFunc, XML_NO_ATTR) \ + XML_OP(xmlRealloc, xmlReallocFunc, XML_NO_ATTR) \ + XML_OP(xmlFree, xmlFreeFunc, XML_NO_ATTR) \ + XML_OP(xmlMemStrdup, xmlStrdupFunc, XML_NO_ATTR) + #define XML_OP XML_DECLARE_GLOBAL + XML_GLOBALS_ALLOC + #undef XML_OP + #if defined(LIBXML_THREAD_ENABLED) && !defined(XML_GLOBALS_NO_REDEFINITION) + #define xmlMalloc XML_GLOBAL_MACRO(xmlMalloc) + #define xmlMallocAtomic XML_GLOBAL_MACRO(xmlMallocAtomic) + #define xmlRealloc XML_GLOBAL_MACRO(xmlRealloc) + #define xmlFree XML_GLOBAL_MACRO(xmlFree) + #define xmlMemStrdup XML_GLOBAL_MACRO(xmlMemStrdup) + #endif +#else + #define XML_GLOBALS_ALLOC +/** DOC_ENABLE */ + XMLPUBVAR xmlMallocFunc xmlMalloc; + XMLPUBVAR xmlMallocFunc xmlMallocAtomic; + XMLPUBVAR xmlReallocFunc xmlRealloc; + XMLPUBVAR xmlFreeFunc xmlFree; + XMLPUBVAR xmlStrdupFunc xmlMemStrdup; +#endif + +/* + * The way to overload the existing functions. + * The xmlGc function have an extra entry for atomic block + * allocations useful for garbage collected memory allocators + */ +XMLPUBFUN int + xmlMemSetup (xmlFreeFunc freeFunc, + xmlMallocFunc mallocFunc, + xmlReallocFunc reallocFunc, + xmlStrdupFunc strdupFunc); +XMLPUBFUN int + xmlMemGet (xmlFreeFunc *freeFunc, + xmlMallocFunc *mallocFunc, + xmlReallocFunc *reallocFunc, + xmlStrdupFunc *strdupFunc); +XMLPUBFUN int + xmlGcMemSetup (xmlFreeFunc freeFunc, + xmlMallocFunc mallocFunc, + xmlMallocFunc mallocAtomicFunc, + xmlReallocFunc reallocFunc, + xmlStrdupFunc strdupFunc); +XMLPUBFUN int + xmlGcMemGet (xmlFreeFunc *freeFunc, + xmlMallocFunc *mallocFunc, + xmlMallocFunc *mallocAtomicFunc, + xmlReallocFunc *reallocFunc, + xmlStrdupFunc *strdupFunc); + +/* + * Initialization of the memory layer. + */ +XML_DEPRECATED +XMLPUBFUN int + xmlInitMemory (void); + +/* + * Cleanup of the memory layer. + */ +XML_DEPRECATED +XMLPUBFUN void + xmlCleanupMemory (void); +/* + * These are specific to the XML debug memory wrapper. + */ +XMLPUBFUN size_t + xmlMemSize (void *ptr); +XMLPUBFUN int + xmlMemUsed (void); +XMLPUBFUN int + xmlMemBlocks (void); +XMLPUBFUN void + xmlMemDisplay (FILE *fp); +XMLPUBFUN void + xmlMemDisplayLast(FILE *fp, long nbBytes); +XMLPUBFUN void + xmlMemShow (FILE *fp, int nr); +XMLPUBFUN void + xmlMemoryDump (void); +XMLPUBFUN void * + xmlMemMalloc (size_t size) LIBXML_ATTR_ALLOC_SIZE(1); +XMLPUBFUN void * + xmlMemRealloc (void *ptr,size_t size); +XMLPUBFUN void + xmlMemFree (void *ptr); +XMLPUBFUN char * + xmlMemoryStrdup (const char *str); +XMLPUBFUN void * + xmlMallocLoc (size_t size, const char *file, int line) LIBXML_ATTR_ALLOC_SIZE(1); +XMLPUBFUN void * + xmlReallocLoc (void *ptr, size_t size, const char *file, int line); +XMLPUBFUN void * + xmlMallocAtomicLoc (size_t size, const char *file, int line) LIBXML_ATTR_ALLOC_SIZE(1); +XMLPUBFUN char * + xmlMemStrdupLoc (const char *str, const char *file, int line); + + +/** DOC_DISABLE */ +#ifdef DEBUG_MEMORY_LOCATION +/** + * xmlMalloc: + * @size: number of bytes to allocate + * + * Wrapper for the malloc() function used in the XML library. 
+ * + * Returns the pointer to the allocated area or NULL in case of error. + */ +#define xmlMalloc(size) xmlMallocLoc((size), __FILE__, __LINE__) +/** + * xmlMallocAtomic: + * @size: number of bytes to allocate + * + * Wrapper for the malloc() function used in the XML library for allocation + * of block not containing pointers to other areas. + * + * Returns the pointer to the allocated area or NULL in case of error. + */ +#define xmlMallocAtomic(size) xmlMallocAtomicLoc((size), __FILE__, __LINE__) +/** + * xmlRealloc: + * @ptr: pointer to the existing allocated area + * @size: number of bytes to allocate + * + * Wrapper for the realloc() function used in the XML library. + * + * Returns the pointer to the allocated area or NULL in case of error. + */ +#define xmlRealloc(ptr, size) xmlReallocLoc((ptr), (size), __FILE__, __LINE__) +/** + * xmlMemStrdup: + * @str: pointer to the existing string + * + * Wrapper for the strdup() function, xmlStrdup() is usually preferred. + * + * Returns the pointer to the allocated area or NULL in case of error. + */ +#define xmlMemStrdup(str) xmlMemStrdupLoc((str), __FILE__, __LINE__) + +#endif /* DEBUG_MEMORY_LOCATION */ +/** DOC_ENABLE */ + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + +#endif /* __DEBUG_MEMORY_ALLOC__ */ + diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlmodule.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlmodule.h new file mode 100644 index 0000000000000000000000000000000000000000..279986c1a9fe9c4025aa3f98cf5235018e5a22de --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlmodule.h @@ -0,0 +1,57 @@ +/* + * Summary: dynamic module loading + * Description: basic API for dynamic module loading, used by + * libexslt added in 2.6.17 + * + * Copy: See Copyright for the status of this software. + * + * Author: Joel W. Reed + */ + +#ifndef __XML_MODULE_H__ +#define __XML_MODULE_H__ + +#include + +#ifdef LIBXML_MODULES_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlModulePtr: + * + * A handle to a dynamically loaded module + */ +typedef struct _xmlModule xmlModule; +typedef xmlModule *xmlModulePtr; + +/** + * xmlModuleOption: + * + * enumeration of options that can be passed down to xmlModuleOpen() + */ +typedef enum { + XML_MODULE_LAZY = 1, /* lazy binding */ + XML_MODULE_LOCAL= 2 /* local binding */ +} xmlModuleOption; + +XMLPUBFUN xmlModulePtr xmlModuleOpen (const char *filename, + int options); + +XMLPUBFUN int xmlModuleSymbol (xmlModulePtr module, + const char* name, + void **result); + +XMLPUBFUN int xmlModuleClose (xmlModulePtr module); + +XMLPUBFUN int xmlModuleFree (xmlModulePtr module); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_MODULES_ENABLED */ + +#endif /*__XML_MODULE_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlregexp.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlregexp.h new file mode 100644 index 0000000000000000000000000000000000000000..2d66437a55b547fc809941ee0a9bafec722a432d --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlregexp.h @@ -0,0 +1,215 @@ +/* + * Summary: regular expressions handling + * Description: basic API for libxml regular expressions handling used + * for XML Schemas and validation. + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_REGEXP_H__ +#define __XML_REGEXP_H__ + +#include +#include +#include + +#ifdef LIBXML_REGEXP_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * xmlRegexpPtr: + * + * A libxml regular expression, they can actually be far more complex + * thank the POSIX regex expressions. + */ +typedef struct _xmlRegexp xmlRegexp; +typedef xmlRegexp *xmlRegexpPtr; + +/** + * xmlRegExecCtxtPtr: + * + * A libxml progressive regular expression evaluation context + */ +typedef struct _xmlRegExecCtxt xmlRegExecCtxt; +typedef xmlRegExecCtxt *xmlRegExecCtxtPtr; + +/* + * The POSIX like API + */ +XMLPUBFUN xmlRegexpPtr + xmlRegexpCompile (const xmlChar *regexp); +XMLPUBFUN void xmlRegFreeRegexp(xmlRegexpPtr regexp); +XMLPUBFUN int + xmlRegexpExec (xmlRegexpPtr comp, + const xmlChar *value); +XMLPUBFUN void + xmlRegexpPrint (FILE *output, + xmlRegexpPtr regexp); +XMLPUBFUN int + xmlRegexpIsDeterminist(xmlRegexpPtr comp); + +/** + * xmlRegExecCallbacks: + * @exec: the regular expression context + * @token: the current token string + * @transdata: transition data + * @inputdata: input data + * + * Callback function when doing a transition in the automata + */ +typedef void (*xmlRegExecCallbacks) (xmlRegExecCtxtPtr exec, + const xmlChar *token, + void *transdata, + void *inputdata); + +/* + * The progressive API + */ +XMLPUBFUN xmlRegExecCtxtPtr + xmlRegNewExecCtxt (xmlRegexpPtr comp, + xmlRegExecCallbacks callback, + void *data); +XMLPUBFUN void + xmlRegFreeExecCtxt (xmlRegExecCtxtPtr exec); +XMLPUBFUN int + xmlRegExecPushString(xmlRegExecCtxtPtr exec, + const xmlChar *value, + void *data); +XMLPUBFUN int + xmlRegExecPushString2(xmlRegExecCtxtPtr exec, + const xmlChar *value, + const xmlChar *value2, + void *data); + +XMLPUBFUN int + xmlRegExecNextValues(xmlRegExecCtxtPtr exec, + int *nbval, + int *nbneg, + xmlChar **values, + int *terminal); +XMLPUBFUN int + xmlRegExecErrInfo (xmlRegExecCtxtPtr exec, + const xmlChar **string, + int *nbval, + int *nbneg, + xmlChar **values, + int *terminal); +#ifdef LIBXML_EXPR_ENABLED +/* + * Formal regular expression handling + * Its goal is to do some formal work on content models + */ + +/* expressions are used within a context */ +typedef struct _xmlExpCtxt xmlExpCtxt; +typedef xmlExpCtxt *xmlExpCtxtPtr; + +XMLPUBFUN void + xmlExpFreeCtxt (xmlExpCtxtPtr ctxt); +XMLPUBFUN xmlExpCtxtPtr + xmlExpNewCtxt (int maxNodes, + xmlDictPtr dict); + +XMLPUBFUN int + xmlExpCtxtNbNodes(xmlExpCtxtPtr ctxt); +XMLPUBFUN int + xmlExpCtxtNbCons(xmlExpCtxtPtr ctxt); + +/* Expressions are trees but the tree is opaque */ +typedef struct _xmlExpNode xmlExpNode; +typedef xmlExpNode *xmlExpNodePtr; + +typedef enum { + XML_EXP_EMPTY = 0, + XML_EXP_FORBID = 1, + XML_EXP_ATOM = 2, + XML_EXP_SEQ = 3, + XML_EXP_OR = 4, + XML_EXP_COUNT = 5 +} xmlExpNodeType; + +/* + * 2 core expressions shared by all for the empty language set + * and for the set with just the empty token + */ +XMLPUBVAR xmlExpNodePtr forbiddenExp; +XMLPUBVAR xmlExpNodePtr emptyExp; + +/* + * Expressions are reference counted internally + */ +XMLPUBFUN void + xmlExpFree (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr); +XMLPUBFUN void + xmlExpRef (xmlExpNodePtr expr); + +/* + * constructors can be either manual or from a string + */ +XMLPUBFUN xmlExpNodePtr + xmlExpParse (xmlExpCtxtPtr ctxt, + const char *expr); +XMLPUBFUN xmlExpNodePtr + xmlExpNewAtom (xmlExpCtxtPtr ctxt, + const xmlChar *name, + int len); +XMLPUBFUN xmlExpNodePtr + xmlExpNewOr (xmlExpCtxtPtr ctxt, + xmlExpNodePtr 
left, + xmlExpNodePtr right); +XMLPUBFUN xmlExpNodePtr + xmlExpNewSeq (xmlExpCtxtPtr ctxt, + xmlExpNodePtr left, + xmlExpNodePtr right); +XMLPUBFUN xmlExpNodePtr + xmlExpNewRange (xmlExpCtxtPtr ctxt, + xmlExpNodePtr subset, + int min, + int max); +/* + * The really interesting APIs + */ +XMLPUBFUN int + xmlExpIsNillable(xmlExpNodePtr expr); +XMLPUBFUN int + xmlExpMaxToken (xmlExpNodePtr expr); +XMLPUBFUN int + xmlExpGetLanguage(xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + const xmlChar**langList, + int len); +XMLPUBFUN int + xmlExpGetStart (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + const xmlChar**tokList, + int len); +XMLPUBFUN xmlExpNodePtr + xmlExpStringDerive(xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + const xmlChar *str, + int len); +XMLPUBFUN xmlExpNodePtr + xmlExpExpDerive (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + xmlExpNodePtr sub); +XMLPUBFUN int + xmlExpSubsume (xmlExpCtxtPtr ctxt, + xmlExpNodePtr expr, + xmlExpNodePtr sub); +XMLPUBFUN void + xmlExpDump (xmlBufferPtr buf, + xmlExpNodePtr expr); +#endif /* LIBXML_EXPR_ENABLED */ +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_REGEXP_ENABLED */ + +#endif /*__XML_REGEXP_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlunicode.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlunicode.h new file mode 100644 index 0000000000000000000000000000000000000000..2e50a49f9b24ace9717843f34b4348b494165120 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlunicode.h @@ -0,0 +1,202 @@ +/* + * Summary: Unicode character APIs + * Description: API for the Unicode character APIs + * + * This file is automatically generated from the + * UCS description files of the Unicode Character Database + * http://www.unicode.org/Public/4.0-Update1/UCD-4.0.1.html + * using the genUnicode.py Python script. 
+ * + * Generation date: Mon Mar 27 11:09:52 2006 + * Sources: Blocks-4.0.1.txt UnicodeData-4.0.1.txt + * Author: Daniel Veillard + */ + +#ifndef __XML_UNICODE_H__ +#define __XML_UNICODE_H__ + +#include + +#ifdef LIBXML_UNICODE_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +XMLPUBFUN int xmlUCSIsAegeanNumbers (int code); +XMLPUBFUN int xmlUCSIsAlphabeticPresentationForms (int code); +XMLPUBFUN int xmlUCSIsArabic (int code); +XMLPUBFUN int xmlUCSIsArabicPresentationFormsA (int code); +XMLPUBFUN int xmlUCSIsArabicPresentationFormsB (int code); +XMLPUBFUN int xmlUCSIsArmenian (int code); +XMLPUBFUN int xmlUCSIsArrows (int code); +XMLPUBFUN int xmlUCSIsBasicLatin (int code); +XMLPUBFUN int xmlUCSIsBengali (int code); +XMLPUBFUN int xmlUCSIsBlockElements (int code); +XMLPUBFUN int xmlUCSIsBopomofo (int code); +XMLPUBFUN int xmlUCSIsBopomofoExtended (int code); +XMLPUBFUN int xmlUCSIsBoxDrawing (int code); +XMLPUBFUN int xmlUCSIsBraillePatterns (int code); +XMLPUBFUN int xmlUCSIsBuhid (int code); +XMLPUBFUN int xmlUCSIsByzantineMusicalSymbols (int code); +XMLPUBFUN int xmlUCSIsCJKCompatibility (int code); +XMLPUBFUN int xmlUCSIsCJKCompatibilityForms (int code); +XMLPUBFUN int xmlUCSIsCJKCompatibilityIdeographs (int code); +XMLPUBFUN int xmlUCSIsCJKCompatibilityIdeographsSupplement (int code); +XMLPUBFUN int xmlUCSIsCJKRadicalsSupplement (int code); +XMLPUBFUN int xmlUCSIsCJKSymbolsandPunctuation (int code); +XMLPUBFUN int xmlUCSIsCJKUnifiedIdeographs (int code); +XMLPUBFUN int xmlUCSIsCJKUnifiedIdeographsExtensionA (int code); +XMLPUBFUN int xmlUCSIsCJKUnifiedIdeographsExtensionB (int code); +XMLPUBFUN int xmlUCSIsCherokee (int code); +XMLPUBFUN int xmlUCSIsCombiningDiacriticalMarks (int code); +XMLPUBFUN int xmlUCSIsCombiningDiacriticalMarksforSymbols (int code); +XMLPUBFUN int xmlUCSIsCombiningHalfMarks (int code); +XMLPUBFUN int xmlUCSIsCombiningMarksforSymbols (int code); +XMLPUBFUN int xmlUCSIsControlPictures (int code); +XMLPUBFUN int xmlUCSIsCurrencySymbols (int code); +XMLPUBFUN int xmlUCSIsCypriotSyllabary (int code); +XMLPUBFUN int xmlUCSIsCyrillic (int code); +XMLPUBFUN int xmlUCSIsCyrillicSupplement (int code); +XMLPUBFUN int xmlUCSIsDeseret (int code); +XMLPUBFUN int xmlUCSIsDevanagari (int code); +XMLPUBFUN int xmlUCSIsDingbats (int code); +XMLPUBFUN int xmlUCSIsEnclosedAlphanumerics (int code); +XMLPUBFUN int xmlUCSIsEnclosedCJKLettersandMonths (int code); +XMLPUBFUN int xmlUCSIsEthiopic (int code); +XMLPUBFUN int xmlUCSIsGeneralPunctuation (int code); +XMLPUBFUN int xmlUCSIsGeometricShapes (int code); +XMLPUBFUN int xmlUCSIsGeorgian (int code); +XMLPUBFUN int xmlUCSIsGothic (int code); +XMLPUBFUN int xmlUCSIsGreek (int code); +XMLPUBFUN int xmlUCSIsGreekExtended (int code); +XMLPUBFUN int xmlUCSIsGreekandCoptic (int code); +XMLPUBFUN int xmlUCSIsGujarati (int code); +XMLPUBFUN int xmlUCSIsGurmukhi (int code); +XMLPUBFUN int xmlUCSIsHalfwidthandFullwidthForms (int code); +XMLPUBFUN int xmlUCSIsHangulCompatibilityJamo (int code); +XMLPUBFUN int xmlUCSIsHangulJamo (int code); +XMLPUBFUN int xmlUCSIsHangulSyllables (int code); +XMLPUBFUN int xmlUCSIsHanunoo (int code); +XMLPUBFUN int xmlUCSIsHebrew (int code); +XMLPUBFUN int xmlUCSIsHighPrivateUseSurrogates (int code); +XMLPUBFUN int xmlUCSIsHighSurrogates (int code); +XMLPUBFUN int xmlUCSIsHiragana (int code); +XMLPUBFUN int xmlUCSIsIPAExtensions (int code); +XMLPUBFUN int xmlUCSIsIdeographicDescriptionCharacters (int code); +XMLPUBFUN int xmlUCSIsKanbun (int code); +XMLPUBFUN int xmlUCSIsKangxiRadicals (int code); 
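+/*
+ * Editorial note (not part of the upstream header): the predicates in this
+ * file take a Unicode code point and return non-zero when it falls in the
+ * named block or category, e.g.
+ *     xmlUCSIsBasicLatin(0x41)         - non-zero, U+0041 'A' is Basic Latin
+ *     xmlUCSIsCatLu(0x41)              - non-zero, 'A' has general category Lu
+ *     xmlUCSIsBlock(0x416, "Cyrillic") - generic lookup by block name string
+ */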
+XMLPUBFUN int xmlUCSIsKannada (int code); +XMLPUBFUN int xmlUCSIsKatakana (int code); +XMLPUBFUN int xmlUCSIsKatakanaPhoneticExtensions (int code); +XMLPUBFUN int xmlUCSIsKhmer (int code); +XMLPUBFUN int xmlUCSIsKhmerSymbols (int code); +XMLPUBFUN int xmlUCSIsLao (int code); +XMLPUBFUN int xmlUCSIsLatin1Supplement (int code); +XMLPUBFUN int xmlUCSIsLatinExtendedA (int code); +XMLPUBFUN int xmlUCSIsLatinExtendedB (int code); +XMLPUBFUN int xmlUCSIsLatinExtendedAdditional (int code); +XMLPUBFUN int xmlUCSIsLetterlikeSymbols (int code); +XMLPUBFUN int xmlUCSIsLimbu (int code); +XMLPUBFUN int xmlUCSIsLinearBIdeograms (int code); +XMLPUBFUN int xmlUCSIsLinearBSyllabary (int code); +XMLPUBFUN int xmlUCSIsLowSurrogates (int code); +XMLPUBFUN int xmlUCSIsMalayalam (int code); +XMLPUBFUN int xmlUCSIsMathematicalAlphanumericSymbols (int code); +XMLPUBFUN int xmlUCSIsMathematicalOperators (int code); +XMLPUBFUN int xmlUCSIsMiscellaneousMathematicalSymbolsA (int code); +XMLPUBFUN int xmlUCSIsMiscellaneousMathematicalSymbolsB (int code); +XMLPUBFUN int xmlUCSIsMiscellaneousSymbols (int code); +XMLPUBFUN int xmlUCSIsMiscellaneousSymbolsandArrows (int code); +XMLPUBFUN int xmlUCSIsMiscellaneousTechnical (int code); +XMLPUBFUN int xmlUCSIsMongolian (int code); +XMLPUBFUN int xmlUCSIsMusicalSymbols (int code); +XMLPUBFUN int xmlUCSIsMyanmar (int code); +XMLPUBFUN int xmlUCSIsNumberForms (int code); +XMLPUBFUN int xmlUCSIsOgham (int code); +XMLPUBFUN int xmlUCSIsOldItalic (int code); +XMLPUBFUN int xmlUCSIsOpticalCharacterRecognition (int code); +XMLPUBFUN int xmlUCSIsOriya (int code); +XMLPUBFUN int xmlUCSIsOsmanya (int code); +XMLPUBFUN int xmlUCSIsPhoneticExtensions (int code); +XMLPUBFUN int xmlUCSIsPrivateUse (int code); +XMLPUBFUN int xmlUCSIsPrivateUseArea (int code); +XMLPUBFUN int xmlUCSIsRunic (int code); +XMLPUBFUN int xmlUCSIsShavian (int code); +XMLPUBFUN int xmlUCSIsSinhala (int code); +XMLPUBFUN int xmlUCSIsSmallFormVariants (int code); +XMLPUBFUN int xmlUCSIsSpacingModifierLetters (int code); +XMLPUBFUN int xmlUCSIsSpecials (int code); +XMLPUBFUN int xmlUCSIsSuperscriptsandSubscripts (int code); +XMLPUBFUN int xmlUCSIsSupplementalArrowsA (int code); +XMLPUBFUN int xmlUCSIsSupplementalArrowsB (int code); +XMLPUBFUN int xmlUCSIsSupplementalMathematicalOperators (int code); +XMLPUBFUN int xmlUCSIsSupplementaryPrivateUseAreaA (int code); +XMLPUBFUN int xmlUCSIsSupplementaryPrivateUseAreaB (int code); +XMLPUBFUN int xmlUCSIsSyriac (int code); +XMLPUBFUN int xmlUCSIsTagalog (int code); +XMLPUBFUN int xmlUCSIsTagbanwa (int code); +XMLPUBFUN int xmlUCSIsTags (int code); +XMLPUBFUN int xmlUCSIsTaiLe (int code); +XMLPUBFUN int xmlUCSIsTaiXuanJingSymbols (int code); +XMLPUBFUN int xmlUCSIsTamil (int code); +XMLPUBFUN int xmlUCSIsTelugu (int code); +XMLPUBFUN int xmlUCSIsThaana (int code); +XMLPUBFUN int xmlUCSIsThai (int code); +XMLPUBFUN int xmlUCSIsTibetan (int code); +XMLPUBFUN int xmlUCSIsUgaritic (int code); +XMLPUBFUN int xmlUCSIsUnifiedCanadianAboriginalSyllabics (int code); +XMLPUBFUN int xmlUCSIsVariationSelectors (int code); +XMLPUBFUN int xmlUCSIsVariationSelectorsSupplement (int code); +XMLPUBFUN int xmlUCSIsYiRadicals (int code); +XMLPUBFUN int xmlUCSIsYiSyllables (int code); +XMLPUBFUN int xmlUCSIsYijingHexagramSymbols (int code); + +XMLPUBFUN int xmlUCSIsBlock (int code, const char *block); + +XMLPUBFUN int xmlUCSIsCatC (int code); +XMLPUBFUN int xmlUCSIsCatCc (int code); +XMLPUBFUN int xmlUCSIsCatCf (int code); +XMLPUBFUN int xmlUCSIsCatCo (int code); +XMLPUBFUN int xmlUCSIsCatCs (int 
code); +XMLPUBFUN int xmlUCSIsCatL (int code); +XMLPUBFUN int xmlUCSIsCatLl (int code); +XMLPUBFUN int xmlUCSIsCatLm (int code); +XMLPUBFUN int xmlUCSIsCatLo (int code); +XMLPUBFUN int xmlUCSIsCatLt (int code); +XMLPUBFUN int xmlUCSIsCatLu (int code); +XMLPUBFUN int xmlUCSIsCatM (int code); +XMLPUBFUN int xmlUCSIsCatMc (int code); +XMLPUBFUN int xmlUCSIsCatMe (int code); +XMLPUBFUN int xmlUCSIsCatMn (int code); +XMLPUBFUN int xmlUCSIsCatN (int code); +XMLPUBFUN int xmlUCSIsCatNd (int code); +XMLPUBFUN int xmlUCSIsCatNl (int code); +XMLPUBFUN int xmlUCSIsCatNo (int code); +XMLPUBFUN int xmlUCSIsCatP (int code); +XMLPUBFUN int xmlUCSIsCatPc (int code); +XMLPUBFUN int xmlUCSIsCatPd (int code); +XMLPUBFUN int xmlUCSIsCatPe (int code); +XMLPUBFUN int xmlUCSIsCatPf (int code); +XMLPUBFUN int xmlUCSIsCatPi (int code); +XMLPUBFUN int xmlUCSIsCatPo (int code); +XMLPUBFUN int xmlUCSIsCatPs (int code); +XMLPUBFUN int xmlUCSIsCatS (int code); +XMLPUBFUN int xmlUCSIsCatSc (int code); +XMLPUBFUN int xmlUCSIsCatSk (int code); +XMLPUBFUN int xmlUCSIsCatSm (int code); +XMLPUBFUN int xmlUCSIsCatSo (int code); +XMLPUBFUN int xmlUCSIsCatZ (int code); +XMLPUBFUN int xmlUCSIsCatZl (int code); +XMLPUBFUN int xmlUCSIsCatZp (int code); +XMLPUBFUN int xmlUCSIsCatZs (int code); + +XMLPUBFUN int xmlUCSIsCat (int code, const char *cat); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_UNICODE_ENABLED */ + +#endif /* __XML_UNICODE_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlversion.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlversion.h new file mode 100644 index 0000000000000000000000000000000000000000..a2f9fe607475fdc9a9e0fbfdefdcaa2f4a11580b --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xmlversion.h @@ -0,0 +1,511 @@ +/* + * Summary: compile-time version information + * Description: compile-time version information for the XML library + * + * Copy: See Copyright for the status of this software. 
+ * + * Author: Daniel Veillard + */ + +#ifndef __XML_VERSION_H__ +#define __XML_VERSION_H__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * use those to be sure nothing nasty will happen if + * your library and includes mismatch + */ +#ifndef LIBXML2_COMPILING_MSCCDEF +XMLPUBFUN void xmlCheckVersion(int version); +#endif /* LIBXML2_COMPILING_MSCCDEF */ + +/** + * LIBXML_DOTTED_VERSION: + * + * the version string like "1.2.3" + */ +#define LIBXML_DOTTED_VERSION "2.12.6" + +/** + * LIBXML_VERSION: + * + * the version number: 1.2.3 value is 10203 + */ +#define LIBXML_VERSION 21206 + +/** + * LIBXML_VERSION_STRING: + * + * the version number string, 1.2.3 value is "10203" + */ +#define LIBXML_VERSION_STRING "21206" + +/** + * LIBXML_VERSION_EXTRA: + * + * extra version information, used to show a git commit description + */ +#define LIBXML_VERSION_EXTRA "" + +/** + * LIBXML_TEST_VERSION: + * + * Macro to check that the libxml version in use is compatible with + * the version the software has been compiled against + */ +#define LIBXML_TEST_VERSION xmlCheckVersion(21206); + +#ifndef VMS +#if 0 +/** + * WITH_TRIO: + * + * defined if the trio support need to be configured in + */ +#define WITH_TRIO +#else +/** + * WITHOUT_TRIO: + * + * defined if the trio support should not be configured in + */ +#define WITHOUT_TRIO +#endif +#else /* VMS */ +/** + * WITH_TRIO: + * + * defined if the trio support need to be configured in + */ +#define WITH_TRIO 1 +#endif /* VMS */ + +/** + * LIBXML_THREAD_ENABLED: + * + * Whether the thread support is configured in + */ +#if 1 +#define LIBXML_THREAD_ENABLED +#endif + +/** + * LIBXML_THREAD_ALLOC_ENABLED: + * + * Whether the allocation hooks are per-thread + */ +#if 0 +#define LIBXML_THREAD_ALLOC_ENABLED +#endif + +/** + * LIBXML_TREE_ENABLED: + * + * Whether the DOM like tree manipulation API support is configured in + */ +#if 1 +#define LIBXML_TREE_ENABLED +#endif + +/** + * LIBXML_OUTPUT_ENABLED: + * + * Whether the serialization/saving support is configured in + */ +#if 1 +#define LIBXML_OUTPUT_ENABLED +#endif + +/** + * LIBXML_PUSH_ENABLED: + * + * Whether the push parsing interfaces are configured in + */ +#if 1 +#define LIBXML_PUSH_ENABLED +#endif + +/** + * LIBXML_READER_ENABLED: + * + * Whether the xmlReader parsing interface is configured in + */ +#if 1 +#define LIBXML_READER_ENABLED +#endif + +/** + * LIBXML_PATTERN_ENABLED: + * + * Whether the xmlPattern node selection interface is configured in + */ +#if 1 +#define LIBXML_PATTERN_ENABLED +#endif + +/** + * LIBXML_WRITER_ENABLED: + * + * Whether the xmlWriter saving interface is configured in + */ +#if 1 +#define LIBXML_WRITER_ENABLED +#endif + +/** + * LIBXML_SAX1_ENABLED: + * + * Whether the older SAX1 interface is configured in + */ +#if 1 +#define LIBXML_SAX1_ENABLED +#endif + +/** + * LIBXML_FTP_ENABLED: + * + * Whether the FTP support is configured in + */ +#if 0 +#define LIBXML_FTP_ENABLED +#endif + +/** + * LIBXML_HTTP_ENABLED: + * + * Whether the HTTP support is configured in + */ +#if 1 +#define LIBXML_HTTP_ENABLED +#endif + +/** + * LIBXML_VALID_ENABLED: + * + * Whether the DTD validation support is configured in + */ +#if 1 +#define LIBXML_VALID_ENABLED +#endif + +/** + * LIBXML_HTML_ENABLED: + * + * Whether the HTML support is configured in + */ +#if 1 +#define LIBXML_HTML_ENABLED +#endif + +/** + * LIBXML_LEGACY_ENABLED: + * + * Whether the deprecated APIs are compiled in for compatibility + */ +#if 0 +#define LIBXML_LEGACY_ENABLED +#endif + +/** + * LIBXML_C14N_ENABLED: 
+ * + * Whether the Canonicalization support is configured in + */ +#if 1 +#define LIBXML_C14N_ENABLED +#endif + +/** + * LIBXML_CATALOG_ENABLED: + * + * Whether the Catalog support is configured in + */ +#if 1 +#define LIBXML_CATALOG_ENABLED +#endif + +/** + * LIBXML_XPATH_ENABLED: + * + * Whether XPath is configured in + */ +#if 1 +#define LIBXML_XPATH_ENABLED +#endif + +/** + * LIBXML_XPTR_ENABLED: + * + * Whether XPointer is configured in + */ +#if 1 +#define LIBXML_XPTR_ENABLED +#endif + +/** + * LIBXML_XPTR_LOCS_ENABLED: + * + * Whether support for XPointer locations is configured in + */ +#if 0 +#define LIBXML_XPTR_LOCS_ENABLED +#endif + +/** + * LIBXML_XINCLUDE_ENABLED: + * + * Whether XInclude is configured in + */ +#if 1 +#define LIBXML_XINCLUDE_ENABLED +#endif + +/** + * LIBXML_ICONV_ENABLED: + * + * Whether iconv support is available + */ +#if 1 +#define LIBXML_ICONV_ENABLED +#endif + +/** + * LIBXML_ICU_ENABLED: + * + * Whether icu support is available + */ +#if 0 +#define LIBXML_ICU_ENABLED +#endif + +/** + * LIBXML_ISO8859X_ENABLED: + * + * Whether ISO-8859-* support is made available in case iconv is not + */ +#if 1 +#define LIBXML_ISO8859X_ENABLED +#endif + +/** + * LIBXML_DEBUG_ENABLED: + * + * Whether Debugging module is configured in + */ +#if 1 +#define LIBXML_DEBUG_ENABLED +#endif + +/** + * DEBUG_MEMORY_LOCATION: + * + * Whether the memory debugging is configured in + */ +#if 0 +#define DEBUG_MEMORY_LOCATION +#endif + +/** + * LIBXML_DEBUG_RUNTIME: + * + * Removed + */ +#if 0 +#define LIBXML_DEBUG_RUNTIME +#endif + +/** + * LIBXML_UNICODE_ENABLED: + * + * Whether the Unicode related interfaces are compiled in + */ +#if 1 +#define LIBXML_UNICODE_ENABLED +#endif + +/** + * LIBXML_REGEXP_ENABLED: + * + * Whether the regular expressions interfaces are compiled in + */ +#if 1 +#define LIBXML_REGEXP_ENABLED +#endif + +/** + * LIBXML_AUTOMATA_ENABLED: + * + * Whether the automata interfaces are compiled in + */ +#if 1 +#define LIBXML_AUTOMATA_ENABLED +#endif + +/** + * LIBXML_EXPR_ENABLED: + * + * Whether the formal expressions interfaces are compiled in + * + * This code is unused and disabled unconditionally for now. 
+ */ +#if 0 +#define LIBXML_EXPR_ENABLED +#endif + +/** + * LIBXML_SCHEMAS_ENABLED: + * + * Whether the Schemas validation interfaces are compiled in + */ +#if 1 +#define LIBXML_SCHEMAS_ENABLED +#endif + +/** + * LIBXML_SCHEMATRON_ENABLED: + * + * Whether the Schematron validation interfaces are compiled in + */ +#if 1 +#define LIBXML_SCHEMATRON_ENABLED +#endif + +/** + * LIBXML_MODULES_ENABLED: + * + * Whether the module interfaces are compiled in + */ +#if 1 +#define LIBXML_MODULES_ENABLED +/** + * LIBXML_MODULE_EXTENSION: + * + * the string suffix used by dynamic modules (usually shared libraries) + */ +#define LIBXML_MODULE_EXTENSION ".so" +#endif + +/** + * LIBXML_ZLIB_ENABLED: + * + * Whether the Zlib support is compiled in + */ +#if 1 +#define LIBXML_ZLIB_ENABLED +#endif + +/** + * LIBXML_LZMA_ENABLED: + * + * Whether the Lzma support is compiled in + */ +#if 0 +#define LIBXML_LZMA_ENABLED +#endif + +#ifdef __GNUC__ +/** DOC_DISABLE */ + +#ifndef ATTRIBUTE_UNUSED +# if ((__GNUC__ > 2) || ((__GNUC__ == 2) && (__GNUC_MINOR__ >= 7))) +# define ATTRIBUTE_UNUSED __attribute__((unused)) +# else +# define ATTRIBUTE_UNUSED +# endif +#endif + +#ifndef LIBXML_ATTR_ALLOC_SIZE +# if (!defined(__clang__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ >= 3)))) +# define LIBXML_ATTR_ALLOC_SIZE(x) __attribute__((alloc_size(x))) +# else +# define LIBXML_ATTR_ALLOC_SIZE(x) +# endif +#else +# define LIBXML_ATTR_ALLOC_SIZE(x) +#endif + +#ifndef LIBXML_ATTR_FORMAT +# if ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3))) +# define LIBXML_ATTR_FORMAT(fmt,args) __attribute__((__format__(__printf__,fmt,args))) +# else +# define LIBXML_ATTR_FORMAT(fmt,args) +# endif +#else +# define LIBXML_ATTR_FORMAT(fmt,args) +#endif + +#ifndef XML_DEPRECATED +# if defined (IN_LIBXML) || (__GNUC__ * 100 + __GNUC_MINOR__ < 301) +# define XML_DEPRECATED +/* Available since at least GCC 3.1 */ +# else +# define XML_DEPRECATED __attribute__((deprecated)) +# endif +#endif + +#if defined(__clang__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 406) + #if defined(__clang__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 800) + #define XML_IGNORE_FPTR_CAST_WARNINGS \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wpedantic\"") \ + _Pragma("GCC diagnostic ignored \"-Wcast-function-type\"") + #else + #define XML_IGNORE_FPTR_CAST_WARNINGS \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wpedantic\"") + #endif + #define XML_POP_WARNINGS \ + _Pragma("GCC diagnostic pop") +#else + #define XML_IGNORE_FPTR_CAST_WARNINGS + #define XML_POP_WARNINGS +#endif + +#else /* ! 
__GNUC__ */ +#define ATTRIBUTE_UNUSED +#define LIBXML_ATTR_ALLOC_SIZE(x) +#define LIBXML_ATTR_FORMAT(fmt,args) +#ifndef XML_DEPRECATED +# if defined (IN_LIBXML) || !defined (_MSC_VER) +# define XML_DEPRECATED +/* Available since Visual Studio 2005 */ +# elif defined (_MSC_VER) && (_MSC_VER >= 1400) +# define XML_DEPRECATED __declspec(deprecated) +# endif +#endif +#if defined (_MSC_VER) && (_MSC_VER >= 1400) +# define XML_IGNORE_FPTR_CAST_WARNINGS __pragma(warning(push)) +#else +# define XML_IGNORE_FPTR_CAST_WARNINGS +#endif +#ifndef XML_POP_WARNINGS +# if defined (_MSC_VER) && (_MSC_VER >= 1400) +# define XML_POP_WARNINGS __pragma(warning(pop)) +# else +# define XML_POP_WARNINGS +# endif +#endif +#endif /* __GNUC__ */ + +#define XML_NO_ATTR + +#ifdef LIBXML_THREAD_ENABLED + #define XML_DECLARE_GLOBAL(name, type, attrs) \ + attrs XMLPUBFUN type *__##name(void); + #define XML_GLOBAL_MACRO(name) (*__##name()) +#else + #define XML_DECLARE_GLOBAL(name, type, attrs) \ + attrs XMLPUBVAR type name; +#endif + +#ifdef __cplusplus +} +#endif /* __cplusplus */ +#endif + + diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpath.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpath.h new file mode 100644 index 0000000000000000000000000000000000000000..6dae0780d830c4725f05e6323f64f2fc1c33ad1b --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpath.h @@ -0,0 +1,575 @@ +/* + * Summary: XML Path Language implementation + * Description: API for the XML Path Language implementation + * + * XML Path Language implementation + * XPath is a language for addressing parts of an XML document, + * designed to be used by both XSLT and XPointer + * http://www.w3.org/TR/xpath + * + * Implements + * W3C Recommendation 16 November 1999 + * http://www.w3.org/TR/1999/REC-xpath-19991116 + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XPATH_H__ +#define __XML_XPATH_H__ + +#include + +#ifdef LIBXML_XPATH_ENABLED + +#include +#include +#include +#endif /* LIBXML_XPATH_ENABLED */ + +#if defined(LIBXML_XPATH_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +#ifdef __cplusplus +extern "C" { +#endif +#endif /* LIBXML_XPATH_ENABLED or LIBXML_SCHEMAS_ENABLED */ + +#ifdef LIBXML_XPATH_ENABLED + +typedef struct _xmlXPathContext xmlXPathContext; +typedef xmlXPathContext *xmlXPathContextPtr; +typedef struct _xmlXPathParserContext xmlXPathParserContext; +typedef xmlXPathParserContext *xmlXPathParserContextPtr; + +/** + * The set of XPath error codes. + */ + +typedef enum { + XPATH_EXPRESSION_OK = 0, + XPATH_NUMBER_ERROR, + XPATH_UNFINISHED_LITERAL_ERROR, + XPATH_START_LITERAL_ERROR, + XPATH_VARIABLE_REF_ERROR, + XPATH_UNDEF_VARIABLE_ERROR, + XPATH_INVALID_PREDICATE_ERROR, + XPATH_EXPR_ERROR, + XPATH_UNCLOSED_ERROR, + XPATH_UNKNOWN_FUNC_ERROR, + XPATH_INVALID_OPERAND, + XPATH_INVALID_TYPE, + XPATH_INVALID_ARITY, + XPATH_INVALID_CTXT_SIZE, + XPATH_INVALID_CTXT_POSITION, + XPATH_MEMORY_ERROR, + XPTR_SYNTAX_ERROR, + XPTR_RESOURCE_ERROR, + XPTR_SUB_RESOURCE_ERROR, + XPATH_UNDEF_PREFIX_ERROR, + XPATH_ENCODING_ERROR, + XPATH_INVALID_CHAR_ERROR, + XPATH_INVALID_CTXT, + XPATH_STACK_ERROR, + XPATH_FORBID_VARIABLE_ERROR, + XPATH_OP_LIMIT_EXCEEDED, + XPATH_RECURSION_LIMIT_EXCEEDED +} xmlXPathError; + +/* + * A node-set (an unordered collection of nodes without duplicates). 
+ */ +typedef struct _xmlNodeSet xmlNodeSet; +typedef xmlNodeSet *xmlNodeSetPtr; +struct _xmlNodeSet { + int nodeNr; /* number of nodes in the set */ + int nodeMax; /* size of the array as allocated */ + xmlNodePtr *nodeTab; /* array of nodes in no particular order */ + /* @@ with_ns to check whether namespace nodes should be looked at @@ */ +}; + +/* + * An expression is evaluated to yield an object, which + * has one of the following four basic types: + * - node-set + * - boolean + * - number + * - string + * + * @@ XPointer will add more types ! + */ + +typedef enum { + XPATH_UNDEFINED = 0, + XPATH_NODESET = 1, + XPATH_BOOLEAN = 2, + XPATH_NUMBER = 3, + XPATH_STRING = 4, +#ifdef LIBXML_XPTR_LOCS_ENABLED + XPATH_POINT = 5, + XPATH_RANGE = 6, + XPATH_LOCATIONSET = 7, +#endif + XPATH_USERS = 8, + XPATH_XSLT_TREE = 9 /* An XSLT value tree, non modifiable */ +} xmlXPathObjectType; + +#ifndef LIBXML_XPTR_LOCS_ENABLED +/** DOC_DISABLE */ +#define XPATH_POINT 5 +#define XPATH_RANGE 6 +#define XPATH_LOCATIONSET 7 +/** DOC_ENABLE */ +#endif + +typedef struct _xmlXPathObject xmlXPathObject; +typedef xmlXPathObject *xmlXPathObjectPtr; +struct _xmlXPathObject { + xmlXPathObjectType type; + xmlNodeSetPtr nodesetval; + int boolval; + double floatval; + xmlChar *stringval; + void *user; + int index; + void *user2; + int index2; +}; + +/** + * xmlXPathConvertFunc: + * @obj: an XPath object + * @type: the number of the target type + * + * A conversion function is associated to a type and used to cast + * the new type to primitive values. + * + * Returns -1 in case of error, 0 otherwise + */ +typedef int (*xmlXPathConvertFunc) (xmlXPathObjectPtr obj, int type); + +/* + * Extra type: a name and a conversion function. + */ + +typedef struct _xmlXPathType xmlXPathType; +typedef xmlXPathType *xmlXPathTypePtr; +struct _xmlXPathType { + const xmlChar *name; /* the type name */ + xmlXPathConvertFunc func; /* the conversion function */ +}; + +/* + * Extra variable: a name and a value. + */ + +typedef struct _xmlXPathVariable xmlXPathVariable; +typedef xmlXPathVariable *xmlXPathVariablePtr; +struct _xmlXPathVariable { + const xmlChar *name; /* the variable name */ + xmlXPathObjectPtr value; /* the value */ +}; + +/** + * xmlXPathEvalFunc: + * @ctxt: an XPath parser context + * @nargs: the number of arguments passed to the function + * + * An XPath evaluation function, the parameters are on the XPath context stack. + */ + +typedef void (*xmlXPathEvalFunc)(xmlXPathParserContextPtr ctxt, + int nargs); + +/* + * Extra function: a name and a evaluation function. + */ + +typedef struct _xmlXPathFunct xmlXPathFunct; +typedef xmlXPathFunct *xmlXPathFuncPtr; +struct _xmlXPathFunct { + const xmlChar *name; /* the function name */ + xmlXPathEvalFunc func; /* the evaluation function */ +}; + +/** + * xmlXPathAxisFunc: + * @ctxt: the XPath interpreter context + * @cur: the previous node being explored on that axis + * + * An axis traversal function. To traverse an axis, the engine calls + * the first time with cur == NULL and repeat until the function returns + * NULL indicating the end of the axis traversal. + * + * Returns the next node in that axis or NULL if at the end of the axis. + */ + +typedef xmlXPathObjectPtr (*xmlXPathAxisFunc) (xmlXPathParserContextPtr ctxt, + xmlXPathObjectPtr cur); + +/* + * Extra axis: a name and an axis function. 
+ */ + +typedef struct _xmlXPathAxis xmlXPathAxis; +typedef xmlXPathAxis *xmlXPathAxisPtr; +struct _xmlXPathAxis { + const xmlChar *name; /* the axis name */ + xmlXPathAxisFunc func; /* the search function */ +}; + +/** + * xmlXPathFunction: + * @ctxt: the XPath interprestation context + * @nargs: the number of arguments + * + * An XPath function. + * The arguments (if any) are popped out from the context stack + * and the result is pushed on the stack. + */ + +typedef void (*xmlXPathFunction) (xmlXPathParserContextPtr ctxt, int nargs); + +/* + * Function and Variable Lookup. + */ + +/** + * xmlXPathVariableLookupFunc: + * @ctxt: an XPath context + * @name: name of the variable + * @ns_uri: the namespace name hosting this variable + * + * Prototype for callbacks used to plug variable lookup in the XPath + * engine. + * + * Returns the XPath object value or NULL if not found. + */ +typedef xmlXPathObjectPtr (*xmlXPathVariableLookupFunc) (void *ctxt, + const xmlChar *name, + const xmlChar *ns_uri); + +/** + * xmlXPathFuncLookupFunc: + * @ctxt: an XPath context + * @name: name of the function + * @ns_uri: the namespace name hosting this function + * + * Prototype for callbacks used to plug function lookup in the XPath + * engine. + * + * Returns the XPath function or NULL if not found. + */ +typedef xmlXPathFunction (*xmlXPathFuncLookupFunc) (void *ctxt, + const xmlChar *name, + const xmlChar *ns_uri); + +/** + * xmlXPathFlags: + * Flags for XPath engine compilation and runtime + */ +/** + * XML_XPATH_CHECKNS: + * + * check namespaces at compilation + */ +#define XML_XPATH_CHECKNS (1<<0) +/** + * XML_XPATH_NOVAR: + * + * forbid variables in expression + */ +#define XML_XPATH_NOVAR (1<<1) + +/** + * xmlXPathContext: + * + * Expression evaluation occurs with respect to a context. + * he context consists of: + * - a node (the context node) + * - a node list (the context node list) + * - a set of variable bindings + * - a function library + * - the set of namespace declarations in scope for the expression + * Following the switch to hash tables, this need to be trimmed up at + * the next binary incompatible release. + * The node may be modified when the context is passed to libxml2 + * for an XPath evaluation so you may need to initialize it again + * before the next call. + */ + +struct _xmlXPathContext { + xmlDocPtr doc; /* The current document */ + xmlNodePtr node; /* The current node */ + + int nb_variables_unused; /* unused (hash table) */ + int max_variables_unused; /* unused (hash table) */ + xmlHashTablePtr varHash; /* Hash table of defined variables */ + + int nb_types; /* number of defined types */ + int max_types; /* max number of types */ + xmlXPathTypePtr types; /* Array of defined types */ + + int nb_funcs_unused; /* unused (hash table) */ + int max_funcs_unused; /* unused (hash table) */ + xmlHashTablePtr funcHash; /* Hash table of defined funcs */ + + int nb_axis; /* number of defined axis */ + int max_axis; /* max number of axis */ + xmlXPathAxisPtr axis; /* Array of defined axis */ + + /* the namespace nodes of the context node */ + xmlNsPtr *namespaces; /* Array of namespaces */ + int nsNr; /* number of namespace in scope */ + void *user; /* function to free */ + + /* extra variables */ + int contextSize; /* the context size */ + int proximityPosition; /* the proximity position */ + + /* extra stuff for XPointer */ + int xptr; /* is this an XPointer context? 
*/ + xmlNodePtr here; /* for here() */ + xmlNodePtr origin; /* for origin() */ + + /* the set of namespace declarations in scope for the expression */ + xmlHashTablePtr nsHash; /* The namespaces hash table */ + xmlXPathVariableLookupFunc varLookupFunc;/* variable lookup func */ + void *varLookupData; /* variable lookup data */ + + /* Possibility to link in an extra item */ + void *extra; /* needed for XSLT */ + + /* The function name and URI when calling a function */ + const xmlChar *function; + const xmlChar *functionURI; + + /* function lookup function and data */ + xmlXPathFuncLookupFunc funcLookupFunc;/* function lookup func */ + void *funcLookupData; /* function lookup data */ + + /* temporary namespace lists kept for walking the namespace axis */ + xmlNsPtr *tmpNsList; /* Array of namespaces */ + int tmpNsNr; /* number of namespaces in scope */ + + /* error reporting mechanism */ + void *userData; /* user specific data block */ + xmlStructuredErrorFunc error; /* the callback in case of errors */ + xmlError lastError; /* the last error */ + xmlNodePtr debugNode; /* the source node XSLT */ + + /* dictionary */ + xmlDictPtr dict; /* dictionary if any */ + + int flags; /* flags to control compilation */ + + /* Cache for reusal of XPath objects */ + void *cache; + + /* Resource limits */ + unsigned long opLimit; + unsigned long opCount; + int depth; +}; + +/* + * The structure of a compiled expression form is not public. + */ + +typedef struct _xmlXPathCompExpr xmlXPathCompExpr; +typedef xmlXPathCompExpr *xmlXPathCompExprPtr; + +/** + * xmlXPathParserContext: + * + * An XPath parser context. It contains pure parsing information, + * an xmlXPathContext, and the stack of objects. + */ +struct _xmlXPathParserContext { + const xmlChar *cur; /* the current char being parsed */ + const xmlChar *base; /* the full expression */ + + int error; /* error code */ + + xmlXPathContextPtr context; /* the evaluation context */ + xmlXPathObjectPtr value; /* the current value */ + int valueNr; /* number of values stacked */ + int valueMax; /* max number of values stacked */ + xmlXPathObjectPtr *valueTab; /* stack of values */ + + xmlXPathCompExprPtr comp; /* the precompiled expression */ + int xptr; /* it this an XPointer expression */ + xmlNodePtr ancestor; /* used for walking preceding axis */ + + int valueFrame; /* always zero for compatibility */ +}; + +/************************************************************************ + * * + * Public API * + * * + ************************************************************************/ + +/** + * Objects and Nodesets handling + */ + +XMLPUBVAR double xmlXPathNAN; +XMLPUBVAR double xmlXPathPINF; +XMLPUBVAR double xmlXPathNINF; + +/* These macros may later turn into functions */ +/** + * xmlXPathNodeSetGetLength: + * @ns: a node-set + * + * Implement a functionality similar to the DOM NodeList.length. + * + * Returns the number of nodes in the node-set. + */ +#define xmlXPathNodeSetGetLength(ns) ((ns) ? (ns)->nodeNr : 0) +/** + * xmlXPathNodeSetItem: + * @ns: a node-set + * @index: index of a node in the set + * + * Implements a functionality similar to the DOM NodeList.item(). + * + * Returns the xmlNodePtr at the given @index in @ns or NULL if + * @index is out of range (0 to length-1) + */ +#define xmlXPathNodeSetItem(ns, index) \ + ((((ns) != NULL) && \ + ((index) >= 0) && ((index) < (ns)->nodeNr)) ? \ + (ns)->nodeTab[(index)] \ + : NULL) +/** + * xmlXPathNodeSetIsEmpty: + * @ns: a node-set + * + * Checks whether @ns is empty or not. 
+ * + * Returns %TRUE if @ns is an empty node-set. + */ +#define xmlXPathNodeSetIsEmpty(ns) \ + (((ns) == NULL) || ((ns)->nodeNr == 0) || ((ns)->nodeTab == NULL)) + + +XMLPUBFUN void + xmlXPathFreeObject (xmlXPathObjectPtr obj); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeSetCreate (xmlNodePtr val); +XMLPUBFUN void + xmlXPathFreeNodeSetList (xmlXPathObjectPtr obj); +XMLPUBFUN void + xmlXPathFreeNodeSet (xmlNodeSetPtr obj); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathObjectCopy (xmlXPathObjectPtr val); +XMLPUBFUN int + xmlXPathCmpNodes (xmlNodePtr node1, + xmlNodePtr node2); +/** + * Conversion functions to basic types. + */ +XMLPUBFUN int + xmlXPathCastNumberToBoolean (double val); +XMLPUBFUN int + xmlXPathCastStringToBoolean (const xmlChar * val); +XMLPUBFUN int + xmlXPathCastNodeSetToBoolean(xmlNodeSetPtr ns); +XMLPUBFUN int + xmlXPathCastToBoolean (xmlXPathObjectPtr val); + +XMLPUBFUN double + xmlXPathCastBooleanToNumber (int val); +XMLPUBFUN double + xmlXPathCastStringToNumber (const xmlChar * val); +XMLPUBFUN double + xmlXPathCastNodeToNumber (xmlNodePtr node); +XMLPUBFUN double + xmlXPathCastNodeSetToNumber (xmlNodeSetPtr ns); +XMLPUBFUN double + xmlXPathCastToNumber (xmlXPathObjectPtr val); + +XMLPUBFUN xmlChar * + xmlXPathCastBooleanToString (int val); +XMLPUBFUN xmlChar * + xmlXPathCastNumberToString (double val); +XMLPUBFUN xmlChar * + xmlXPathCastNodeToString (xmlNodePtr node); +XMLPUBFUN xmlChar * + xmlXPathCastNodeSetToString (xmlNodeSetPtr ns); +XMLPUBFUN xmlChar * + xmlXPathCastToString (xmlXPathObjectPtr val); + +XMLPUBFUN xmlXPathObjectPtr + xmlXPathConvertBoolean (xmlXPathObjectPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathConvertNumber (xmlXPathObjectPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathConvertString (xmlXPathObjectPtr val); + +/** + * Context handling. + */ +XMLPUBFUN xmlXPathContextPtr + xmlXPathNewContext (xmlDocPtr doc); +XMLPUBFUN void + xmlXPathFreeContext (xmlXPathContextPtr ctxt); +XMLPUBFUN int + xmlXPathContextSetCache(xmlXPathContextPtr ctxt, + int active, + int value, + int options); +/** + * Evaluation functions. + */ +XMLPUBFUN long + xmlXPathOrderDocElems (xmlDocPtr doc); +XMLPUBFUN int + xmlXPathSetContextNode (xmlNodePtr node, + xmlXPathContextPtr ctx); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNodeEval (xmlNodePtr node, + const xmlChar *str, + xmlXPathContextPtr ctx); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathEval (const xmlChar *str, + xmlXPathContextPtr ctx); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathEvalExpression (const xmlChar *str, + xmlXPathContextPtr ctxt); +XMLPUBFUN int + xmlXPathEvalPredicate (xmlXPathContextPtr ctxt, + xmlXPathObjectPtr res); +/** + * Separate compilation/evaluation entry points. + */ +XMLPUBFUN xmlXPathCompExprPtr + xmlXPathCompile (const xmlChar *str); +XMLPUBFUN xmlXPathCompExprPtr + xmlXPathCtxtCompile (xmlXPathContextPtr ctxt, + const xmlChar *str); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathCompiledEval (xmlXPathCompExprPtr comp, + xmlXPathContextPtr ctx); +XMLPUBFUN int + xmlXPathCompiledEvalToBoolean(xmlXPathCompExprPtr comp, + xmlXPathContextPtr ctxt); +XMLPUBFUN void + xmlXPathFreeCompExpr (xmlXPathCompExprPtr comp); +#endif /* LIBXML_XPATH_ENABLED */ +#if defined(LIBXML_XPATH_ENABLED) || defined(LIBXML_SCHEMAS_ENABLED) +XML_DEPRECATED +XMLPUBFUN void + xmlXPathInit (void); +XMLPUBFUN int + xmlXPathIsNaN (double val); +XMLPUBFUN int + xmlXPathIsInf (double val); + +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPATH_ENABLED or LIBXML_SCHEMAS_ENABLED*/ +#endif /* ! 
__XML_XPATH_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpathInternals.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpathInternals.h new file mode 100644 index 0000000000000000000000000000000000000000..d1c90dff2aac24ba418503d433e7609661233060 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpathInternals.h @@ -0,0 +1,633 @@ +/* + * Summary: internal interfaces for XML Path Language implementation + * Description: internal interfaces for XML Path Language implementation + * used to build new modules on top of XPath like XPointer and + * XSLT + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XPATH_INTERNALS_H__ +#define __XML_XPATH_INTERNALS_H__ + +#include +#include +#include + +#ifdef LIBXML_XPATH_ENABLED + +#ifdef __cplusplus +extern "C" { +#endif + +/************************************************************************ + * * + * Helpers * + * * + ************************************************************************/ + +/* + * Many of these macros may later turn into functions. They + * shouldn't be used in #ifdef's preprocessor instructions. + */ +/** + * xmlXPathSetError: + * @ctxt: an XPath parser context + * @err: an xmlXPathError code + * + * Raises an error. + */ +#define xmlXPathSetError(ctxt, err) \ + { xmlXPatherror((ctxt), __FILE__, __LINE__, (err)); \ + if ((ctxt) != NULL) (ctxt)->error = (err); } + +/** + * xmlXPathSetArityError: + * @ctxt: an XPath parser context + * + * Raises an XPATH_INVALID_ARITY error. + */ +#define xmlXPathSetArityError(ctxt) \ + xmlXPathSetError((ctxt), XPATH_INVALID_ARITY) + +/** + * xmlXPathSetTypeError: + * @ctxt: an XPath parser context + * + * Raises an XPATH_INVALID_TYPE error. + */ +#define xmlXPathSetTypeError(ctxt) \ + xmlXPathSetError((ctxt), XPATH_INVALID_TYPE) + +/** + * xmlXPathGetError: + * @ctxt: an XPath parser context + * + * Get the error code of an XPath context. + * + * Returns the context error. + */ +#define xmlXPathGetError(ctxt) ((ctxt)->error) + +/** + * xmlXPathCheckError: + * @ctxt: an XPath parser context + * + * Check if an XPath error was raised. + * + * Returns true if an error has been raised, false otherwise. + */ +#define xmlXPathCheckError(ctxt) ((ctxt)->error != XPATH_EXPRESSION_OK) + +/** + * xmlXPathGetDocument: + * @ctxt: an XPath parser context + * + * Get the document of an XPath context. + * + * Returns the context document. + */ +#define xmlXPathGetDocument(ctxt) ((ctxt)->context->doc) + +/** + * xmlXPathGetContextNode: + * @ctxt: an XPath parser context + * + * Get the context node of an XPath context. + * + * Returns the context node. + */ +#define xmlXPathGetContextNode(ctxt) ((ctxt)->context->node) + +XMLPUBFUN int + xmlXPathPopBoolean (xmlXPathParserContextPtr ctxt); +XMLPUBFUN double + xmlXPathPopNumber (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlChar * + xmlXPathPopString (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlNodeSetPtr + xmlXPathPopNodeSet (xmlXPathParserContextPtr ctxt); +XMLPUBFUN void * + xmlXPathPopExternal (xmlXPathParserContextPtr ctxt); + +/** + * xmlXPathReturnBoolean: + * @ctxt: an XPath parser context + * @val: a boolean + * + * Pushes the boolean @val on the context stack. + */ +#define xmlXPathReturnBoolean(ctxt, val) \ + valuePush((ctxt), xmlXPathNewBoolean(val)) + +/** + * xmlXPathReturnTrue: + * @ctxt: an XPath parser context + * + * Pushes true on the context stack. 
+ */ +#define xmlXPathReturnTrue(ctxt) xmlXPathReturnBoolean((ctxt), 1) + +/** + * xmlXPathReturnFalse: + * @ctxt: an XPath parser context + * + * Pushes false on the context stack. + */ +#define xmlXPathReturnFalse(ctxt) xmlXPathReturnBoolean((ctxt), 0) + +/** + * xmlXPathReturnNumber: + * @ctxt: an XPath parser context + * @val: a double + * + * Pushes the double @val on the context stack. + */ +#define xmlXPathReturnNumber(ctxt, val) \ + valuePush((ctxt), xmlXPathNewFloat(val)) + +/** + * xmlXPathReturnString: + * @ctxt: an XPath parser context + * @str: a string + * + * Pushes the string @str on the context stack. + */ +#define xmlXPathReturnString(ctxt, str) \ + valuePush((ctxt), xmlXPathWrapString(str)) + +/** + * xmlXPathReturnEmptyString: + * @ctxt: an XPath parser context + * + * Pushes an empty string on the stack. + */ +#define xmlXPathReturnEmptyString(ctxt) \ + valuePush((ctxt), xmlXPathNewCString("")) + +/** + * xmlXPathReturnNodeSet: + * @ctxt: an XPath parser context + * @ns: a node-set + * + * Pushes the node-set @ns on the context stack. + */ +#define xmlXPathReturnNodeSet(ctxt, ns) \ + valuePush((ctxt), xmlXPathWrapNodeSet(ns)) + +/** + * xmlXPathReturnEmptyNodeSet: + * @ctxt: an XPath parser context + * + * Pushes an empty node-set on the context stack. + */ +#define xmlXPathReturnEmptyNodeSet(ctxt) \ + valuePush((ctxt), xmlXPathNewNodeSet(NULL)) + +/** + * xmlXPathReturnExternal: + * @ctxt: an XPath parser context + * @val: user data + * + * Pushes user data on the context stack. + */ +#define xmlXPathReturnExternal(ctxt, val) \ + valuePush((ctxt), xmlXPathWrapExternal(val)) + +/** + * xmlXPathStackIsNodeSet: + * @ctxt: an XPath parser context + * + * Check if the current value on the XPath stack is a node set or + * an XSLT value tree. + * + * Returns true if the current object on the stack is a node-set. + */ +#define xmlXPathStackIsNodeSet(ctxt) \ + (((ctxt)->value != NULL) \ + && (((ctxt)->value->type == XPATH_NODESET) \ + || ((ctxt)->value->type == XPATH_XSLT_TREE))) + +/** + * xmlXPathStackIsExternal: + * @ctxt: an XPath parser context + * + * Checks if the current value on the XPath stack is an external + * object. + * + * Returns true if the current object on the stack is an external + * object. + */ +#define xmlXPathStackIsExternal(ctxt) \ + ((ctxt->value != NULL) && (ctxt->value->type == XPATH_USERS)) + +/** + * xmlXPathEmptyNodeSet: + * @ns: a node-set + * + * Empties a node-set. + */ +#define xmlXPathEmptyNodeSet(ns) \ + { while ((ns)->nodeNr > 0) (ns)->nodeTab[--(ns)->nodeNr] = NULL; } + +/** + * CHECK_ERROR: + * + * Macro to return from the function if an XPath error was detected. + */ +#define CHECK_ERROR \ + if (ctxt->error != XPATH_EXPRESSION_OK) return + +/** + * CHECK_ERROR0: + * + * Macro to return 0 from the function if an XPath error was detected. + */ +#define CHECK_ERROR0 \ + if (ctxt->error != XPATH_EXPRESSION_OK) return(0) + +/** + * XP_ERROR: + * @X: the error code + * + * Macro to raise an XPath error and return. + */ +#define XP_ERROR(X) \ + { xmlXPathErr(ctxt, X); return; } + +/** + * XP_ERROR0: + * @X: the error code + * + * Macro to raise an XPath error and return 0. + */ +#define XP_ERROR0(X) \ + { xmlXPathErr(ctxt, X); return(0); } + +/** + * CHECK_TYPE: + * @typeval: the XPath type + * + * Macro to check that the value on top of the XPath stack is of a given + * type. 
+ */ +#define CHECK_TYPE(typeval) \ + if ((ctxt->value == NULL) || (ctxt->value->type != typeval)) \ + XP_ERROR(XPATH_INVALID_TYPE) + +/** + * CHECK_TYPE0: + * @typeval: the XPath type + * + * Macro to check that the value on top of the XPath stack is of a given + * type. Return(0) in case of failure + */ +#define CHECK_TYPE0(typeval) \ + if ((ctxt->value == NULL) || (ctxt->value->type != typeval)) \ + XP_ERROR0(XPATH_INVALID_TYPE) + +/** + * CHECK_ARITY: + * @x: the number of expected args + * + * Macro to check that the number of args passed to an XPath function matches. + */ +#define CHECK_ARITY(x) \ + if (ctxt == NULL) return; \ + if (nargs != (x)) \ + XP_ERROR(XPATH_INVALID_ARITY); \ + if (ctxt->valueNr < (x)) \ + XP_ERROR(XPATH_STACK_ERROR); + +/** + * CAST_TO_STRING: + * + * Macro to try to cast the value on the top of the XPath stack to a string. + */ +#define CAST_TO_STRING \ + if ((ctxt->value != NULL) && (ctxt->value->type != XPATH_STRING)) \ + xmlXPathStringFunction(ctxt, 1); + +/** + * CAST_TO_NUMBER: + * + * Macro to try to cast the value on the top of the XPath stack to a number. + */ +#define CAST_TO_NUMBER \ + if ((ctxt->value != NULL) && (ctxt->value->type != XPATH_NUMBER)) \ + xmlXPathNumberFunction(ctxt, 1); + +/** + * CAST_TO_BOOLEAN: + * + * Macro to try to cast the value on the top of the XPath stack to a boolean. + */ +#define CAST_TO_BOOLEAN \ + if ((ctxt->value != NULL) && (ctxt->value->type != XPATH_BOOLEAN)) \ + xmlXPathBooleanFunction(ctxt, 1); + +/* + * Variable Lookup forwarding. + */ + +XMLPUBFUN void + xmlXPathRegisterVariableLookup (xmlXPathContextPtr ctxt, + xmlXPathVariableLookupFunc f, + void *data); + +/* + * Function Lookup forwarding. + */ + +XMLPUBFUN void + xmlXPathRegisterFuncLookup (xmlXPathContextPtr ctxt, + xmlXPathFuncLookupFunc f, + void *funcCtxt); + +/* + * Error reporting. + */ +XMLPUBFUN void + xmlXPatherror (xmlXPathParserContextPtr ctxt, + const char *file, + int line, + int no); + +XMLPUBFUN void + xmlXPathErr (xmlXPathParserContextPtr ctxt, + int error); + +#ifdef LIBXML_DEBUG_ENABLED +XMLPUBFUN void + xmlXPathDebugDumpObject (FILE *output, + xmlXPathObjectPtr cur, + int depth); +XMLPUBFUN void + xmlXPathDebugDumpCompExpr(FILE *output, + xmlXPathCompExprPtr comp, + int depth); +#endif +/** + * NodeSet handling. 
+ */ +XMLPUBFUN int + xmlXPathNodeSetContains (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN xmlNodeSetPtr + xmlXPathDifference (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); +XMLPUBFUN xmlNodeSetPtr + xmlXPathIntersection (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + +XMLPUBFUN xmlNodeSetPtr + xmlXPathDistinctSorted (xmlNodeSetPtr nodes); +XMLPUBFUN xmlNodeSetPtr + xmlXPathDistinct (xmlNodeSetPtr nodes); + +XMLPUBFUN int + xmlXPathHasSameNodes (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeLeadingSorted (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathLeadingSorted (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeLeading (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathLeading (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeTrailingSorted (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathTrailingSorted (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeTrailing (xmlNodeSetPtr nodes, + xmlNodePtr node); +XMLPUBFUN xmlNodeSetPtr + xmlXPathTrailing (xmlNodeSetPtr nodes1, + xmlNodeSetPtr nodes2); + + +/** + * Extending a context. + */ + +XMLPUBFUN int + xmlXPathRegisterNs (xmlXPathContextPtr ctxt, + const xmlChar *prefix, + const xmlChar *ns_uri); +XMLPUBFUN const xmlChar * + xmlXPathNsLookup (xmlXPathContextPtr ctxt, + const xmlChar *prefix); +XMLPUBFUN void + xmlXPathRegisteredNsCleanup (xmlXPathContextPtr ctxt); + +XMLPUBFUN int + xmlXPathRegisterFunc (xmlXPathContextPtr ctxt, + const xmlChar *name, + xmlXPathFunction f); +XMLPUBFUN int + xmlXPathRegisterFuncNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri, + xmlXPathFunction f); +XMLPUBFUN int + xmlXPathRegisterVariable (xmlXPathContextPtr ctxt, + const xmlChar *name, + xmlXPathObjectPtr value); +XMLPUBFUN int + xmlXPathRegisterVariableNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri, + xmlXPathObjectPtr value); +XMLPUBFUN xmlXPathFunction + xmlXPathFunctionLookup (xmlXPathContextPtr ctxt, + const xmlChar *name); +XMLPUBFUN xmlXPathFunction + xmlXPathFunctionLookupNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri); +XMLPUBFUN void + xmlXPathRegisteredFuncsCleanup (xmlXPathContextPtr ctxt); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathVariableLookup (xmlXPathContextPtr ctxt, + const xmlChar *name); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathVariableLookupNS (xmlXPathContextPtr ctxt, + const xmlChar *name, + const xmlChar *ns_uri); +XMLPUBFUN void + xmlXPathRegisteredVariablesCleanup(xmlXPathContextPtr ctxt); + +/** + * Utilities to extend XPath. + */ +XMLPUBFUN xmlXPathParserContextPtr + xmlXPathNewParserContext (const xmlChar *str, + xmlXPathContextPtr ctxt); +XMLPUBFUN void + xmlXPathFreeParserContext (xmlXPathParserContextPtr ctxt); + +/* TODO: remap to xmlXPathValuePop and Push. 
*/ +XMLPUBFUN xmlXPathObjectPtr + valuePop (xmlXPathParserContextPtr ctxt); +XMLPUBFUN int + valuePush (xmlXPathParserContextPtr ctxt, + xmlXPathObjectPtr value); + +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewString (const xmlChar *val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewCString (const char *val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapString (xmlChar *val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapCString (char * val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewFloat (double val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewBoolean (int val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewNodeSet (xmlNodePtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewValueTree (xmlNodePtr val); +XMLPUBFUN int + xmlXPathNodeSetAdd (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN int + xmlXPathNodeSetAddUnique (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN int + xmlXPathNodeSetAddNs (xmlNodeSetPtr cur, + xmlNodePtr node, + xmlNsPtr ns); +XMLPUBFUN void + xmlXPathNodeSetSort (xmlNodeSetPtr set); + +XMLPUBFUN void + xmlXPathRoot (xmlXPathParserContextPtr ctxt); +XMLPUBFUN void + xmlXPathEvalExpr (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlChar * + xmlXPathParseName (xmlXPathParserContextPtr ctxt); +XMLPUBFUN xmlChar * + xmlXPathParseNCName (xmlXPathParserContextPtr ctxt); + +/* + * Existing functions. + */ +XMLPUBFUN double + xmlXPathStringEvalNumber (const xmlChar *str); +XMLPUBFUN int + xmlXPathEvaluatePredicateResult (xmlXPathParserContextPtr ctxt, + xmlXPathObjectPtr res); +XMLPUBFUN void + xmlXPathRegisterAllFunctions (xmlXPathContextPtr ctxt); +XMLPUBFUN xmlNodeSetPtr + xmlXPathNodeSetMerge (xmlNodeSetPtr val1, + xmlNodeSetPtr val2); +XMLPUBFUN void + xmlXPathNodeSetDel (xmlNodeSetPtr cur, + xmlNodePtr val); +XMLPUBFUN void + xmlXPathNodeSetRemove (xmlNodeSetPtr cur, + int val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathNewNodeSetList (xmlNodeSetPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapNodeSet (xmlNodeSetPtr val); +XMLPUBFUN xmlXPathObjectPtr + xmlXPathWrapExternal (void *val); + +XMLPUBFUN int xmlXPathEqualValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN int xmlXPathNotEqualValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN int xmlXPathCompareValues(xmlXPathParserContextPtr ctxt, int inf, int strict); +XMLPUBFUN void xmlXPathValueFlipSign(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathAddValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathSubValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathMultValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathDivValues(xmlXPathParserContextPtr ctxt); +XMLPUBFUN void xmlXPathModValues(xmlXPathParserContextPtr ctxt); + +XMLPUBFUN int xmlXPathIsNodeType(const xmlChar *name); + +/* + * Some of the axis navigation routines. 
+ */ +XMLPUBFUN xmlNodePtr xmlXPathNextSelf(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextChild(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextDescendant(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextDescendantOrSelf(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextParent(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextAncestorOrSelf(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextFollowingSibling(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextFollowing(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextNamespace(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextAttribute(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextPreceding(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextAncestor(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +XMLPUBFUN xmlNodePtr xmlXPathNextPrecedingSibling(xmlXPathParserContextPtr ctxt, + xmlNodePtr cur); +/* + * The official core of XPath functions. + */ +XMLPUBFUN void xmlXPathLastFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathPositionFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathCountFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathIdFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathLocalNameFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNamespaceURIFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathStringFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathStringLengthFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathConcatFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathContainsFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathStartsWithFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSubstringFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSubstringBeforeFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSubstringAfterFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNormalizeFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathTranslateFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNotFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathTrueFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathFalseFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathLangFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathNumberFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathSumFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathFloorFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathCeilingFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathRoundFunction(xmlXPathParserContextPtr ctxt, int nargs); +XMLPUBFUN void xmlXPathBooleanFunction(xmlXPathParserContextPtr ctxt, int nargs); + +/** + * Really internal functions + */ +XMLPUBFUN void xmlXPathNodeSetFreeNs(xmlNsPtr ns); + 
+#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPATH_ENABLED */ +#endif /* ! __XML_XPATH_INTERNALS_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpointer.h b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpointer.h new file mode 100644 index 0000000000000000000000000000000000000000..a5260008fc09875d409c59d8c6099f97110ebd6f --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/lxml/includes/libxml/xpointer.h @@ -0,0 +1,138 @@ +/* + * Summary: API to handle XML Pointers + * Description: API to handle XML Pointers + * Base implementation was made accordingly to + * W3C Candidate Recommendation 7 June 2000 + * http://www.w3.org/TR/2000/CR-xptr-20000607 + * + * Added support for the element() scheme described in: + * W3C Proposed Recommendation 13 November 2002 + * http://www.w3.org/TR/2002/PR-xptr-element-20021113/ + * + * Copy: See Copyright for the status of this software. + * + * Author: Daniel Veillard + */ + +#ifndef __XML_XPTR_H__ +#define __XML_XPTR_H__ + +#include + +#ifdef LIBXML_XPTR_ENABLED + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(LIBXML_XPTR_LOCS_ENABLED) +/* + * A Location Set + */ +typedef struct _xmlLocationSet xmlLocationSet; +typedef xmlLocationSet *xmlLocationSetPtr; +struct _xmlLocationSet { + int locNr; /* number of locations in the set */ + int locMax; /* size of the array as allocated */ + xmlXPathObjectPtr *locTab;/* array of locations */ +}; + +/* + * Handling of location sets. + */ + +XML_DEPRECATED +XMLPUBFUN xmlLocationSetPtr + xmlXPtrLocationSetCreate (xmlXPathObjectPtr val); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrFreeLocationSet (xmlLocationSetPtr obj); +XML_DEPRECATED +XMLPUBFUN xmlLocationSetPtr + xmlXPtrLocationSetMerge (xmlLocationSetPtr val1, + xmlLocationSetPtr val2); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRange (xmlNodePtr start, + int startindex, + xmlNodePtr end, + int endindex); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangePoints (xmlXPathObjectPtr start, + xmlXPathObjectPtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangeNodePoint (xmlNodePtr start, + xmlXPathObjectPtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangePointNode (xmlXPathObjectPtr start, + xmlNodePtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangeNodes (xmlNodePtr start, + xmlNodePtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewLocationSetNodes (xmlNodePtr start, + xmlNodePtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewLocationSetNodeSet(xmlNodeSetPtr set); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewRangeNodeObject (xmlNodePtr start, + xmlXPathObjectPtr end); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrNewCollapsedRange (xmlNodePtr start); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrLocationSetAdd (xmlLocationSetPtr cur, + xmlXPathObjectPtr val); +XML_DEPRECATED +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrWrapLocationSet (xmlLocationSetPtr val); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrLocationSetDel (xmlLocationSetPtr cur, + xmlXPathObjectPtr val); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrLocationSetRemove (xmlLocationSetPtr cur, + int val); +#endif /* defined(LIBXML_XPTR_LOCS_ENABLED) */ + +/* + * Functions. 
+ */ +XMLPUBFUN xmlXPathContextPtr + xmlXPtrNewContext (xmlDocPtr doc, + xmlNodePtr here, + xmlNodePtr origin); +XMLPUBFUN xmlXPathObjectPtr + xmlXPtrEval (const xmlChar *str, + xmlXPathContextPtr ctx); + +#if defined(LIBXML_XPTR_LOCS_ENABLED) +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrRangeToFunction (xmlXPathParserContextPtr ctxt, + int nargs); +XML_DEPRECATED +XMLPUBFUN xmlNodePtr + xmlXPtrBuildNodeList (xmlXPathObjectPtr obj); +XML_DEPRECATED +XMLPUBFUN void + xmlXPtrEvalRangePredicate (xmlXPathParserContextPtr ctxt); +#endif /* defined(LIBXML_XPTR_LOCS_ENABLED) */ +#ifdef __cplusplus +} +#endif + +#endif /* LIBXML_XPTR_ENABLED */ +#endif /* __XML_XPTR_H__ */ diff --git a/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/INSTALLER b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/LICENSE b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..68b7d66c97d66c58de883ed0c451af2b3183e6f3 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/LICENSE @@ -0,0 +1,203 @@ +Copyright 2018- The Hugging Face team. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/METADATA b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..eba5ca4d48278b929238257b6ad1adb625e4d017 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/METADATA @@ -0,0 +1,1045 @@ +Metadata-Version: 2.1 +Name: transformers +Version: 4.40.1 +Summary: State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow +Home-page: https://github.com/huggingface/transformers +Author: The Hugging Face team (past and future) with the help of all our contributors (https://github.com/huggingface/transformers/graphs/contributors) +Author-email: transformers@huggingface.co +License: Apache 2.0 License +Keywords: NLP vision speech deep learning transformer pytorch tensorflow jax BERT GPT-2 Wav2Vec2 ViT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.8.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: filelock +Requires-Dist: huggingface-hub (<1.0,>=0.19.3) +Requires-Dist: numpy (>=1.17) +Requires-Dist: packaging (>=20.0) +Requires-Dist: pyyaml (>=5.1) +Requires-Dist: regex (!=2019.12.17) +Requires-Dist: requests +Requires-Dist: tokenizers (<0.20,>=0.19) +Requires-Dist: safetensors (>=0.4.1) +Requires-Dist: tqdm (>=4.27) +Provides-Extra: accelerate +Requires-Dist: accelerate (>=0.21.0) ; extra == 'accelerate' +Provides-Extra: agents +Requires-Dist: diffusers ; extra == 'agents' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'agents' +Requires-Dist: datasets (!=2.5.0) ; extra == 'agents' +Requires-Dist: torch ; extra == 'agents' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'agents' +Requires-Dist: opencv-python ; extra == 'agents' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'agents' +Provides-Extra: all +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'all' +Requires-Dist: onnxconverter-common ; extra == 'all' +Requires-Dist: tf2onnx ; extra == 'all' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'all' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'all' +Requires-Dist: torch ; extra == 'all' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'all' +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'all' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'all' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'all' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'all' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'all' +Requires-Dist: protobuf ; extra == 'all' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'all' +Requires-Dist: torchaudio ; extra == 'all' +Requires-Dist: librosa ; extra == 'all' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'all' +Requires-Dist: phonemizer ; extra == 'all' +Requires-Dist: kenlm ; extra == 'all' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'all' +Requires-Dist: optuna ; extra == 'all' +Requires-Dist: 
ray[tune] (>=2.7.0) ; extra == 'all' +Requires-Dist: sigopt ; extra == 'all' +Requires-Dist: timm ; extra == 'all' +Requires-Dist: torchvision ; extra == 'all' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'all' +Requires-Dist: decord (==0.6.0) ; extra == 'all' +Requires-Dist: av (==9.2.0) ; extra == 'all' +Provides-Extra: audio +Requires-Dist: librosa ; extra == 'audio' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'audio' +Requires-Dist: phonemizer ; extra == 'audio' +Requires-Dist: kenlm ; extra == 'audio' +Provides-Extra: codecarbon +Requires-Dist: codecarbon (==1.2.0) ; extra == 'codecarbon' +Provides-Extra: deepspeed +Requires-Dist: deepspeed (>=0.9.3) ; extra == 'deepspeed' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'deepspeed' +Provides-Extra: deepspeed-testing +Requires-Dist: deepspeed (>=0.9.3) ; extra == 'deepspeed-testing' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'deepspeed-testing' +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'deepspeed-testing' +Requires-Dist: pytest-xdist ; extra == 'deepspeed-testing' +Requires-Dist: timeout-decorator ; extra == 'deepspeed-testing' +Requires-Dist: parameterized ; extra == 'deepspeed-testing' +Requires-Dist: psutil ; extra == 'deepspeed-testing' +Requires-Dist: datasets (!=2.5.0) ; extra == 'deepspeed-testing' +Requires-Dist: dill (<0.3.5) ; extra == 'deepspeed-testing' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'deepspeed-testing' +Requires-Dist: pytest-timeout ; extra == 'deepspeed-testing' +Requires-Dist: ruff (==0.1.5) ; extra == 'deepspeed-testing' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'deepspeed-testing' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'deepspeed-testing' +Requires-Dist: nltk ; extra == 'deepspeed-testing' +Requires-Dist: GitPython (<3.1.19) ; extra == 'deepspeed-testing' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'deepspeed-testing' +Requires-Dist: protobuf ; extra == 'deepspeed-testing' +Requires-Dist: sacremoses ; extra == 'deepspeed-testing' +Requires-Dist: rjieba ; extra == 'deepspeed-testing' +Requires-Dist: beautifulsoup4 ; extra == 'deepspeed-testing' +Requires-Dist: tensorboard ; extra == 'deepspeed-testing' +Requires-Dist: pydantic ; extra == 'deepspeed-testing' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'deepspeed-testing' +Requires-Dist: faiss-cpu ; extra == 'deepspeed-testing' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'deepspeed-testing' +Requires-Dist: optuna ; extra == 'deepspeed-testing' +Provides-Extra: dev +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'dev' +Requires-Dist: onnxconverter-common ; extra == 'dev' +Requires-Dist: tf2onnx ; extra == 'dev' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'dev' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'dev' +Requires-Dist: torch ; extra == 'dev' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'dev' +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'dev' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'dev' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'dev' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'dev' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'dev' +Requires-Dist: protobuf ; extra == 'dev' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'dev' +Requires-Dist: torchaudio ; extra == 'dev' +Requires-Dist: librosa ; extra == 'dev' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'dev' +Requires-Dist: phonemizer ; extra == 'dev' +Requires-Dist: kenlm ; extra == 'dev' +Requires-Dist: Pillow 
(<=15.0,>=10.0.1) ; extra == 'dev' +Requires-Dist: optuna ; extra == 'dev' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'dev' +Requires-Dist: sigopt ; extra == 'dev' +Requires-Dist: timm ; extra == 'dev' +Requires-Dist: torchvision ; extra == 'dev' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'dev' +Requires-Dist: decord (==0.6.0) ; extra == 'dev' +Requires-Dist: av (==9.2.0) ; extra == 'dev' +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'dev' +Requires-Dist: pytest-xdist ; extra == 'dev' +Requires-Dist: timeout-decorator ; extra == 'dev' +Requires-Dist: parameterized ; extra == 'dev' +Requires-Dist: psutil ; extra == 'dev' +Requires-Dist: datasets (!=2.5.0) ; extra == 'dev' +Requires-Dist: dill (<0.3.5) ; extra == 'dev' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'dev' +Requires-Dist: pytest-timeout ; extra == 'dev' +Requires-Dist: ruff (==0.1.5) ; extra == 'dev' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'dev' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'dev' +Requires-Dist: nltk ; extra == 'dev' +Requires-Dist: GitPython (<3.1.19) ; extra == 'dev' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev' +Requires-Dist: sacremoses ; extra == 'dev' +Requires-Dist: rjieba ; extra == 'dev' +Requires-Dist: beautifulsoup4 ; extra == 'dev' +Requires-Dist: tensorboard ; extra == 'dev' +Requires-Dist: pydantic ; extra == 'dev' +Requires-Dist: faiss-cpu ; extra == 'dev' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'dev' +Requires-Dist: isort (>=5.5.4) ; extra == 'dev' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev' +Requires-Dist: fugashi (>=1.0) ; extra == 'dev' +Requires-Dist: ipadic (<2.0,>=1.0.0) ; extra == 'dev' +Requires-Dist: unidic-lite (>=1.0.7) ; extra == 'dev' +Requires-Dist: unidic (>=1.0.2) ; extra == 'dev' +Requires-Dist: sudachipy (>=0.6.6) ; extra == 'dev' +Requires-Dist: sudachidict-core (>=20220729) ; extra == 'dev' +Requires-Dist: rhoknp (<1.3.1,>=1.1.0) ; extra == 'dev' +Requires-Dist: hf-doc-builder ; extra == 'dev' +Requires-Dist: scikit-learn ; extra == 'dev' +Provides-Extra: dev-tensorflow +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'dev-tensorflow' +Requires-Dist: pytest-xdist ; extra == 'dev-tensorflow' +Requires-Dist: timeout-decorator ; extra == 'dev-tensorflow' +Requires-Dist: parameterized ; extra == 'dev-tensorflow' +Requires-Dist: psutil ; extra == 'dev-tensorflow' +Requires-Dist: datasets (!=2.5.0) ; extra == 'dev-tensorflow' +Requires-Dist: dill (<0.3.5) ; extra == 'dev-tensorflow' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'dev-tensorflow' +Requires-Dist: pytest-timeout ; extra == 'dev-tensorflow' +Requires-Dist: ruff (==0.1.5) ; extra == 'dev-tensorflow' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'dev-tensorflow' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'dev-tensorflow' +Requires-Dist: nltk ; extra == 'dev-tensorflow' +Requires-Dist: GitPython (<3.1.19) ; extra == 'dev-tensorflow' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev-tensorflow' +Requires-Dist: protobuf ; extra == 'dev-tensorflow' +Requires-Dist: sacremoses ; extra == 'dev-tensorflow' +Requires-Dist: rjieba ; extra == 'dev-tensorflow' +Requires-Dist: beautifulsoup4 ; extra == 'dev-tensorflow' +Requires-Dist: tensorboard ; extra == 'dev-tensorflow' +Requires-Dist: pydantic ; extra == 'dev-tensorflow' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'dev-tensorflow' +Requires-Dist: faiss-cpu ; extra == 'dev-tensorflow' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 
'dev-tensorflow' +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'dev-tensorflow' +Requires-Dist: onnxconverter-common ; extra == 'dev-tensorflow' +Requires-Dist: tf2onnx ; extra == 'dev-tensorflow' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'dev-tensorflow' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'dev-tensorflow' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'dev-tensorflow' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'dev-tensorflow' +Requires-Dist: isort (>=5.5.4) ; extra == 'dev-tensorflow' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev-tensorflow' +Requires-Dist: hf-doc-builder ; extra == 'dev-tensorflow' +Requires-Dist: scikit-learn ; extra == 'dev-tensorflow' +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'dev-tensorflow' +Requires-Dist: onnxruntime-tools (>=1.4.2) ; extra == 'dev-tensorflow' +Requires-Dist: librosa ; extra == 'dev-tensorflow' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'dev-tensorflow' +Requires-Dist: phonemizer ; extra == 'dev-tensorflow' +Requires-Dist: kenlm ; extra == 'dev-tensorflow' +Provides-Extra: dev-torch +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'dev-torch' +Requires-Dist: pytest-xdist ; extra == 'dev-torch' +Requires-Dist: timeout-decorator ; extra == 'dev-torch' +Requires-Dist: parameterized ; extra == 'dev-torch' +Requires-Dist: psutil ; extra == 'dev-torch' +Requires-Dist: datasets (!=2.5.0) ; extra == 'dev-torch' +Requires-Dist: dill (<0.3.5) ; extra == 'dev-torch' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'dev-torch' +Requires-Dist: pytest-timeout ; extra == 'dev-torch' +Requires-Dist: ruff (==0.1.5) ; extra == 'dev-torch' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'dev-torch' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'dev-torch' +Requires-Dist: nltk ; extra == 'dev-torch' +Requires-Dist: GitPython (<3.1.19) ; extra == 'dev-torch' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev-torch' +Requires-Dist: protobuf ; extra == 'dev-torch' +Requires-Dist: sacremoses ; extra == 'dev-torch' +Requires-Dist: rjieba ; extra == 'dev-torch' +Requires-Dist: beautifulsoup4 ; extra == 'dev-torch' +Requires-Dist: tensorboard ; extra == 'dev-torch' +Requires-Dist: pydantic ; extra == 'dev-torch' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'dev-torch' +Requires-Dist: faiss-cpu ; extra == 'dev-torch' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'dev-torch' +Requires-Dist: torch ; extra == 'dev-torch' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'dev-torch' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'dev-torch' +Requires-Dist: torchaudio ; extra == 'dev-torch' +Requires-Dist: librosa ; extra == 'dev-torch' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'dev-torch' +Requires-Dist: phonemizer ; extra == 'dev-torch' +Requires-Dist: kenlm ; extra == 'dev-torch' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'dev-torch' +Requires-Dist: optuna ; extra == 'dev-torch' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'dev-torch' +Requires-Dist: sigopt ; extra == 'dev-torch' +Requires-Dist: timm ; extra == 'dev-torch' +Requires-Dist: torchvision ; extra == 'dev-torch' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'dev-torch' +Requires-Dist: isort (>=5.5.4) ; extra == 'dev-torch' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev-torch' +Requires-Dist: fugashi (>=1.0) ; extra == 'dev-torch' +Requires-Dist: ipadic (<2.0,>=1.0.0) ; extra == 'dev-torch' +Requires-Dist: unidic-lite (>=1.0.7) ; extra == 'dev-torch' +Requires-Dist: unidic 
(>=1.0.2) ; extra == 'dev-torch' +Requires-Dist: sudachipy (>=0.6.6) ; extra == 'dev-torch' +Requires-Dist: sudachidict-core (>=20220729) ; extra == 'dev-torch' +Requires-Dist: rhoknp (<1.3.1,>=1.1.0) ; extra == 'dev-torch' +Requires-Dist: hf-doc-builder ; extra == 'dev-torch' +Requires-Dist: scikit-learn ; extra == 'dev-torch' +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'dev-torch' +Requires-Dist: onnxruntime-tools (>=1.4.2) ; extra == 'dev-torch' +Provides-Extra: docs +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'docs' +Requires-Dist: onnxconverter-common ; extra == 'docs' +Requires-Dist: tf2onnx ; extra == 'docs' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'docs' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'docs' +Requires-Dist: torch ; extra == 'docs' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'docs' +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'docs' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'docs' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'docs' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'docs' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'docs' +Requires-Dist: protobuf ; extra == 'docs' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'docs' +Requires-Dist: torchaudio ; extra == 'docs' +Requires-Dist: librosa ; extra == 'docs' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'docs' +Requires-Dist: phonemizer ; extra == 'docs' +Requires-Dist: kenlm ; extra == 'docs' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'docs' +Requires-Dist: optuna ; extra == 'docs' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'docs' +Requires-Dist: sigopt ; extra == 'docs' +Requires-Dist: timm ; extra == 'docs' +Requires-Dist: torchvision ; extra == 'docs' +Requires-Dist: codecarbon (==1.2.0) ; extra == 'docs' +Requires-Dist: decord (==0.6.0) ; extra == 'docs' +Requires-Dist: av (==9.2.0) ; extra == 'docs' +Requires-Dist: hf-doc-builder ; extra == 'docs' +Provides-Extra: docs_specific +Requires-Dist: hf-doc-builder ; extra == 'docs_specific' +Provides-Extra: flax +Requires-Dist: jax (<=0.4.13,>=0.4.1) ; extra == 'flax' +Requires-Dist: jaxlib (<=0.4.13,>=0.4.1) ; extra == 'flax' +Requires-Dist: flax (<=0.7.0,>=0.4.1) ; extra == 'flax' +Requires-Dist: optax (<=0.1.4,>=0.0.8) ; extra == 'flax' +Provides-Extra: flax-speech +Requires-Dist: librosa ; extra == 'flax-speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'flax-speech' +Requires-Dist: phonemizer ; extra == 'flax-speech' +Requires-Dist: kenlm ; extra == 'flax-speech' +Provides-Extra: ftfy +Requires-Dist: ftfy ; extra == 'ftfy' +Provides-Extra: integrations +Requires-Dist: optuna ; extra == 'integrations' +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'integrations' +Requires-Dist: sigopt ; extra == 'integrations' +Provides-Extra: ja +Requires-Dist: fugashi (>=1.0) ; extra == 'ja' +Requires-Dist: ipadic (<2.0,>=1.0.0) ; extra == 'ja' +Requires-Dist: unidic-lite (>=1.0.7) ; extra == 'ja' +Requires-Dist: unidic (>=1.0.2) ; extra == 'ja' +Requires-Dist: sudachipy (>=0.6.6) ; extra == 'ja' +Requires-Dist: sudachidict-core (>=20220729) ; extra == 'ja' +Requires-Dist: rhoknp (<1.3.1,>=1.1.0) ; extra == 'ja' +Provides-Extra: modelcreation +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'modelcreation' +Provides-Extra: natten +Requires-Dist: natten (<0.15.0,>=0.14.6) ; extra == 'natten' +Provides-Extra: onnx +Requires-Dist: onnxconverter-common ; extra == 'onnx' +Requires-Dist: tf2onnx ; extra == 'onnx' +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'onnx' 
+Requires-Dist: onnxruntime-tools (>=1.4.2) ; extra == 'onnx' +Provides-Extra: onnxruntime +Requires-Dist: onnxruntime (>=1.4.0) ; extra == 'onnxruntime' +Requires-Dist: onnxruntime-tools (>=1.4.2) ; extra == 'onnxruntime' +Provides-Extra: optuna +Requires-Dist: optuna ; extra == 'optuna' +Provides-Extra: quality +Requires-Dist: datasets (!=2.5.0) ; extra == 'quality' +Requires-Dist: isort (>=5.5.4) ; extra == 'quality' +Requires-Dist: ruff (==0.1.5) ; extra == 'quality' +Requires-Dist: GitPython (<3.1.19) ; extra == 'quality' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'quality' +Requires-Dist: urllib3 (<2.0.0) ; extra == 'quality' +Provides-Extra: ray +Requires-Dist: ray[tune] (>=2.7.0) ; extra == 'ray' +Provides-Extra: retrieval +Requires-Dist: faiss-cpu ; extra == 'retrieval' +Requires-Dist: datasets (!=2.5.0) ; extra == 'retrieval' +Provides-Extra: sagemaker +Requires-Dist: sagemaker (>=2.31.0) ; extra == 'sagemaker' +Provides-Extra: sentencepiece +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'sentencepiece' +Requires-Dist: protobuf ; extra == 'sentencepiece' +Provides-Extra: serving +Requires-Dist: pydantic ; extra == 'serving' +Requires-Dist: uvicorn ; extra == 'serving' +Requires-Dist: fastapi ; extra == 'serving' +Requires-Dist: starlette ; extra == 'serving' +Provides-Extra: sigopt +Requires-Dist: sigopt ; extra == 'sigopt' +Provides-Extra: sklearn +Requires-Dist: scikit-learn ; extra == 'sklearn' +Provides-Extra: speech +Requires-Dist: torchaudio ; extra == 'speech' +Requires-Dist: librosa ; extra == 'speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'speech' +Requires-Dist: phonemizer ; extra == 'speech' +Requires-Dist: kenlm ; extra == 'speech' +Provides-Extra: testing +Requires-Dist: pytest (<8.0.0,>=7.2.0) ; extra == 'testing' +Requires-Dist: pytest-xdist ; extra == 'testing' +Requires-Dist: timeout-decorator ; extra == 'testing' +Requires-Dist: parameterized ; extra == 'testing' +Requires-Dist: psutil ; extra == 'testing' +Requires-Dist: datasets (!=2.5.0) ; extra == 'testing' +Requires-Dist: dill (<0.3.5) ; extra == 'testing' +Requires-Dist: evaluate (>=0.2.0) ; extra == 'testing' +Requires-Dist: pytest-timeout ; extra == 'testing' +Requires-Dist: ruff (==0.1.5) ; extra == 'testing' +Requires-Dist: sacrebleu (<2.0.0,>=1.4.12) ; extra == 'testing' +Requires-Dist: rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1) ; extra == 'testing' +Requires-Dist: nltk ; extra == 'testing' +Requires-Dist: GitPython (<3.1.19) ; extra == 'testing' +Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'testing' +Requires-Dist: protobuf ; extra == 'testing' +Requires-Dist: sacremoses ; extra == 'testing' +Requires-Dist: rjieba ; extra == 'testing' +Requires-Dist: beautifulsoup4 ; extra == 'testing' +Requires-Dist: tensorboard ; extra == 'testing' +Requires-Dist: pydantic ; extra == 'testing' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'testing' +Requires-Dist: faiss-cpu ; extra == 'testing' +Requires-Dist: cookiecutter (==1.7.3) ; extra == 'testing' +Provides-Extra: tf +Requires-Dist: tensorflow (<2.16,>=2.6) ; extra == 'tf' +Requires-Dist: onnxconverter-common ; extra == 'tf' +Requires-Dist: tf2onnx ; extra == 'tf' +Requires-Dist: tensorflow-text (<2.16) ; extra == 'tf' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'tf' +Provides-Extra: tf-cpu +Requires-Dist: tensorflow-cpu (<2.16,>=2.6) ; extra == 'tf-cpu' +Requires-Dist: onnxconverter-common ; extra == 'tf-cpu' +Requires-Dist: tf2onnx ; extra == 'tf-cpu' +Requires-Dist: tensorflow-text (<2.16) ; extra 
== 'tf-cpu' +Requires-Dist: keras-nlp (>=0.3.1) ; extra == 'tf-cpu' +Provides-Extra: tf-speech +Requires-Dist: librosa ; extra == 'tf-speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'tf-speech' +Requires-Dist: phonemizer ; extra == 'tf-speech' +Requires-Dist: kenlm ; extra == 'tf-speech' +Provides-Extra: timm +Requires-Dist: timm ; extra == 'timm' +Provides-Extra: tokenizers +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'tokenizers' +Provides-Extra: torch +Requires-Dist: torch ; extra == 'torch' +Requires-Dist: accelerate (>=0.21.0) ; extra == 'torch' +Provides-Extra: torch-speech +Requires-Dist: torchaudio ; extra == 'torch-speech' +Requires-Dist: librosa ; extra == 'torch-speech' +Requires-Dist: pyctcdecode (>=0.4.0) ; extra == 'torch-speech' +Requires-Dist: phonemizer ; extra == 'torch-speech' +Requires-Dist: kenlm ; extra == 'torch-speech' +Provides-Extra: torch-vision +Requires-Dist: torchvision ; extra == 'torch-vision' +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'torch-vision' +Provides-Extra: torchhub +Requires-Dist: filelock ; extra == 'torchhub' +Requires-Dist: huggingface-hub (<1.0,>=0.19.3) ; extra == 'torchhub' +Requires-Dist: importlib-metadata ; extra == 'torchhub' +Requires-Dist: numpy (>=1.17) ; extra == 'torchhub' +Requires-Dist: packaging (>=20.0) ; extra == 'torchhub' +Requires-Dist: protobuf ; extra == 'torchhub' +Requires-Dist: regex (!=2019.12.17) ; extra == 'torchhub' +Requires-Dist: requests ; extra == 'torchhub' +Requires-Dist: sentencepiece (!=0.1.92,>=0.1.91) ; extra == 'torchhub' +Requires-Dist: torch ; extra == 'torchhub' +Requires-Dist: tokenizers (<0.20,>=0.19) ; extra == 'torchhub' +Requires-Dist: tqdm (>=4.27) ; extra == 'torchhub' +Provides-Extra: video +Requires-Dist: decord (==0.6.0) ; extra == 'video' +Requires-Dist: av (==9.2.0) ; extra == 'video' +Provides-Extra: vision +Requires-Dist: Pillow (<=15.0,>=10.0.1) ; extra == 'vision' + + + +

+
+    Hugging Face Transformers Library
+
+    (badges: Build | GitHub | Documentation | GitHub release | Contributor Covenant | DOI)
+
+    English | 简体中文 | 繁體中文 | 한국어 | Español | 日本語 | हिन्दी | Русский | Português | తెలుగు | Français | Deutsch | Tiếng Việt |
+
+    State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow
+
+ +🤗 Transformers provides thousands of pretrained models to perform tasks on different modalities such as text, vision, and audio. + +These models can be applied on: + +* 📝 Text, for tasks like text classification, information extraction, question answering, summarization, translation, and text generation, in over 100 languages. +* 🖼️ Images, for tasks like image classification, object detection, and segmentation. +* 🗣️ Audio, for tasks like speech recognition and audio classification. + +Transformer models can also perform tasks on **several modalities combined**, such as table question answering, optical character recognition, information extraction from scanned documents, video classification, and visual question answering. + +🤗 Transformers provides APIs to quickly download and use those pretrained models on a given text, fine-tune them on your own datasets and then share them with the community on our [model hub](https://huggingface.co/models). At the same time, each python module defining an architecture is fully standalone and can be modified to enable quick research experiments. + +🤗 Transformers is backed by the three most popular deep learning libraries — [Jax](https://jax.readthedocs.io/en/latest/), [PyTorch](https://pytorch.org/) and [TensorFlow](https://www.tensorflow.org/) — with a seamless integration between them. It's straightforward to train your models with one before loading them for inference with the other. + +## Online demos + +You can test most of our models directly on their pages from the [model hub](https://huggingface.co/models). We also offer [private model hosting, versioning, & an inference API](https://huggingface.co/pricing) for public and private models. + +Here are a few examples: + +In Natural Language Processing: +- [Masked word completion with BERT](https://huggingface.co/google-bert/bert-base-uncased?text=Paris+is+the+%5BMASK%5D+of+France) +- [Named Entity Recognition with Electra](https://huggingface.co/dbmdz/electra-large-discriminator-finetuned-conll03-english?text=My+name+is+Sarah+and+I+live+in+London+city) +- [Text generation with Mistral](https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2) +- [Natural Language Inference with RoBERTa](https://huggingface.co/FacebookAI/roberta-large-mnli?text=The+dog+was+lost.+Nobody+lost+any+animal) +- [Summarization with BART](https://huggingface.co/facebook/bart-large-cnn?text=The+tower+is+324+metres+%281%2C063+ft%29+tall%2C+about+the+same+height+as+an+81-storey+building%2C+and+the+tallest+structure+in+Paris.+Its+base+is+square%2C+measuring+125+metres+%28410+ft%29+on+each+side.+During+its+construction%2C+the+Eiffel+Tower+surpassed+the+Washington+Monument+to+become+the+tallest+man-made+structure+in+the+world%2C+a+title+it+held+for+41+years+until+the+Chrysler+Building+in+New+York+City+was+finished+in+1930.+It+was+the+first+structure+to+reach+a+height+of+300+metres.+Due+to+the+addition+of+a+broadcasting+aerial+at+the+top+of+the+tower+in+1957%2C+it+is+now+taller+than+the+Chrysler+Building+by+5.2+metres+%2817+ft%29.+Excluding+transmitters%2C+the+Eiffel+Tower+is+the+second+tallest+free-standing+structure+in+France+after+the+Millau+Viaduct) +- [Question answering with 
DistilBERT](https://huggingface.co/distilbert/distilbert-base-uncased-distilled-squad?text=Which+name+is+also+used+to+describe+the+Amazon+rainforest+in+English%3F&context=The+Amazon+rainforest+%28Portuguese%3A+Floresta+Amaz%C3%B4nica+or+Amaz%C3%B4nia%3B+Spanish%3A+Selva+Amaz%C3%B3nica%2C+Amazon%C3%ADa+or+usually+Amazonia%3B+French%3A+For%C3%AAt+amazonienne%3B+Dutch%3A+Amazoneregenwoud%29%2C+also+known+in+English+as+Amazonia+or+the+Amazon+Jungle%2C+is+a+moist+broadleaf+forest+that+covers+most+of+the+Amazon+basin+of+South+America.+This+basin+encompasses+7%2C000%2C000+square+kilometres+%282%2C700%2C000+sq+mi%29%2C+of+which+5%2C500%2C000+square+kilometres+%282%2C100%2C000+sq+mi%29+are+covered+by+the+rainforest.+This+region+includes+territory+belonging+to+nine+nations.+The+majority+of+the+forest+is+contained+within+Brazil%2C+with+60%25+of+the+rainforest%2C+followed+by+Peru+with+13%25%2C+Colombia+with+10%25%2C+and+with+minor+amounts+in+Venezuela%2C+Ecuador%2C+Bolivia%2C+Guyana%2C+Suriname+and+French+Guiana.+States+or+departments+in+four+nations+contain+%22Amazonas%22+in+their+names.+The+Amazon+represents+over+half+of+the+planet%27s+remaining+rainforests%2C+and+comprises+the+largest+and+most+biodiverse+tract+of+tropical+rainforest+in+the+world%2C+with+an+estimated+390+billion+individual+trees+divided+into+16%2C000+species) +- [Translation with T5](https://huggingface.co/google-t5/t5-base?text=My+name+is+Wolfgang+and+I+live+in+Berlin) + +In Computer Vision: +- [Image classification with ViT](https://huggingface.co/google/vit-base-patch16-224) +- [Object Detection with DETR](https://huggingface.co/facebook/detr-resnet-50) +- [Semantic Segmentation with SegFormer](https://huggingface.co/nvidia/segformer-b0-finetuned-ade-512-512) +- [Panoptic Segmentation with Mask2Former](https://huggingface.co/facebook/mask2former-swin-large-coco-panoptic) +- [Depth Estimation with Depth Anything](https://huggingface.co/docs/transformers/main/model_doc/depth_anything) +- [Video Classification with VideoMAE](https://huggingface.co/docs/transformers/model_doc/videomae) +- [Universal Segmentation with OneFormer](https://huggingface.co/shi-labs/oneformer_ade20k_dinat_large) + +In Audio: +- [Automatic Speech Recognition with Whisper](https://huggingface.co/openai/whisper-large-v3) +- [Keyword Spotting with Wav2Vec2](https://huggingface.co/superb/wav2vec2-base-superb-ks) +- [Audio Classification with Audio Spectrogram Transformer](https://huggingface.co/MIT/ast-finetuned-audioset-10-10-0.4593) + +In Multimodal tasks: +- [Table Question Answering with TAPAS](https://huggingface.co/google/tapas-base-finetuned-wtq) +- [Visual Question Answering with ViLT](https://huggingface.co/dandelin/vilt-b32-finetuned-vqa) +- [Image captioning with LLaVa](https://huggingface.co/llava-hf/llava-1.5-7b-hf) +- [Zero-shot Image Classification with SigLIP](https://huggingface.co/google/siglip-so400m-patch14-384) +- [Document Question Answering with LayoutLM](https://huggingface.co/impira/layoutlm-document-qa) +- [Zero-shot Video Classification with X-CLIP](https://huggingface.co/docs/transformers/model_doc/xclip) +- [Zero-shot Object Detection with OWLv2](https://huggingface.co/docs/transformers/en/model_doc/owlv2) +- [Zero-shot Image Segmentation with CLIPSeg](https://huggingface.co/docs/transformers/model_doc/clipseg) +- [Automatic Mask Generation with SAM](https://huggingface.co/docs/transformers/model_doc/sam) + + +## 100 projects using Transformers + +Transformers is more than a toolkit to use pretrained models: it's a community of 
projects built around it and the +Hugging Face Hub. We want Transformers to enable developers, researchers, students, professors, engineers, and anyone +else to build their dream projects. + +In order to celebrate the 100,000 stars of transformers, we have decided to put the spotlight on the +community, and we have created the [awesome-transformers](./awesome-transformers.md) page which lists 100 +incredible projects built in the vicinity of transformers. + +If you own or use a project that you believe should be part of the list, please open a PR to add it! + +## If you are looking for custom support from the Hugging Face team + + + HuggingFace Expert Acceleration Program +
+ +## Quick tour + +To immediately use a model on a given input (text, image, audio, ...), we provide the `pipeline` API. Pipelines group together a pretrained model with the preprocessing that was used during that model's training. Here is how to quickly use a pipeline to classify positive versus negative texts: + +```python +>>> from transformers import pipeline + +# Allocate a pipeline for sentiment-analysis +>>> classifier = pipeline('sentiment-analysis') +>>> classifier('We are very happy to introduce pipeline to the transformers repository.') +[{'label': 'POSITIVE', 'score': 0.9996980428695679}] +``` + +The second line of code downloads and caches the pretrained model used by the pipeline, while the third evaluates it on the given text. Here, the answer is "positive" with a confidence of 99.97%. + +Many tasks have a pre-trained `pipeline` ready to go, in NLP but also in computer vision and speech. For example, we can easily extract detected objects in an image: + +``` python +>>> import requests +>>> from PIL import Image +>>> from transformers import pipeline + +# Download an image with cute cats +>>> url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample.png" +>>> image_data = requests.get(url, stream=True).raw +>>> image = Image.open(image_data) + +# Allocate a pipeline for object detection +>>> object_detector = pipeline('object-detection') +>>> object_detector(image) +[{'score': 0.9982201457023621, + 'label': 'remote', + 'box': {'xmin': 40, 'ymin': 70, 'xmax': 175, 'ymax': 117}}, + {'score': 0.9960021376609802, + 'label': 'remote', + 'box': {'xmin': 333, 'ymin': 72, 'xmax': 368, 'ymax': 187}}, + {'score': 0.9954745173454285, + 'label': 'couch', + 'box': {'xmin': 0, 'ymin': 1, 'xmax': 639, 'ymax': 473}}, + {'score': 0.9988006353378296, + 'label': 'cat', + 'box': {'xmin': 13, 'ymin': 52, 'xmax': 314, 'ymax': 470}}, + {'score': 0.9986783862113953, + 'label': 'cat', + 'box': {'xmin': 345, 'ymin': 23, 'xmax': 640, 'ymax': 368}}] +``` + +Here, we get a list of objects detected in the image, with a box surrounding the object and a confidence score. Here is the original image on the left, with the predictions displayed on the right: + +

+    (image: the sample photo of two cats and remotes on a couch, shown next to the same photo with the predicted boxes drawn on it)
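+By default, `pipeline` picks a sensible pretrained checkpoint for the requested task, but you can also pass any compatible model from the Hub explicitly. Below is a minimal sketch (assuming `distilbert/distilbert-base-uncased-finetuned-sst-2-english`, the checkpoint the default sentiment-analysis pipeline typically resolves to):
+
+```python
+>>> from transformers import pipeline
+
+# Pin a specific checkpoint instead of relying on the task default
+>>> classifier = pipeline('sentiment-analysis', model='distilbert/distilbert-base-uncased-finetuned-sst-2-english')
+>>> classifier('We are very happy to introduce pipeline to the transformers repository.')
+# -> a list with one dict containing a 'label' and a 'score', as in the first example above
+```
+
+Pinning the checkpoint this way keeps results reproducible, since task defaults may change between library releases.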

+ +You can learn more about the tasks supported by the `pipeline` API in [this tutorial](https://huggingface.co/docs/transformers/task_summary). + +In addition to `pipeline`, to download and use any of the pretrained models on your given task, all it takes is three lines of code. Here is the PyTorch version: +```python +>>> from transformers import AutoTokenizer, AutoModel + +>>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased") +>>> model = AutoModel.from_pretrained("google-bert/bert-base-uncased") + +>>> inputs = tokenizer("Hello world!", return_tensors="pt") +>>> outputs = model(**inputs) +``` + +And here is the equivalent code for TensorFlow: +```python +>>> from transformers import AutoTokenizer, TFAutoModel + +>>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased") +>>> model = TFAutoModel.from_pretrained("google-bert/bert-base-uncased") + +>>> inputs = tokenizer("Hello world!", return_tensors="tf") +>>> outputs = model(**inputs) +``` + +The tokenizer is responsible for all the preprocessing the pretrained model expects and can be called directly on a single string (as in the above examples) or a list. It will output a dictionary that you can use in downstream code or simply directly pass to your model using the ** argument unpacking operator. + +The model itself is a regular [Pytorch `nn.Module`](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) or a [TensorFlow `tf.keras.Model`](https://www.tensorflow.org/api_docs/python/tf/keras/Model) (depending on your backend) which you can use as usual. [This tutorial](https://huggingface.co/docs/transformers/training) explains how to integrate such a model into a classic PyTorch or TensorFlow training loop, or how to use our `Trainer` API to quickly fine-tune on a new dataset. + +## Why should I use transformers? + +1. Easy-to-use state-of-the-art models: + - High performance on natural language understanding & generation, computer vision, and audio tasks. + - Low barrier to entry for educators and practitioners. + - Few user-facing abstractions with just three classes to learn. + - A unified API for using all our pretrained models. + +1. Lower compute costs, smaller carbon footprint: + - Researchers can share trained models instead of always retraining. + - Practitioners can reduce compute time and production costs. + - Dozens of architectures with over 400,000 pretrained models across all modalities. + +1. Choose the right framework for every part of a model's lifetime: + - Train state-of-the-art models in 3 lines of code. + - Move a single model between TF2.0/PyTorch/JAX frameworks at will. + - Seamlessly pick the right framework for training, evaluation, and production. + +1. Easily customize a model or an example to your needs: + - We provide examples for each architecture to reproduce the results published by its original authors. + - Model internals are exposed as consistently as possible. + - Model files can be used independently of the library for quick experiments. + +## Why shouldn't I use transformers? + +- This library is not a modular toolbox of building blocks for neural nets. The code in the model files is not refactored with additional abstractions on purpose, so that researchers can quickly iterate on each of the models without diving into additional abstractions/files. +- The training API is not intended to work on any model but is optimized to work with the models provided by the library. 
For generic machine learning loops, you should use another library (possibly, [Accelerate](https://huggingface.co/docs/accelerate)). +- While we strive to present as many use cases as possible, the scripts in our [examples folder](https://github.com/huggingface/transformers/tree/main/examples) are just that: examples. It is expected that they won't work out-of-the-box on your specific problem and that you will be required to change a few lines of code to adapt them to your needs. + +## Installation + +### With pip + +This repository is tested on Python 3.8+, Flax 0.4.1+, PyTorch 1.11+, and TensorFlow 2.6+. + +You should install 🤗 Transformers in a [virtual environment](https://docs.python.org/3/library/venv.html). If you're unfamiliar with Python virtual environments, check out the [user guide](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/). + +First, create a virtual environment with the version of Python you're going to use and activate it. + +Then, you will need to install at least one of Flax, PyTorch, or TensorFlow. +Please refer to [TensorFlow installation page](https://www.tensorflow.org/install/), [PyTorch installation page](https://pytorch.org/get-started/locally/#start-locally) and/or [Flax](https://github.com/google/flax#quick-install) and [Jax](https://github.com/google/jax#installation) installation pages regarding the specific installation command for your platform. + +When one of those backends has been installed, 🤗 Transformers can be installed using pip as follows: + +```bash +pip install transformers +``` + +If you'd like to play with the examples or need the bleeding edge of the code and can't wait for a new release, you must [install the library from source](https://huggingface.co/docs/transformers/installation#installing-from-source). + +### With conda + +🤗 Transformers can be installed using conda as follows: + +```shell script +conda install conda-forge::transformers +``` + +> **_NOTE:_** Installing `transformers` from the `huggingface` channel is deprecated. + +Follow the installation pages of Flax, PyTorch or TensorFlow to see how to install them with conda. + +> **_NOTE:_** On Windows, you may be prompted to activate Developer Mode in order to benefit from caching. If this is not an option for you, please let us know in [this issue](https://github.com/huggingface/huggingface_hub/issues/1062). + +## Model architectures + +**[All the model checkpoints](https://huggingface.co/models)** provided by 🤗 Transformers are seamlessly integrated from the huggingface.co [model hub](https://huggingface.co/models), where they are uploaded directly by [users](https://huggingface.co/users) and [organizations](https://huggingface.co/organizations). + +Current number of checkpoints: ![](https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/models&color=brightgreen) + +🤗 Transformers currently provides the following architectures (see [here](https://huggingface.co/docs/transformers/model_summary) for a high-level summary of each them): + +1. **[ALBERT](https://huggingface.co/docs/transformers/model_doc/albert)** (from Google Research and the Toyota Technological Institute at Chicago) released with the paper [ALBERT: A Lite BERT for Self-supervised Learning of Language Representations](https://arxiv.org/abs/1909.11942), by Zhenzhong Lan, Mingda Chen, Sebastian Goodman, Kevin Gimpel, Piyush Sharma, Radu Soricut. +1. 
**[ALIGN](https://huggingface.co/docs/transformers/model_doc/align)** (from Google Research) released with the paper [Scaling Up Visual and Vision-Language Representation Learning With Noisy Text Supervision](https://arxiv.org/abs/2102.05918) by Chao Jia, Yinfei Yang, Ye Xia, Yi-Ting Chen, Zarana Parekh, Hieu Pham, Quoc V. Le, Yunhsuan Sung, Zhen Li, Tom Duerig. +1. **[AltCLIP](https://huggingface.co/docs/transformers/model_doc/altclip)** (from BAAI) released with the paper [AltCLIP: Altering the Language Encoder in CLIP for Extended Language Capabilities](https://arxiv.org/abs/2211.06679) by Chen, Zhongzhi and Liu, Guang and Zhang, Bo-Wen and Ye, Fulong and Yang, Qinghong and Wu, Ledell. +1. **[Audio Spectrogram Transformer](https://huggingface.co/docs/transformers/model_doc/audio-spectrogram-transformer)** (from MIT) released with the paper [AST: Audio Spectrogram Transformer](https://arxiv.org/abs/2104.01778) by Yuan Gong, Yu-An Chung, James Glass. +1. **[Autoformer](https://huggingface.co/docs/transformers/model_doc/autoformer)** (from Tsinghua University) released with the paper [Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting](https://arxiv.org/abs/2106.13008) by Haixu Wu, Jiehui Xu, Jianmin Wang, Mingsheng Long. +1. **[Bark](https://huggingface.co/docs/transformers/model_doc/bark)** (from Suno) released in the repository [suno-ai/bark](https://github.com/suno-ai/bark) by Suno AI team. +1. **[BART](https://huggingface.co/docs/transformers/model_doc/bart)** (from Facebook) released with the paper [BART: Denoising Sequence-to-Sequence Pre-training for Natural Language Generation, Translation, and Comprehension](https://arxiv.org/abs/1910.13461) by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov, and Luke Zettlemoyer. +1. **[BARThez](https://huggingface.co/docs/transformers/model_doc/barthez)** (from École polytechnique) released with the paper [BARThez: a Skilled Pretrained French Sequence-to-Sequence Model](https://arxiv.org/abs/2010.12321) by Moussa Kamal Eddine, Antoine J.-P. Tixier, Michalis Vazirgiannis. +1. **[BARTpho](https://huggingface.co/docs/transformers/model_doc/bartpho)** (from VinAI Research) released with the paper [BARTpho: Pre-trained Sequence-to-Sequence Models for Vietnamese](https://arxiv.org/abs/2109.09701) by Nguyen Luong Tran, Duong Minh Le and Dat Quoc Nguyen. +1. **[BEiT](https://huggingface.co/docs/transformers/model_doc/beit)** (from Microsoft) released with the paper [BEiT: BERT Pre-Training of Image Transformers](https://arxiv.org/abs/2106.08254) by Hangbo Bao, Li Dong, Furu Wei. +1. **[BERT](https://huggingface.co/docs/transformers/model_doc/bert)** (from Google) released with the paper [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805) by Jacob Devlin, Ming-Wei Chang, Kenton Lee, and Kristina Toutanova. +1. **[BERT For Sequence Generation](https://huggingface.co/docs/transformers/model_doc/bert-generation)** (from Google) released with the paper [Leveraging Pre-trained Checkpoints for Sequence Generation Tasks](https://arxiv.org/abs/1907.12461) by Sascha Rothe, Shashi Narayan, Aliaksei Severyn. +1. **[BERTweet](https://huggingface.co/docs/transformers/model_doc/bertweet)** (from VinAI Research) released with the paper [BERTweet: A pre-trained language model for English Tweets](https://aclanthology.org/2020.emnlp-demos.2/) by Dat Quoc Nguyen, Thanh Vu and Anh Tuan Nguyen. +1. 
**[BigBird-Pegasus](https://huggingface.co/docs/transformers/model_doc/bigbird_pegasus)** (from Google Research) released with the paper [Big Bird: Transformers for Longer Sequences](https://arxiv.org/abs/2007.14062) by Manzil Zaheer, Guru Guruganesh, Avinava Dubey, Joshua Ainslie, Chris Alberti, Santiago Ontanon, Philip Pham, Anirudh Ravula, Qifan Wang, Li Yang, Amr Ahmed. +1. **[BigBird-RoBERTa](https://huggingface.co/docs/transformers/model_doc/big_bird)** (from Google Research) released with the paper [Big Bird: Transformers for Longer Sequences](https://arxiv.org/abs/2007.14062) by Manzil Zaheer, Guru Guruganesh, Avinava Dubey, Joshua Ainslie, Chris Alberti, Santiago Ontanon, Philip Pham, Anirudh Ravula, Qifan Wang, Li Yang, Amr Ahmed. +1. **[BioGpt](https://huggingface.co/docs/transformers/model_doc/biogpt)** (from Microsoft Research AI4Science) released with the paper [BioGPT: generative pre-trained transformer for biomedical text generation and mining](https://academic.oup.com/bib/advance-article/doi/10.1093/bib/bbac409/6713511?guestAccessKey=a66d9b5d-4f83-4017-bb52-405815c907b9) by Renqian Luo, Liai Sun, Yingce Xia, Tao Qin, Sheng Zhang, Hoifung Poon and Tie-Yan Liu. +1. **[BiT](https://huggingface.co/docs/transformers/model_doc/bit)** (from Google AI) released with the paper [Big Transfer (BiT): General Visual Representation Learning](https://arxiv.org/abs/1912.11370) by Alexander Kolesnikov, Lucas Beyer, Xiaohua Zhai, Joan Puigcerver, Jessica Yung, Sylvain Gelly, Neil Houlsby. +1. **[Blenderbot](https://huggingface.co/docs/transformers/model_doc/blenderbot)** (from Facebook) released with the paper [Recipes for building an open-domain chatbot](https://arxiv.org/abs/2004.13637) by Stephen Roller, Emily Dinan, Naman Goyal, Da Ju, Mary Williamson, Yinhan Liu, Jing Xu, Myle Ott, Kurt Shuster, Eric M. Smith, Y-Lan Boureau, Jason Weston. +1. **[BlenderbotSmall](https://huggingface.co/docs/transformers/model_doc/blenderbot-small)** (from Facebook) released with the paper [Recipes for building an open-domain chatbot](https://arxiv.org/abs/2004.13637) by Stephen Roller, Emily Dinan, Naman Goyal, Da Ju, Mary Williamson, Yinhan Liu, Jing Xu, Myle Ott, Kurt Shuster, Eric M. Smith, Y-Lan Boureau, Jason Weston. +1. **[BLIP](https://huggingface.co/docs/transformers/model_doc/blip)** (from Salesforce) released with the paper [BLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation](https://arxiv.org/abs/2201.12086) by Junnan Li, Dongxu Li, Caiming Xiong, Steven Hoi. +1. **[BLIP-2](https://huggingface.co/docs/transformers/model_doc/blip-2)** (from Salesforce) released with the paper [BLIP-2: Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models](https://arxiv.org/abs/2301.12597) by Junnan Li, Dongxu Li, Silvio Savarese, Steven Hoi. +1. **[BLOOM](https://huggingface.co/docs/transformers/model_doc/bloom)** (from BigScience workshop) released by the [BigScience Workshop](https://bigscience.huggingface.co/). +1. **[BORT](https://huggingface.co/docs/transformers/model_doc/bort)** (from Alexa) released with the paper [Optimal Subarchitecture Extraction For BERT](https://arxiv.org/abs/2010.10499) by Adrian de Wynter and Daniel J. Perry. +1. 
**[BridgeTower](https://huggingface.co/docs/transformers/model_doc/bridgetower)** (from Harbin Institute of Technology/Microsoft Research Asia/Intel Labs) released with the paper [BridgeTower: Building Bridges Between Encoders in Vision-Language Representation Learning](https://arxiv.org/abs/2206.08657) by Xiao Xu, Chenfei Wu, Shachar Rosenman, Vasudev Lal, Wanxiang Che, Nan Duan. +1. **[BROS](https://huggingface.co/docs/transformers/model_doc/bros)** (from NAVER CLOVA) released with the paper [BROS: A Pre-trained Language Model Focusing on Text and Layout for Better Key Information Extraction from Documents](https://arxiv.org/abs/2108.04539) by Teakgyu Hong, Donghyun Kim, Mingi Ji, Wonseok Hwang, Daehyun Nam, Sungrae Park. +1. **[ByT5](https://huggingface.co/docs/transformers/model_doc/byt5)** (from Google Research) released with the paper [ByT5: Towards a token-free future with pre-trained byte-to-byte models](https://arxiv.org/abs/2105.13626) by Linting Xue, Aditya Barua, Noah Constant, Rami Al-Rfou, Sharan Narang, Mihir Kale, Adam Roberts, Colin Raffel. +1. **[CamemBERT](https://huggingface.co/docs/transformers/model_doc/camembert)** (from Inria/Facebook/Sorbonne) released with the paper [CamemBERT: a Tasty French Language Model](https://arxiv.org/abs/1911.03894) by Louis Martin*, Benjamin Muller*, Pedro Javier Ortiz Suárez*, Yoann Dupont, Laurent Romary, Éric Villemonte de la Clergerie, Djamé Seddah and Benoît Sagot. +1. **[CANINE](https://huggingface.co/docs/transformers/model_doc/canine)** (from Google Research) released with the paper [CANINE: Pre-training an Efficient Tokenization-Free Encoder for Language Representation](https://arxiv.org/abs/2103.06874) by Jonathan H. Clark, Dan Garrette, Iulia Turc, John Wieting. +1. **[Chinese-CLIP](https://huggingface.co/docs/transformers/model_doc/chinese_clip)** (from OFA-Sys) released with the paper [Chinese CLIP: Contrastive Vision-Language Pretraining in Chinese](https://arxiv.org/abs/2211.01335) by An Yang, Junshu Pan, Junyang Lin, Rui Men, Yichang Zhang, Jingren Zhou, Chang Zhou. +1. **[CLAP](https://huggingface.co/docs/transformers/model_doc/clap)** (from LAION-AI) released with the paper [Large-scale Contrastive Language-Audio Pretraining with Feature Fusion and Keyword-to-Caption Augmentation](https://arxiv.org/abs/2211.06687) by Yusong Wu, Ke Chen, Tianyu Zhang, Yuchen Hui, Taylor Berg-Kirkpatrick, Shlomo Dubnov. +1. **[CLIP](https://huggingface.co/docs/transformers/model_doc/clip)** (from OpenAI) released with the paper [Learning Transferable Visual Models From Natural Language Supervision](https://arxiv.org/abs/2103.00020) by Alec Radford, Jong Wook Kim, Chris Hallacy, Aditya Ramesh, Gabriel Goh, Sandhini Agarwal, Girish Sastry, Amanda Askell, Pamela Mishkin, Jack Clark, Gretchen Krueger, Ilya Sutskever. +1. **[CLIPSeg](https://huggingface.co/docs/transformers/model_doc/clipseg)** (from University of Göttingen) released with the paper [Image Segmentation Using Text and Image Prompts](https://arxiv.org/abs/2112.10003) by Timo Lüddecke and Alexander Ecker. +1. **[CLVP](https://huggingface.co/docs/transformers/model_doc/clvp)** released with the paper [Better speech synthesis through scaling](https://arxiv.org/abs/2305.07243) by James Betker. +1. 
**[CodeGen](https://huggingface.co/docs/transformers/model_doc/codegen)** (from Salesforce) released with the paper [A Conversational Paradigm for Program Synthesis](https://arxiv.org/abs/2203.13474) by Erik Nijkamp, Bo Pang, Hiroaki Hayashi, Lifu Tu, Huan Wang, Yingbo Zhou, Silvio Savarese, Caiming Xiong. +1. **[CodeLlama](https://huggingface.co/docs/transformers/model_doc/llama_code)** (from MetaAI) released with the paper [Code Llama: Open Foundation Models for Code](https://ai.meta.com/research/publications/code-llama-open-foundation-models-for-code/) by Baptiste Rozière, Jonas Gehring, Fabian Gloeckle, Sten Sootla, Itai Gat, Xiaoqing Ellen Tan, Yossi Adi, Jingyu Liu, Tal Remez, Jérémy Rapin, Artyom Kozhevnikov, Ivan Evtimov, Joanna Bitton, Manish Bhatt, Cristian Canton Ferrer, Aaron Grattafiori, Wenhan Xiong, Alexandre Défossez, Jade Copet, Faisal Azhar, Hugo Touvron, Louis Martin, Nicolas Usunier, Thomas Scialom, Gabriel Synnaeve. +1. **[Cohere](https://huggingface.co/docs/transformers/model_doc/cohere)** (from Cohere) released with the paper [Command-R: Retrieval Augmented Generation at Production Scale]() by Cohere. +1. **[Conditional DETR](https://huggingface.co/docs/transformers/model_doc/conditional_detr)** (from Microsoft Research Asia) released with the paper [Conditional DETR for Fast Training Convergence](https://arxiv.org/abs/2108.06152) by Depu Meng, Xiaokang Chen, Zejia Fan, Gang Zeng, Houqiang Li, Yuhui Yuan, Lei Sun, Jingdong Wang. +1. **[ConvBERT](https://huggingface.co/docs/transformers/model_doc/convbert)** (from YituTech) released with the paper [ConvBERT: Improving BERT with Span-based Dynamic Convolution](https://arxiv.org/abs/2008.02496) by Zihang Jiang, Weihao Yu, Daquan Zhou, Yunpeng Chen, Jiashi Feng, Shuicheng Yan. +1. **[ConvNeXT](https://huggingface.co/docs/transformers/model_doc/convnext)** (from Facebook AI) released with the paper [A ConvNet for the 2020s](https://arxiv.org/abs/2201.03545) by Zhuang Liu, Hanzi Mao, Chao-Yuan Wu, Christoph Feichtenhofer, Trevor Darrell, Saining Xie. +1. **[ConvNeXTV2](https://huggingface.co/docs/transformers/model_doc/convnextv2)** (from Facebook AI) released with the paper [ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders](https://arxiv.org/abs/2301.00808) by Sanghyun Woo, Shoubhik Debnath, Ronghang Hu, Xinlei Chen, Zhuang Liu, In So Kweon, Saining Xie. +1. **[CPM](https://huggingface.co/docs/transformers/model_doc/cpm)** (from Tsinghua University) released with the paper [CPM: A Large-scale Generative Chinese Pre-trained Language Model](https://arxiv.org/abs/2012.00413) by Zhengyan Zhang, Xu Han, Hao Zhou, Pei Ke, Yuxian Gu, Deming Ye, Yujia Qin, Yusheng Su, Haozhe Ji, Jian Guan, Fanchao Qi, Xiaozhi Wang, Yanan Zheng, Guoyang Zeng, Huanqi Cao, Shengqi Chen, Daixuan Li, Zhenbo Sun, Zhiyuan Liu, Minlie Huang, Wentao Han, Jie Tang, Juanzi Li, Xiaoyan Zhu, Maosong Sun. +1. **[CPM-Ant](https://huggingface.co/docs/transformers/model_doc/cpmant)** (from OpenBMB) released by the [OpenBMB](https://www.openbmb.org/). +1. **[CTRL](https://huggingface.co/docs/transformers/model_doc/ctrl)** (from Salesforce) released with the paper [CTRL: A Conditional Transformer Language Model for Controllable Generation](https://arxiv.org/abs/1909.05858) by Nitish Shirish Keskar*, Bryan McCann*, Lav R. Varshney, Caiming Xiong and Richard Socher. +1. 
**[CvT](https://huggingface.co/docs/transformers/model_doc/cvt)** (from Microsoft) released with the paper [CvT: Introducing Convolutions to Vision Transformers](https://arxiv.org/abs/2103.15808) by Haiping Wu, Bin Xiao, Noel Codella, Mengchen Liu, Xiyang Dai, Lu Yuan, Lei Zhang. +1. **[Data2Vec](https://huggingface.co/docs/transformers/model_doc/data2vec)** (from Facebook) released with the paper [Data2Vec: A General Framework for Self-supervised Learning in Speech, Vision and Language](https://arxiv.org/abs/2202.03555) by Alexei Baevski, Wei-Ning Hsu, Qiantong Xu, Arun Babu, Jiatao Gu, Michael Auli. +1. **[DBRX](https://huggingface.co/docs/transformers/main/model_doc/dbrx)** (from Databricks) released with the paper [Introducing DBRX: A New State-of-the-Art Open LLM](https://www.databricks.com/blog/introducing-dbrx-new-state-art-open-llm) by the Mosaic Research Team. +1. **[DeBERTa](https://huggingface.co/docs/transformers/model_doc/deberta)** (from Microsoft) released with the paper [DeBERTa: Decoding-enhanced BERT with Disentangled Attention](https://arxiv.org/abs/2006.03654) by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen. +1. **[DeBERTa-v2](https://huggingface.co/docs/transformers/model_doc/deberta-v2)** (from Microsoft) released with the paper [DeBERTa: Decoding-enhanced BERT with Disentangled Attention](https://arxiv.org/abs/2006.03654) by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen. +1. **[Decision Transformer](https://huggingface.co/docs/transformers/model_doc/decision_transformer)** (from Berkeley/Facebook/Google) released with the paper [Decision Transformer: Reinforcement Learning via Sequence Modeling](https://arxiv.org/abs/2106.01345) by Lili Chen, Kevin Lu, Aravind Rajeswaran, Kimin Lee, Aditya Grover, Michael Laskin, Pieter Abbeel, Aravind Srinivas, Igor Mordatch. +1. **[Deformable DETR](https://huggingface.co/docs/transformers/model_doc/deformable_detr)** (from SenseTime Research) released with the paper [Deformable DETR: Deformable Transformers for End-to-End Object Detection](https://arxiv.org/abs/2010.04159) by Xizhou Zhu, Weijie Su, Lewei Lu, Bin Li, Xiaogang Wang, Jifeng Dai. +1. **[DeiT](https://huggingface.co/docs/transformers/model_doc/deit)** (from Facebook) released with the paper [Training data-efficient image transformers & distillation through attention](https://arxiv.org/abs/2012.12877) by Hugo Touvron, Matthieu Cord, Matthijs Douze, Francisco Massa, Alexandre Sablayrolles, Hervé Jégou. +1. **[DePlot](https://huggingface.co/docs/transformers/model_doc/deplot)** (from Google AI) released with the paper [DePlot: One-shot visual language reasoning by plot-to-table translation](https://arxiv.org/abs/2212.10505) by Fangyu Liu, Julian Martin Eisenschlos, Francesco Piccinno, Syrine Krichene, Chenxi Pang, Kenton Lee, Mandar Joshi, Wenhu Chen, Nigel Collier, Yasemin Altun. +1. **[Depth Anything](https://huggingface.co/docs/transformers/model_doc/depth_anything)** (from University of Hong Kong and TikTok) released with the paper [Depth Anything: Unleashing the Power of Large-Scale Unlabeled Data](https://arxiv.org/abs/2401.10891) by Lihe Yang, Bingyi Kang, Zilong Huang, Xiaogang Xu, Jiashi Feng, Hengshuang Zhao. +1. **[DETA](https://huggingface.co/docs/transformers/model_doc/deta)** (from The University of Texas at Austin) released with the paper [NMS Strikes Back](https://arxiv.org/abs/2212.06137) by Jeffrey Ouyang-Zhang, Jang Hyun Cho, Xingyi Zhou, Philipp Krähenbühl. +1. 
**[DETR](https://huggingface.co/docs/transformers/model_doc/detr)** (from Facebook) released with the paper [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872) by Nicolas Carion, Francisco Massa, Gabriel Synnaeve, Nicolas Usunier, Alexander Kirillov, Sergey Zagoruyko. +1. **[DialoGPT](https://huggingface.co/docs/transformers/model_doc/dialogpt)** (from Microsoft Research) released with the paper [DialoGPT: Large-Scale Generative Pre-training for Conversational Response Generation](https://arxiv.org/abs/1911.00536) by Yizhe Zhang, Siqi Sun, Michel Galley, Yen-Chun Chen, Chris Brockett, Xiang Gao, Jianfeng Gao, Jingjing Liu, Bill Dolan. +1. **[DiNAT](https://huggingface.co/docs/transformers/model_doc/dinat)** (from SHI Labs) released with the paper [Dilated Neighborhood Attention Transformer](https://arxiv.org/abs/2209.15001) by Ali Hassani and Humphrey Shi. +1. **[DINOv2](https://huggingface.co/docs/transformers/model_doc/dinov2)** (from Meta AI) released with the paper [DINOv2: Learning Robust Visual Features without Supervision](https://arxiv.org/abs/2304.07193) by Maxime Oquab, Timothée Darcet, Théo Moutakanni, Huy Vo, Marc Szafraniec, Vasil Khalidov, Pierre Fernandez, Daniel Haziza, Francisco Massa, Alaaeldin El-Nouby, Mahmoud Assran, Nicolas Ballas, Wojciech Galuba, Russell Howes, Po-Yao Huang, Shang-Wen Li, Ishan Misra, Michael Rabbat, Vasu Sharma, Gabriel Synnaeve, Hu Xu, Hervé Jegou, Julien Mairal, Patrick Labatut, Armand Joulin, Piotr Bojanowski. +1. **[DistilBERT](https://huggingface.co/docs/transformers/model_doc/distilbert)** (from HuggingFace), released together with the paper [DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter](https://arxiv.org/abs/1910.01108) by Victor Sanh, Lysandre Debut and Thomas Wolf. The same method has been applied to compress GPT2 into [DistilGPT2](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation), RoBERTa into [DistilRoBERTa](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation), Multilingual BERT into [DistilmBERT](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation) and a German version of DistilBERT. +1. **[DiT](https://huggingface.co/docs/transformers/model_doc/dit)** (from Microsoft Research) released with the paper [DiT: Self-supervised Pre-training for Document Image Transformer](https://arxiv.org/abs/2203.02378) by Junlong Li, Yiheng Xu, Tengchao Lv, Lei Cui, Cha Zhang, Furu Wei. +1. **[Donut](https://huggingface.co/docs/transformers/model_doc/donut)** (from NAVER), released together with the paper [OCR-free Document Understanding Transformer](https://arxiv.org/abs/2111.15664) by Geewook Kim, Teakgyu Hong, Moonbin Yim, Jeongyeon Nam, Jinyoung Park, Jinyeong Yim, Wonseok Hwang, Sangdoo Yun, Dongyoon Han, Seunghyun Park. +1. **[DPR](https://huggingface.co/docs/transformers/model_doc/dpr)** (from Facebook) released with the paper [Dense Passage Retrieval for Open-Domain Question Answering](https://arxiv.org/abs/2004.04906) by Vladimir Karpukhin, Barlas Oğuz, Sewon Min, Patrick Lewis, Ledell Wu, Sergey Edunov, Danqi Chen, and Wen-tau Yih. +1. **[DPT](https://huggingface.co/docs/transformers/master/model_doc/dpt)** (from Intel Labs) released with the paper [Vision Transformers for Dense Prediction](https://arxiv.org/abs/2103.13413) by René Ranftl, Alexey Bochkovskiy, Vladlen Koltun. +1. 
**[EfficientFormer](https://huggingface.co/docs/transformers/model_doc/efficientformer)** (from Snap Research) released with the paper [EfficientFormer: Vision Transformers at MobileNet Speed](https://arxiv.org/abs/2206.01191) by Yanyu Li, Geng Yuan, Yang Wen, Ju Hu, Georgios Evangelidis, Sergey Tulyakov, Yanzhi Wang, Jian Ren. +1. **[EfficientNet](https://huggingface.co/docs/transformers/model_doc/efficientnet)** (from Google Brain) released with the paper [EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks](https://arxiv.org/abs/1905.11946) by Mingxing Tan, Quoc V. Le. +1. **[ELECTRA](https://huggingface.co/docs/transformers/model_doc/electra)** (from Google Research/Stanford University) released with the paper [ELECTRA: Pre-training text encoders as discriminators rather than generators](https://arxiv.org/abs/2003.10555) by Kevin Clark, Minh-Thang Luong, Quoc V. Le, Christopher D. Manning. +1. **[EnCodec](https://huggingface.co/docs/transformers/model_doc/encodec)** (from Meta AI) released with the paper [High Fidelity Neural Audio Compression](https://arxiv.org/abs/2210.13438) by Alexandre Défossez, Jade Copet, Gabriel Synnaeve, Yossi Adi. +1. **[EncoderDecoder](https://huggingface.co/docs/transformers/model_doc/encoder-decoder)** (from Google Research) released with the paper [Leveraging Pre-trained Checkpoints for Sequence Generation Tasks](https://arxiv.org/abs/1907.12461) by Sascha Rothe, Shashi Narayan, Aliaksei Severyn. +1. **[ERNIE](https://huggingface.co/docs/transformers/model_doc/ernie)** (from Baidu) released with the paper [ERNIE: Enhanced Representation through Knowledge Integration](https://arxiv.org/abs/1904.09223) by Yu Sun, Shuohuan Wang, Yukun Li, Shikun Feng, Xuyi Chen, Han Zhang, Xin Tian, Danxiang Zhu, Hao Tian, Hua Wu. +1. **[ErnieM](https://huggingface.co/docs/transformers/model_doc/ernie_m)** (from Baidu) released with the paper [ERNIE-M: Enhanced Multilingual Representation by Aligning Cross-lingual Semantics with Monolingual Corpora](https://arxiv.org/abs/2012.15674) by Xuan Ouyang, Shuohuan Wang, Chao Pang, Yu Sun, Hao Tian, Hua Wu, Haifeng Wang. +1. **[ESM](https://huggingface.co/docs/transformers/model_doc/esm)** (from Meta AI) are transformer protein language models. **ESM-1b** was released with the paper [Biological structure and function emerge from scaling unsupervised learning to 250 million protein sequences](https://www.pnas.org/content/118/15/e2016239118) by Alexander Rives, Joshua Meier, Tom Sercu, Siddharth Goyal, Zeming Lin, Jason Liu, Demi Guo, Myle Ott, C. Lawrence Zitnick, Jerry Ma, and Rob Fergus. **ESM-1v** was released with the paper [Language models enable zero-shot prediction of the effects of mutations on protein function](https://doi.org/10.1101/2021.07.09.450648) by Joshua Meier, Roshan Rao, Robert Verkuil, Jason Liu, Tom Sercu and Alexander Rives. **ESM-2 and ESMFold** were released with the paper [Language models of protein sequences at the scale of evolution enable accurate structure prediction](https://doi.org/10.1101/2022.07.20.500902) by Zeming Lin, Halil Akin, Roshan Rao, Brian Hie, Zhongkai Zhu, Wenting Lu, Allan dos Santos Costa, Maryam Fazel-Zarandi, Tom Sercu, Sal Candido, Alexander Rives. +1. 
**[Falcon](https://huggingface.co/docs/transformers/model_doc/falcon)** (from Technology Innovation Institute) by Almazrouei, Ebtesam and Alobeidli, Hamza and Alshamsi, Abdulaziz and Cappelli, Alessandro and Cojocaru, Ruxandra and Debbah, Merouane and Goffinet, Etienne and Heslow, Daniel and Launay, Julien and Malartic, Quentin and Noune, Badreddine and Pannier, Baptiste and Penedo, Guilherme. +1. **[FastSpeech2Conformer](https://huggingface.co/docs/transformers/model_doc/fastspeech2_conformer)** (from ESPnet) released with the paper [Recent Developments On Espnet Toolkit Boosted By Conformer](https://arxiv.org/abs/2010.13956) by Pengcheng Guo, Florian Boyer, Xuankai Chang, Tomoki Hayashi, Yosuke Higuchi, Hirofumi Inaguma, Naoyuki Kamo, Chenda Li, Daniel Garcia-Romero, Jiatong Shi, Jing Shi, Shinji Watanabe, Kun Wei, Wangyou Zhang, and Yuekai Zhang. +1. **[FLAN-T5](https://huggingface.co/docs/transformers/model_doc/flan-t5)** (from Google AI) released in the repository [google-research/t5x](https://github.com/google-research/t5x/blob/main/docs/models.md#flan-t5-checkpoints) by Hyung Won Chung, Le Hou, Shayne Longpre, Barret Zoph, Yi Tay, William Fedus, Eric Li, Xuezhi Wang, Mostafa Dehghani, Siddhartha Brahma, Albert Webson, Shixiang Shane Gu, Zhuyun Dai, Mirac Suzgun, Xinyun Chen, Aakanksha Chowdhery, Sharan Narang, Gaurav Mishra, Adams Yu, Vincent Zhao, Yanping Huang, Andrew Dai, Hongkun Yu, Slav Petrov, Ed H. Chi, Jeff Dean, Jacob Devlin, Adam Roberts, Denny Zhou, Quoc V. Le, and Jason Wei +1. **[FLAN-UL2](https://huggingface.co/docs/transformers/model_doc/flan-ul2)** (from Google AI) released in the repository [google-research/t5x](https://github.com/google-research/t5x/blob/main/docs/models.md#flan-ul2-checkpoints) by Hyung Won Chung, Le Hou, Shayne Longpre, Barret Zoph, Yi Tay, William Fedus, Eric Li, Xuezhi Wang, Mostafa Dehghani, Siddhartha Brahma, Albert Webson, Shixiang Shane Gu, Zhuyun Dai, Mirac Suzgun, Xinyun Chen, Aakanksha Chowdhery, Sharan Narang, Gaurav Mishra, Adams Yu, Vincent Zhao, Yanping Huang, Andrew Dai, Hongkun Yu, Slav Petrov, Ed H. Chi, Jeff Dean, Jacob Devlin, Adam Roberts, Denny Zhou, Quoc V. Le, and Jason Wei +1. **[FlauBERT](https://huggingface.co/docs/transformers/model_doc/flaubert)** (from CNRS) released with the paper [FlauBERT: Unsupervised Language Model Pre-training for French](https://arxiv.org/abs/1912.05372) by Hang Le, Loïc Vial, Jibril Frej, Vincent Segonne, Maximin Coavoux, Benjamin Lecouteux, Alexandre Allauzen, Benoît Crabbé, Laurent Besacier, Didier Schwab. +1. **[FLAVA](https://huggingface.co/docs/transformers/model_doc/flava)** (from Facebook AI) released with the paper [FLAVA: A Foundational Language And Vision Alignment Model](https://arxiv.org/abs/2112.04482) by Amanpreet Singh, Ronghang Hu, Vedanuj Goswami, Guillaume Couairon, Wojciech Galuba, Marcus Rohrbach, and Douwe Kiela. +1. **[FNet](https://huggingface.co/docs/transformers/model_doc/fnet)** (from Google Research) released with the paper [FNet: Mixing Tokens with Fourier Transforms](https://arxiv.org/abs/2105.03824) by James Lee-Thorp, Joshua Ainslie, Ilya Eckstein, Santiago Ontanon. +1. **[FocalNet](https://huggingface.co/docs/transformers/model_doc/focalnet)** (from Microsoft Research) released with the paper [Focal Modulation Networks](https://arxiv.org/abs/2203.11926) by Jianwei Yang, Chunyuan Li, Xiyang Dai, Lu Yuan, Jianfeng Gao. +1. 
**[Funnel Transformer](https://huggingface.co/docs/transformers/model_doc/funnel)** (from CMU/Google Brain) released with the paper [Funnel-Transformer: Filtering out Sequential Redundancy for Efficient Language Processing](https://arxiv.org/abs/2006.03236) by Zihang Dai, Guokun Lai, Yiming Yang, Quoc V. Le. +1. **[Fuyu](https://huggingface.co/docs/transformers/model_doc/fuyu)** (from ADEPT) by Rohan Bavishi, Erich Elsen, Curtis Hawthorne, Maxwell Nye, Augustus Odena, Arushi Somani, Sağnak Taşırlar. Released in a [blog post](https://www.adept.ai/blog/fuyu-8b) +1. **[Gemma](https://huggingface.co/docs/transformers/model_doc/gemma)** (from Google) released with the paper [Gemma: Open Models Based on Gemini Technology and Research](https://blog.google/technology/developers/gemma-open-models/) by the Gemma Google team. +1. **[GIT](https://huggingface.co/docs/transformers/model_doc/git)** (from Microsoft Research) released with the paper [GIT: A Generative Image-to-text Transformer for Vision and Language](https://arxiv.org/abs/2205.14100) by Jianfeng Wang, Zhengyuan Yang, Xiaowei Hu, Linjie Li, Kevin Lin, Zhe Gan, Zicheng Liu, Ce Liu, Lijuan Wang. +1. **[GLPN](https://huggingface.co/docs/transformers/model_doc/glpn)** (from KAIST) released with the paper [Global-Local Path Networks for Monocular Depth Estimation with Vertical CutDepth](https://arxiv.org/abs/2201.07436) by Doyeon Kim, Woonghyun Ga, Pyungwhan Ahn, Donggyu Joo, Sehwan Chun, Junmo Kim. +1. **[GPT](https://huggingface.co/docs/transformers/model_doc/openai-gpt)** (from OpenAI) released with the paper [Improving Language Understanding by Generative Pre-Training](https://openai.com/research/language-unsupervised/) by Alec Radford, Karthik Narasimhan, Tim Salimans and Ilya Sutskever. +1. **[GPT Neo](https://huggingface.co/docs/transformers/model_doc/gpt_neo)** (from EleutherAI) released in the repository [EleutherAI/gpt-neo](https://github.com/EleutherAI/gpt-neo) by Sid Black, Stella Biderman, Leo Gao, Phil Wang and Connor Leahy. +1. **[GPT NeoX](https://huggingface.co/docs/transformers/model_doc/gpt_neox)** (from EleutherAI) released with the paper [GPT-NeoX-20B: An Open-Source Autoregressive Language Model](https://arxiv.org/abs/2204.06745) by Sid Black, Stella Biderman, Eric Hallahan, Quentin Anthony, Leo Gao, Laurence Golding, Horace He, Connor Leahy, Kyle McDonell, Jason Phang, Michael Pieler, USVSN Sai Prashanth, Shivanshu Purohit, Laria Reynolds, Jonathan Tow, Ben Wang, Samuel Weinbach +1. **[GPT NeoX Japanese](https://huggingface.co/docs/transformers/model_doc/gpt_neox_japanese)** (from ABEJA) released by Shinya Otani, Takayoshi Makabe, Anuj Arora, and Kyo Hattori. +1. **[GPT-2](https://huggingface.co/docs/transformers/model_doc/gpt2)** (from OpenAI) released with the paper [Language Models are Unsupervised Multitask Learners](https://openai.com/research/better-language-models/) by Alec Radford, Jeffrey Wu, Rewon Child, David Luan, Dario Amodei and Ilya Sutskever. +1. **[GPT-J](https://huggingface.co/docs/transformers/model_doc/gptj)** (from EleutherAI) released in the repository [kingoflolz/mesh-transformer-jax](https://github.com/kingoflolz/mesh-transformer-jax/) by Ben Wang and Aran Komatsuzaki. +1. 
**[GPT-Sw3](https://huggingface.co/docs/transformers/model_doc/gpt-sw3)** (from AI-Sweden) released with the paper [Lessons Learned from GPT-SW3: Building the First Large-Scale Generative Language Model for Swedish](http://www.lrec-conf.org/proceedings/lrec2022/pdf/2022.lrec-1.376.pdf) by Ariel Ekgren, Amaru Cuba Gyllensten, Evangelia Gogoulou, Alice Heiman, Severine Verlinden, Joey Öhman, Fredrik Carlsson, Magnus Sahlgren. +1. **[GPTBigCode](https://huggingface.co/docs/transformers/model_doc/gpt_bigcode)** (from BigCode) released with the paper [SantaCoder: don't reach for the stars!](https://arxiv.org/abs/2301.03988) by Loubna Ben Allal, Raymond Li, Denis Kocetkov, Chenghao Mou, Christopher Akiki, Carlos Munoz Ferrandis, Niklas Muennighoff, Mayank Mishra, Alex Gu, Manan Dey, Logesh Kumar Umapathi, Carolyn Jane Anderson, Yangtian Zi, Joel Lamy Poirier, Hailey Schoelkopf, Sergey Troshin, Dmitry Abulkhanov, Manuel Romero, Michael Lappert, Francesco De Toni, Bernardo García del Río, Qian Liu, Shamik Bose, Urvashi Bhattacharyya, Terry Yue Zhuo, Ian Yu, Paulo Villegas, Marco Zocca, Sourab Mangrulkar, David Lansky, Huu Nguyen, Danish Contractor, Luis Villa, Jia Li, Dzmitry Bahdanau, Yacine Jernite, Sean Hughes, Daniel Fried, Arjun Guha, Harm de Vries, Leandro von Werra. +1. **[GPTSAN-japanese](https://huggingface.co/docs/transformers/model_doc/gptsan-japanese)** released in the repository [tanreinama/GPTSAN](https://github.com/tanreinama/GPTSAN/blob/main/report/model.md) by Toshiyuki Sakamoto(tanreinama). +1. **[Graphormer](https://huggingface.co/docs/transformers/model_doc/graphormer)** (from Microsoft) released with the paper [Do Transformers Really Perform Bad for Graph Representation?](https://arxiv.org/abs/2106.05234) by Chengxuan Ying, Tianle Cai, Shengjie Luo, Shuxin Zheng, Guolin Ke, Di He, Yanming Shen, Tie-Yan Liu. +1. **[Grounding DINO](https://huggingface.co/docs/transformers/model_doc/grounding-dino)** (from Institute for AI, Tsinghua-Bosch Joint Center for ML, Tsinghua University, IDEA Research and others) released with the paper [Grounding DINO: Marrying DINO with Grounded Pre-Training for Open-Set Object Detection](https://arxiv.org/abs/2303.05499) by Shilong Liu, Zhaoyang Zeng, Tianhe Ren, Feng Li, Hao Zhang, Jie Yang, Chunyuan Li, Jianwei Yang, Hang Su, Jun Zhu, Lei Zhang. +1. **[GroupViT](https://huggingface.co/docs/transformers/model_doc/groupvit)** (from UCSD, NVIDIA) released with the paper [GroupViT: Semantic Segmentation Emerges from Text Supervision](https://arxiv.org/abs/2202.11094) by Jiarui Xu, Shalini De Mello, Sifei Liu, Wonmin Byeon, Thomas Breuel, Jan Kautz, Xiaolong Wang. +1. **[HerBERT](https://huggingface.co/docs/transformers/model_doc/herbert)** (from Allegro.pl, AGH University of Science and Technology) released with the paper [KLEJ: Comprehensive Benchmark for Polish Language Understanding](https://www.aclweb.org/anthology/2020.acl-main.111.pdf) by Piotr Rybak, Robert Mroczkowski, Janusz Tracz, Ireneusz Gawlik. +1. **[Hubert](https://huggingface.co/docs/transformers/model_doc/hubert)** (from Facebook) released with the paper [HuBERT: Self-Supervised Speech Representation Learning by Masked Prediction of Hidden Units](https://arxiv.org/abs/2106.07447) by Wei-Ning Hsu, Benjamin Bolte, Yao-Hung Hubert Tsai, Kushal Lakhotia, Ruslan Salakhutdinov, Abdelrahman Mohamed. +1. 
**[I-BERT](https://huggingface.co/docs/transformers/model_doc/ibert)** (from Berkeley) released with the paper [I-BERT: Integer-only BERT Quantization](https://arxiv.org/abs/2101.01321) by Sehoon Kim, Amir Gholami, Zhewei Yao, Michael W. Mahoney, Kurt Keutzer. +1. **[IDEFICS](https://huggingface.co/docs/transformers/model_doc/idefics)** (from HuggingFace) released with the paper [OBELICS: An Open Web-Scale Filtered Dataset of Interleaved Image-Text Documents](https://huggingface.co/papers/2306.16527) by Hugo Laurençon, Lucile Saulnier, Léo Tronchon, Stas Bekman, Amanpreet Singh, Anton Lozhkov, Thomas Wang, Siddharth Karamcheti, Alexander M. Rush, Douwe Kiela, Matthieu Cord, Victor Sanh. +1. **[Idefics2](https://huggingface.co/docs/transformers/model_doc/idefics2)** (from Hugging Face) released with the blog [IDEFICS2](https://huggingface.co/blog/idefics2) by Léo Tronchon, Hugo Laurencon, Victor Sanh. +1. **[ImageGPT](https://huggingface.co/docs/transformers/model_doc/imagegpt)** (from OpenAI) released with the paper [Generative Pretraining from Pixels](https://openai.com/blog/image-gpt/) by Mark Chen, Alec Radford, Rewon Child, Jeffrey Wu, Heewoo Jun, David Luan, Ilya Sutskever. +1. **[Informer](https://huggingface.co/docs/transformers/model_doc/informer)** (from Beihang University, UC Berkeley, Rutgers University, SEDD Company) released with the paper [Informer: Beyond Efficient Transformer for Long Sequence Time-Series Forecasting](https://arxiv.org/abs/2012.07436) by Haoyi Zhou, Shanghang Zhang, Jieqi Peng, Shuai Zhang, Jianxin Li, Hui Xiong, and Wancai Zhang. +1. **[InstructBLIP](https://huggingface.co/docs/transformers/model_doc/instructblip)** (from Salesforce) released with the paper [InstructBLIP: Towards General-purpose Vision-Language Models with Instruction Tuning](https://arxiv.org/abs/2305.06500) by Wenliang Dai, Junnan Li, Dongxu Li, Anthony Meng Huat Tiong, Junqi Zhao, Weisheng Wang, Boyang Li, Pascale Fung, Steven Hoi. +1. **[Jamba](https://huggingface.co/docs/transformers/model_doc/jamba)** (from AI21 Labs Ltd.) released with the paper [Jamba: A Hybrid Transformer-Mamba Language Model](https://arxiv.org/abs/2403.19887) by Opher Lieber, Barak Lenz, Hofit Bata, Gal Cohen, Jhonathan Osin, Itay Dalmedigos, Erez Safahi, Shaked Meirom, Yonatan Belinkov, Shai Shalev-Shwartz, Omri Abend, Raz Alon, Tomer Asida, Amir Bergman, Roman Glozman, Michael Gokhman, Avshalom Manevich, Nir Ratner, Noam Rozen, Erez Shwartz, Mor Zusman, Yoav Shoham. +1. **[Jukebox](https://huggingface.co/docs/transformers/model_doc/jukebox)** (from OpenAI) released with the paper [Jukebox: A Generative Model for Music](https://arxiv.org/pdf/2005.00341.pdf) by Prafulla Dhariwal, Heewoo Jun, Christine Payne, Jong Wook Kim, Alec Radford, Ilya Sutskever. +1. **[KOSMOS-2](https://huggingface.co/docs/transformers/model_doc/kosmos-2)** (from Microsoft Research Asia) released with the paper [Kosmos-2: Grounding Multimodal Large Language Models to the World](https://arxiv.org/abs/2306.14824) by Zhiliang Peng, Wenhui Wang, Li Dong, Yaru Hao, Shaohan Huang, Shuming Ma, Furu Wei. +1. **[LayoutLM](https://huggingface.co/docs/transformers/model_doc/layoutlm)** (from Microsoft Research Asia) released with the paper [LayoutLM: Pre-training of Text and Layout for Document Image Understanding](https://arxiv.org/abs/1912.13318) by Yiheng Xu, Minghao Li, Lei Cui, Shaohan Huang, Furu Wei, Ming Zhou. +1. 
**[LayoutLMv2](https://huggingface.co/docs/transformers/model_doc/layoutlmv2)** (from Microsoft Research Asia) released with the paper [LayoutLMv2: Multi-modal Pre-training for Visually-Rich Document Understanding](https://arxiv.org/abs/2012.14740) by Yang Xu, Yiheng Xu, Tengchao Lv, Lei Cui, Furu Wei, Guoxin Wang, Yijuan Lu, Dinei Florencio, Cha Zhang, Wanxiang Che, Min Zhang, Lidong Zhou. +1. **[LayoutLMv3](https://huggingface.co/docs/transformers/model_doc/layoutlmv3)** (from Microsoft Research Asia) released with the paper [LayoutLMv3: Pre-training for Document AI with Unified Text and Image Masking](https://arxiv.org/abs/2204.08387) by Yupan Huang, Tengchao Lv, Lei Cui, Yutong Lu, Furu Wei. +1. **[LayoutXLM](https://huggingface.co/docs/transformers/model_doc/layoutxlm)** (from Microsoft Research Asia) released with the paper [LayoutXLM: Multimodal Pre-training for Multilingual Visually-rich Document Understanding](https://arxiv.org/abs/2104.08836) by Yiheng Xu, Tengchao Lv, Lei Cui, Guoxin Wang, Yijuan Lu, Dinei Florencio, Cha Zhang, Furu Wei. +1. **[LED](https://huggingface.co/docs/transformers/model_doc/led)** (from AllenAI) released with the paper [Longformer: The Long-Document Transformer](https://arxiv.org/abs/2004.05150) by Iz Beltagy, Matthew E. Peters, Arman Cohan. +1. **[LeViT](https://huggingface.co/docs/transformers/model_doc/levit)** (from Meta AI) released with the paper [LeViT: A Vision Transformer in ConvNet's Clothing for Faster Inference](https://arxiv.org/abs/2104.01136) by Ben Graham, Alaaeldin El-Nouby, Hugo Touvron, Pierre Stock, Armand Joulin, Hervé Jégou, Matthijs Douze. +1. **[LiLT](https://huggingface.co/docs/transformers/model_doc/lilt)** (from South China University of Technology) released with the paper [LiLT: A Simple yet Effective Language-Independent Layout Transformer for Structured Document Understanding](https://arxiv.org/abs/2202.13669) by Jiapeng Wang, Lianwen Jin, Kai Ding. +1. **[LLaMA](https://huggingface.co/docs/transformers/model_doc/llama)** (from The FAIR team of Meta AI) released with the paper [LLaMA: Open and Efficient Foundation Language Models](https://arxiv.org/abs/2302.13971) by Hugo Touvron, Thibaut Lavril, Gautier Izacard, Xavier Martinet, Marie-Anne Lachaux, Timothée Lacroix, Baptiste Rozière, Naman Goyal, Eric Hambro, Faisal Azhar, Aurelien Rodriguez, Armand Joulin, Edouard Grave, Guillaume Lample. +1. 
**[Llama2](https://huggingface.co/docs/transformers/model_doc/llama2)** (from The FAIR team of Meta AI) released with the paper [Llama2: Open Foundation and Fine-Tuned Chat Models](https://ai.meta.com/research/publications/llama-2-open-foundation-and-fine-tuned-chat-models/) by Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez, Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier Martinet, Todor Mihaylov, Pushkar Mishra, Igor Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing Ellen Tan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu, Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert Stojnic, Sergey Edunov, Thomas Scialom. +1. **[LLaVa](https://huggingface.co/docs/transformers/model_doc/llava)** (from Microsoft Research & University of Wisconsin-Madison) released with the paper [Visual Instruction Tuning](https://arxiv.org/abs/2304.08485) by Haotian Liu, Chunyuan Li, Yuheng Li and Yong Jae Lee. +1. **[LLaVA-NeXT](https://huggingface.co/docs/transformers/model_doc/llava_next)** (from Microsoft Research & University of Wisconsin-Madison) released with the paper [Improved Baselines with Visual Instruction Tuning](https://arxiv.org/abs/2310.03744) by Haotian Liu, Chunyuan Li, Yuheng Li and Yong Jae Lee. +1. **[Longformer](https://huggingface.co/docs/transformers/model_doc/longformer)** (from AllenAI) released with the paper [Longformer: The Long-Document Transformer](https://arxiv.org/abs/2004.05150) by Iz Beltagy, Matthew E. Peters, Arman Cohan. +1. **[LongT5](https://huggingface.co/docs/transformers/model_doc/longt5)** (from Google AI) released with the paper [LongT5: Efficient Text-To-Text Transformer for Long Sequences](https://arxiv.org/abs/2112.07916) by Mandy Guo, Joshua Ainslie, David Uthus, Santiago Ontanon, Jianmo Ni, Yun-Hsuan Sung, Yinfei Yang. +1. **[LUKE](https://huggingface.co/docs/transformers/model_doc/luke)** (from Studio Ousia) released with the paper [LUKE: Deep Contextualized Entity Representations with Entity-aware Self-attention](https://arxiv.org/abs/2010.01057) by Ikuya Yamada, Akari Asai, Hiroyuki Shindo, Hideaki Takeda, Yuji Matsumoto. +1. **[LXMERT](https://huggingface.co/docs/transformers/model_doc/lxmert)** (from UNC Chapel Hill) released with the paper [LXMERT: Learning Cross-Modality Encoder Representations from Transformers for Open-Domain Question Answering](https://arxiv.org/abs/1908.07490) by Hao Tan and Mohit Bansal. +1. **[M-CTC-T](https://huggingface.co/docs/transformers/model_doc/mctct)** (from Facebook) released with the paper [Pseudo-Labeling For Massively Multilingual Speech Recognition](https://arxiv.org/abs/2111.00161) by Loren Lugosch, Tatiana Likhomanenko, Gabriel Synnaeve, and Ronan Collobert. +1. 
**[M2M100](https://huggingface.co/docs/transformers/model_doc/m2m_100)** (from Facebook) released with the paper [Beyond English-Centric Multilingual Machine Translation](https://arxiv.org/abs/2010.11125) by Angela Fan, Shruti Bhosale, Holger Schwenk, Zhiyi Ma, Ahmed El-Kishky, Siddharth Goyal, Mandeep Baines, Onur Celebi, Guillaume Wenzek, Vishrav Chaudhary, Naman Goyal, Tom Birch, Vitaliy Liptchinsky, Sergey Edunov, Edouard Grave, Michael Auli, Armand Joulin. +1. **[MADLAD-400](https://huggingface.co/docs/transformers/model_doc/madlad-400)** (from Google) released with the paper [MADLAD-400: A Multilingual And Document-Level Large Audited Dataset](https://arxiv.org/abs/2309.04662) by Sneha Kudugunta, Isaac Caswell, Biao Zhang, Xavier Garcia, Christopher A. Choquette-Choo, Katherine Lee, Derrick Xin, Aditya Kusupati, Romi Stella, Ankur Bapna, Orhan Firat. +1. **[Mamba](https://huggingface.co/docs/transformers/model_doc/mamba)** (from Albert Gu and Tri Dao) released with the paper [Mamba: Linear-Time Sequence Modeling with Selective State Spaces](https://arxiv.org/abs/2312.00752) by Albert Gu and Tri Dao. +1. **[MarianMT](https://huggingface.co/docs/transformers/model_doc/marian)** Machine translation models trained using [OPUS](http://opus.nlpl.eu/) data by Jörg Tiedemann. The [Marian Framework](https://marian-nmt.github.io/) is being developed by the Microsoft Translator Team. +1. **[MarkupLM](https://huggingface.co/docs/transformers/model_doc/markuplm)** (from Microsoft Research Asia) released with the paper [MarkupLM: Pre-training of Text and Markup Language for Visually-rich Document Understanding](https://arxiv.org/abs/2110.08518) by Junlong Li, Yiheng Xu, Lei Cui, Furu Wei. +1. **[Mask2Former](https://huggingface.co/docs/transformers/model_doc/mask2former)** (from FAIR and UIUC) released with the paper [Masked-attention Mask Transformer for Universal Image Segmentation](https://arxiv.org/abs/2112.01527) by Bowen Cheng, Ishan Misra, Alexander G. Schwing, Alexander Kirillov, Rohit Girdhar. +1. **[MaskFormer](https://huggingface.co/docs/transformers/model_doc/maskformer)** (from Meta and UIUC) released with the paper [Per-Pixel Classification is Not All You Need for Semantic Segmentation](https://arxiv.org/abs/2107.06278) by Bowen Cheng, Alexander G. Schwing, Alexander Kirillov. +1. **[MatCha](https://huggingface.co/docs/transformers/model_doc/matcha)** (from Google AI) released with the paper [MatCha: Enhancing Visual Language Pretraining with Math Reasoning and Chart Derendering](https://arxiv.org/abs/2212.09662) by Fangyu Liu, Francesco Piccinno, Syrine Krichene, Chenxi Pang, Kenton Lee, Mandar Joshi, Yasemin Altun, Nigel Collier, Julian Martin Eisenschlos. +1. **[mBART](https://huggingface.co/docs/transformers/model_doc/mbart)** (from Facebook) released with the paper [Multilingual Denoising Pre-training for Neural Machine Translation](https://arxiv.org/abs/2001.08210) by Yinhan Liu, Jiatao Gu, Naman Goyal, Xian Li, Sergey Edunov, Marjan Ghazvininejad, Mike Lewis, Luke Zettlemoyer. +1. **[mBART-50](https://huggingface.co/docs/transformers/model_doc/mbart)** (from Facebook) released with the paper [Multilingual Translation with Extensible Multilingual Pretraining and Finetuning](https://arxiv.org/abs/2008.00401) by Yuqing Tang, Chau Tran, Xian Li, Peng-Jen Chen, Naman Goyal, Vishrav Chaudhary, Jiatao Gu, Angela Fan. +1. 
**[MEGA](https://huggingface.co/docs/transformers/model_doc/mega)** (from Meta/USC/CMU/SJTU) released with the paper [Mega: Moving Average Equipped Gated Attention](https://arxiv.org/abs/2209.10655) by Xuezhe Ma, Chunting Zhou, Xiang Kong, Junxian He, Liangke Gui, Graham Neubig, Jonathan May, and Luke Zettlemoyer. +1. **[Megatron-BERT](https://huggingface.co/docs/transformers/model_doc/megatron-bert)** (from NVIDIA) released with the paper [Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism](https://arxiv.org/abs/1909.08053) by Mohammad Shoeybi, Mostofa Patwary, Raul Puri, Patrick LeGresley, Jared Casper and Bryan Catanzaro. +1. **[Megatron-GPT2](https://huggingface.co/docs/transformers/model_doc/megatron_gpt2)** (from NVIDIA) released with the paper [Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism](https://arxiv.org/abs/1909.08053) by Mohammad Shoeybi, Mostofa Patwary, Raul Puri, Patrick LeGresley, Jared Casper and Bryan Catanzaro. +1. **[MGP-STR](https://huggingface.co/docs/transformers/model_doc/mgp-str)** (from Alibaba Research) released with the paper [Multi-Granularity Prediction for Scene Text Recognition](https://arxiv.org/abs/2209.03592) by Peng Wang, Cheng Da, and Cong Yao. +1. **[Mistral](https://huggingface.co/docs/transformers/model_doc/mistral)** (from Mistral AI) by The [Mistral AI](https://mistral.ai) team: Albert Jiang, Alexandre Sablayrolles, Arthur Mensch, Chris Bamford, Devendra Singh Chaplot, Diego de las Casas, Florian Bressand, Gianna Lengyel, Guillaume Lample, Lélio Renard Lavaud, Lucile Saulnier, Marie-Anne Lachaux, Pierre Stock, Teven Le Scao, Thibaut Lavril, Thomas Wang, Timothée Lacroix, William El Sayed. +1. **[Mixtral](https://huggingface.co/docs/transformers/model_doc/mixtral)** (from Mistral AI) by The [Mistral AI](https://mistral.ai) team: Albert Jiang, Alexandre Sablayrolles, Arthur Mensch, Chris Bamford, Devendra Singh Chaplot, Diego de las Casas, Florian Bressand, Gianna Lengyel, Guillaume Lample, Lélio Renard Lavaud, Lucile Saulnier, Marie-Anne Lachaux, Pierre Stock, Teven Le Scao, Thibaut Lavril, Thomas Wang, Timothée Lacroix, William El Sayed. +1. **[mLUKE](https://huggingface.co/docs/transformers/model_doc/mluke)** (from Studio Ousia) released with the paper [mLUKE: The Power of Entity Representations in Multilingual Pretrained Language Models](https://arxiv.org/abs/2110.08151) by Ryokan Ri, Ikuya Yamada, and Yoshimasa Tsuruoka. +1. **[MMS](https://huggingface.co/docs/transformers/model_doc/mms)** (from Facebook) released with the paper [Scaling Speech Technology to 1,000+ Languages](https://arxiv.org/abs/2305.13516) by Vineel Pratap, Andros Tjandra, Bowen Shi, Paden Tomasello, Arun Babu, Sayani Kundu, Ali Elkahky, Zhaoheng Ni, Apoorv Vyas, Maryam Fazel-Zarandi, Alexei Baevski, Yossi Adi, Xiaohui Zhang, Wei-Ning Hsu, Alexis Conneau, Michael Auli. +1. **[MobileBERT](https://huggingface.co/docs/transformers/model_doc/mobilebert)** (from CMU/Google Brain) released with the paper [MobileBERT: a Compact Task-Agnostic BERT for Resource-Limited Devices](https://arxiv.org/abs/2004.02984) by Zhiqing Sun, Hongkun Yu, Xiaodan Song, Renjie Liu, Yiming Yang, and Denny Zhou. +1. **[MobileNetV1](https://huggingface.co/docs/transformers/model_doc/mobilenet_v1)** (from Google Inc.) released with the paper [MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications](https://arxiv.org/abs/1704.04861) by Andrew G. 
Howard, Menglong Zhu, Bo Chen, Dmitry Kalenichenko, Weijun Wang, Tobias Weyand, Marco Andreetto, Hartwig Adam. +1. **[MobileNetV2](https://huggingface.co/docs/transformers/model_doc/mobilenet_v2)** (from Google Inc.) released with the paper [MobileNetV2: Inverted Residuals and Linear Bottlenecks](https://arxiv.org/abs/1801.04381) by Mark Sandler, Andrew Howard, Menglong Zhu, Andrey Zhmoginov, Liang-Chieh Chen. +1. **[MobileViT](https://huggingface.co/docs/transformers/model_doc/mobilevit)** (from Apple) released with the paper [MobileViT: Light-weight, General-purpose, and Mobile-friendly Vision Transformer](https://arxiv.org/abs/2110.02178) by Sachin Mehta and Mohammad Rastegari. +1. **[MobileViTV2](https://huggingface.co/docs/transformers/model_doc/mobilevitv2)** (from Apple) released with the paper [Separable Self-attention for Mobile Vision Transformers](https://arxiv.org/abs/2206.02680) by Sachin Mehta and Mohammad Rastegari. +1. **[MPNet](https://huggingface.co/docs/transformers/model_doc/mpnet)** (from Microsoft Research) released with the paper [MPNet: Masked and Permuted Pre-training for Language Understanding](https://arxiv.org/abs/2004.09297) by Kaitao Song, Xu Tan, Tao Qin, Jianfeng Lu, Tie-Yan Liu. +1. **[MPT](https://huggingface.co/docs/transformers/model_doc/mpt)** (from MosaicML) released with the repository [llm-foundry](https://github.com/mosaicml/llm-foundry/) by the MosaicML NLP Team. +1. **[MRA](https://huggingface.co/docs/transformers/model_doc/mra)** (from the University of Wisconsin - Madison) released with the paper [Multi Resolution Analysis (MRA) for Approximate Self-Attention](https://arxiv.org/abs/2207.10284) by Zhanpeng Zeng, Sourav Pal, Jeffery Kline, Glenn M Fung, Vikas Singh. +1. **[MT5](https://huggingface.co/docs/transformers/model_doc/mt5)** (from Google AI) released with the paper [mT5: A massively multilingual pre-trained text-to-text transformer](https://arxiv.org/abs/2010.11934) by Linting Xue, Noah Constant, Adam Roberts, Mihir Kale, Rami Al-Rfou, Aditya Siddhant, Aditya Barua, Colin Raffel. +1. **[MusicGen](https://huggingface.co/docs/transformers/model_doc/musicgen)** (from Meta) released with the paper [Simple and Controllable Music Generation](https://arxiv.org/abs/2306.05284) by Jade Copet, Felix Kreuk, Itai Gat, Tal Remez, David Kant, Gabriel Synnaeve, Yossi Adi and Alexandre Défossez. +1. **[MusicGen Melody](https://huggingface.co/docs/transformers/model_doc/musicgen_melody)** (from Meta) released with the paper [Simple and Controllable Music Generation](https://arxiv.org/abs/2306.05284) by Jade Copet, Felix Kreuk, Itai Gat, Tal Remez, David Kant, Gabriel Synnaeve, Yossi Adi and Alexandre Défossez. +1. **[MVP](https://huggingface.co/docs/transformers/model_doc/mvp)** (from RUC AI Box) released with the paper [MVP: Multi-task Supervised Pre-training for Natural Language Generation](https://arxiv.org/abs/2206.12131) by Tianyi Tang, Junyi Li, Wayne Xin Zhao and Ji-Rong Wen. +1. **[NAT](https://huggingface.co/docs/transformers/model_doc/nat)** (from SHI Labs) released with the paper [Neighborhood Attention Transformer](https://arxiv.org/abs/2204.07143) by Ali Hassani, Steven Walton, Jiachen Li, Shen Li, and Humphrey Shi. +1. 
**[Nezha](https://huggingface.co/docs/transformers/model_doc/nezha)** (from Huawei Noah’s Ark Lab) released with the paper [NEZHA: Neural Contextualized Representation for Chinese Language Understanding](https://arxiv.org/abs/1909.00204) by Junqiu Wei, Xiaozhe Ren, Xiaoguang Li, Wenyong Huang, Yi Liao, Yasheng Wang, Jiashu Lin, Xin Jiang, Xiao Chen and Qun Liu. +1. **[NLLB](https://huggingface.co/docs/transformers/model_doc/nllb)** (from Meta) released with the paper [No Language Left Behind: Scaling Human-Centered Machine Translation](https://arxiv.org/abs/2207.04672) by the NLLB team. +1. **[NLLB-MOE](https://huggingface.co/docs/transformers/model_doc/nllb-moe)** (from Meta) released with the paper [No Language Left Behind: Scaling Human-Centered Machine Translation](https://arxiv.org/abs/2207.04672) by the NLLB team. +1. **[Nougat](https://huggingface.co/docs/transformers/model_doc/nougat)** (from Meta AI) released with the paper [Nougat: Neural Optical Understanding for Academic Documents](https://arxiv.org/abs/2308.13418) by Lukas Blecher, Guillem Cucurull, Thomas Scialom, Robert Stojnic. +1. **[Nyströmformer](https://huggingface.co/docs/transformers/model_doc/nystromformer)** (from the University of Wisconsin - Madison) released with the paper [Nyströmformer: A Nyström-Based Algorithm for Approximating Self-Attention](https://arxiv.org/abs/2102.03902) by Yunyang Xiong, Zhanpeng Zeng, Rudrasis Chakraborty, Mingxing Tan, Glenn Fung, Yin Li, Vikas Singh. +1. **[OLMo](https://huggingface.co/docs/transformers/model_doc/olmo)** (from AI2) released with the paper [OLMo: Accelerating the Science of Language Models](https://arxiv.org/abs/2402.00838) by Dirk Groeneveld, Iz Beltagy, Pete Walsh, Akshita Bhagia, Rodney Kinney, Oyvind Tafjord, Ananya Harsh Jha, Hamish Ivison, Ian Magnusson, Yizhong Wang, Shane Arora, David Atkinson, Russell Authur, Khyathi Raghavi Chandu, Arman Cohan, Jennifer Dumas, Yanai Elazar, Yuling Gu, Jack Hessel, Tushar Khot, William Merrill, Jacob Morrison, Niklas Muennighoff, Aakanksha Naik, Crystal Nam, Matthew E. Peters, Valentina Pyatkin, Abhilasha Ravichander, Dustin Schwenk, Saurabh Shah, Will Smith, Emma Strubell, Nishant Subramani, Mitchell Wortsman, Pradeep Dasigi, Nathan Lambert, Kyle Richardson, Luke Zettlemoyer, Jesse Dodge, Kyle Lo, Luca Soldaini, Noah A. Smith, Hannaneh Hajishirzi. +1. **[OneFormer](https://huggingface.co/docs/transformers/model_doc/oneformer)** (from SHI Labs) released with the paper [OneFormer: One Transformer to Rule Universal Image Segmentation](https://arxiv.org/abs/2211.06220) by Jitesh Jain, Jiachen Li, MangTik Chiu, Ali Hassani, Nikita Orlov, Humphrey Shi. +1. **[OpenLlama](https://huggingface.co/docs/transformers/model_doc/open-llama)** (from [s-JoL](https://huggingface.co/s-JoL)) released on GitHub (now removed). +1. **[OPT](https://huggingface.co/docs/transformers/master/model_doc/opt)** (from Meta AI) released with the paper [OPT: Open Pre-trained Transformer Language Models](https://arxiv.org/abs/2205.01068) by Susan Zhang, Stephen Roller, Naman Goyal, Mikel Artetxe, Moya Chen, Shuohui Chen et al. +1. 
**[OWL-ViT](https://huggingface.co/docs/transformers/model_doc/owlvit)** (from Google AI) released with the paper [Simple Open-Vocabulary Object Detection with Vision Transformers](https://arxiv.org/abs/2205.06230) by Matthias Minderer, Alexey Gritsenko, Austin Stone, Maxim Neumann, Dirk Weissenborn, Alexey Dosovitskiy, Aravindh Mahendran, Anurag Arnab, Mostafa Dehghani, Zhuoran Shen, Xiao Wang, Xiaohua Zhai, Thomas Kipf, and Neil Houlsby. +1. **[OWLv2](https://huggingface.co/docs/transformers/model_doc/owlv2)** (from Google AI) released with the paper [Scaling Open-Vocabulary Object Detection](https://arxiv.org/abs/2306.09683) by Matthias Minderer, Alexey Gritsenko, Neil Houlsby. +1. **[PatchTSMixer](https://huggingface.co/docs/transformers/model_doc/patchtsmixer)** (from IBM Research) released with the paper [TSMixer: Lightweight MLP-Mixer Model for Multivariate Time Series Forecasting](https://arxiv.org/pdf/2306.09364.pdf) by Vijay Ekambaram, Arindam Jati, Nam Nguyen, Phanwadee Sinthong, Jayant Kalagnanam. +1. **[PatchTST](https://huggingface.co/docs/transformers/model_doc/patchtst)** (from IBM) released with the paper [A Time Series is Worth 64 Words: Long-term Forecasting with Transformers](https://arxiv.org/abs/2211.14730) by Yuqi Nie, Nam H. Nguyen, Phanwadee Sinthong, Jayant Kalagnanam. +1. **[Pegasus](https://huggingface.co/docs/transformers/model_doc/pegasus)** (from Google) released with the paper [PEGASUS: Pre-training with Extracted Gap-sentences for Abstractive Summarization](https://arxiv.org/abs/1912.08777) by Jingqing Zhang, Yao Zhao, Mohammad Saleh and Peter J. Liu. +1. **[PEGASUS-X](https://huggingface.co/docs/transformers/model_doc/pegasus_x)** (from Google) released with the paper [Investigating Efficiently Extending Transformers for Long Input Summarization](https://arxiv.org/abs/2208.04347) by Jason Phang, Yao Zhao, and Peter J. Liu. +1. **[Perceiver IO](https://huggingface.co/docs/transformers/model_doc/perceiver)** (from Deepmind) released with the paper [Perceiver IO: A General Architecture for Structured Inputs & Outputs](https://arxiv.org/abs/2107.14795) by Andrew Jaegle, Sebastian Borgeaud, Jean-Baptiste Alayrac, Carl Doersch, Catalin Ionescu, David Ding, Skanda Koppula, Daniel Zoran, Andrew Brock, Evan Shelhamer, Olivier Hénaff, Matthew M. Botvinick, Andrew Zisserman, Oriol Vinyals, João Carreira. +1. **[Persimmon](https://huggingface.co/docs/transformers/model_doc/persimmon)** (from ADEPT) released in a [blog post](https://www.adept.ai/blog/persimmon-8b) by Erich Elsen, Augustus Odena, Maxwell Nye, Sağnak Taşırlar, Tri Dao, Curtis Hawthorne, Deepak Moparthi, Arushi Somani. +1. **[Phi](https://huggingface.co/docs/transformers/model_doc/phi)** (from Microsoft) released with the papers - [Textbooks Are All You Need](https://arxiv.org/abs/2306.11644) by Suriya Gunasekar, Yi Zhang, Jyoti Aneja, Caio César Teodoro Mendes, Allie Del Giorno, Sivakanth Gopi, Mojan Javaheripi, Piero Kauffmann, Gustavo de Rosa, Olli Saarikivi, Adil Salim, Shital Shah, Harkirat Singh Behl, Xin Wang, Sébastien Bubeck, Ronen Eldan, Adam Tauman Kalai, Yin Tat Lee and Yuanzhi Li, [Textbooks Are All You Need II: phi-1.5 technical report](https://arxiv.org/abs/2309.05463) by Yuanzhi Li, Sébastien Bubeck, Ronen Eldan, Allie Del Giorno, Suriya Gunasekar and Yin Tat Lee. +1. 
**[PhoBERT](https://huggingface.co/docs/transformers/model_doc/phobert)** (from VinAI Research) released with the paper [PhoBERT: Pre-trained language models for Vietnamese](https://www.aclweb.org/anthology/2020.findings-emnlp.92/) by Dat Quoc Nguyen and Anh Tuan Nguyen. +1. **[Pix2Struct](https://huggingface.co/docs/transformers/model_doc/pix2struct)** (from Google) released with the paper [Pix2Struct: Screenshot Parsing as Pretraining for Visual Language Understanding](https://arxiv.org/abs/2210.03347) by Kenton Lee, Mandar Joshi, Iulia Turc, Hexiang Hu, Fangyu Liu, Julian Eisenschlos, Urvashi Khandelwal, Peter Shaw, Ming-Wei Chang, Kristina Toutanova. +1. **[PLBart](https://huggingface.co/docs/transformers/model_doc/plbart)** (from UCLA NLP) released with the paper [Unified Pre-training for Program Understanding and Generation](https://arxiv.org/abs/2103.06333) by Wasi Uddin Ahmad, Saikat Chakraborty, Baishakhi Ray, Kai-Wei Chang. +1. **[PoolFormer](https://huggingface.co/docs/transformers/model_doc/poolformer)** (from Sea AI Labs) released with the paper [MetaFormer is Actually What You Need for Vision](https://arxiv.org/abs/2111.11418) by Yu, Weihao and Luo, Mi and Zhou, Pan and Si, Chenyang and Zhou, Yichen and Wang, Xinchao and Feng, Jiashi and Yan, Shuicheng. +1. **[Pop2Piano](https://huggingface.co/docs/transformers/model_doc/pop2piano)** released with the paper [Pop2Piano : Pop Audio-based Piano Cover Generation](https://arxiv.org/abs/2211.00895) by Jongho Choi and Kyogu Lee. +1. **[ProphetNet](https://huggingface.co/docs/transformers/model_doc/prophetnet)** (from Microsoft Research) released with the paper [ProphetNet: Predicting Future N-gram for Sequence-to-Sequence Pre-training](https://arxiv.org/abs/2001.04063) by Yu Yan, Weizhen Qi, Yeyun Gong, Dayiheng Liu, Nan Duan, Jiusheng Chen, Ruofei Zhang and Ming Zhou. +1. **[PVT](https://huggingface.co/docs/transformers/model_doc/pvt)** (from Nanjing University, The University of Hong Kong etc.) released with the paper [Pyramid Vision Transformer: A Versatile Backbone for Dense Prediction without Convolutions](https://arxiv.org/pdf/2102.12122.pdf) by Wenhai Wang, Enze Xie, Xiang Li, Deng-Ping Fan, Kaitao Song, Ding Liang, Tong Lu, Ping Luo, Ling Shao. +1. **[PVTv2](https://huggingface.co/docs/transformers/model_doc/pvt_v2)** (from Shanghai AI Laboratory, Nanjing University, The University of Hong Kong etc.) released with the paper [PVT v2: Improved Baselines with Pyramid Vision Transformer](https://arxiv.org/abs/2106.13797) by Wenhai Wang, Enze Xie, Xiang Li, Deng-Ping Fan, Kaitao Song, Ding Liang, Tong Lu, Ping Luo, Ling Shao. +1. **[QDQBert](https://huggingface.co/docs/transformers/model_doc/qdqbert)** (from NVIDIA) released with the paper [Integer Quantization for Deep Learning Inference: Principles and Empirical Evaluation](https://arxiv.org/abs/2004.09602) by Hao Wu, Patrick Judd, Xiaojie Zhang, Mikhail Isaev and Paulius Micikevicius. +1. 
**[Qwen2](https://huggingface.co/docs/transformers/model_doc/qwen2)** (from the Qwen team, Alibaba Group) released with the paper [Qwen Technical Report](https://arxiv.org/abs/2309.16609) by Jinze Bai, Shuai Bai, Yunfei Chu, Zeyu Cui, Kai Dang, Xiaodong Deng, Yang Fan, Wenbin Ge, Yu Han, Fei Huang, Binyuan Hui, Luo Ji, Mei Li, Junyang Lin, Runji Lin, Dayiheng Liu, Gao Liu, Chengqiang Lu, Keming Lu, Jianxin Ma, Rui Men, Xingzhang Ren, Xuancheng Ren, Chuanqi Tan, Sinan Tan, Jianhong Tu, Peng Wang, Shijie Wang, Wei Wang, Shengguang Wu, Benfeng Xu, Jin Xu, An Yang, Hao Yang, Jian Yang, Shusheng Yang, Yang Yao, Bowen Yu, Hongyi Yuan, Zheng Yuan, Jianwei Zhang, Xingxuan Zhang, Yichang Zhang, Zhenru Zhang, Chang Zhou, Jingren Zhou, Xiaohuan Zhou and Tianhang Zhu. +1. **[Qwen2MoE](https://huggingface.co/docs/transformers/model_doc/qwen2_moe)** (from the Qwen team, Alibaba Group) released with [blog post](https://qwenlm.github.io/blog/qwen-moe/) by Bo Zheng, Dayiheng Liu, Rui Men, Junyang Lin, Zhou San, Bowen Yu, An Yang, Mingfeng Xue, Fei Huang, Binyuan Hui, Mei Li, Tianyu Liu, Xingzhang Ren, Xuancheng Ren, Kexin Yang, Chang Zhou, Jingren Zhou. +1. **[RAG](https://huggingface.co/docs/transformers/model_doc/rag)** (from Facebook) released with the paper [Retrieval-Augmented Generation for Knowledge-Intensive NLP Tasks](https://arxiv.org/abs/2005.11401) by Patrick Lewis, Ethan Perez, Aleksandara Piktus, Fabio Petroni, Vladimir Karpukhin, Naman Goyal, Heinrich Küttler, Mike Lewis, Wen-tau Yih, Tim Rocktäschel, Sebastian Riedel, Douwe Kiela. +1. **[REALM](https://huggingface.co/docs/transformers/model_doc/realm.html)** (from Google Research) released with the paper [REALM: Retrieval-Augmented Language Model Pre-Training](https://arxiv.org/abs/2002.08909) by Kelvin Guu, Kenton Lee, Zora Tung, Panupong Pasupat and Ming-Wei Chang. +1. **[RecurrentGemma](https://huggingface.co/docs/transformers/model_doc/recurrent-gemma)** (from Google) released with the paper [RecurrentGemma: Moving Past Transformers for Efficient Open Language Models](https://storage.googleapis.com/deepmind-media/gemma/recurrentgemma-report.pdf) by the Griffin, RLHF and Gemma Teams. +1. **[Reformer](https://huggingface.co/docs/transformers/model_doc/reformer)** (from Google Research) released with the paper [Reformer: The Efficient Transformer](https://arxiv.org/abs/2001.04451) by Nikita Kitaev, Łukasz Kaiser, Anselm Levskaya. +1. **[RegNet](https://huggingface.co/docs/transformers/model_doc/regnet)** (from META Platforms) released with the paper [Designing Network Design Space](https://arxiv.org/abs/2003.13678) by Ilija Radosavovic, Raj Prateek Kosaraju, Ross Girshick, Kaiming He, Piotr Dollár. +1. **[RemBERT](https://huggingface.co/docs/transformers/model_doc/rembert)** (from Google Research) released with the paper [Rethinking embedding coupling in pre-trained language models](https://arxiv.org/abs/2010.12821) by Hyung Won Chung, Thibault Févry, Henry Tsai, M. Johnson, Sebastian Ruder. +1. **[ResNet](https://huggingface.co/docs/transformers/model_doc/resnet)** (from Microsoft Research) released with the paper [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) by Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun. +1. 
**[RoBERTa](https://huggingface.co/docs/transformers/model_doc/roberta)** (from Facebook), released together with the paper [RoBERTa: A Robustly Optimized BERT Pretraining Approach](https://arxiv.org/abs/1907.11692) by Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer, Veselin Stoyanov. +1. **[RoBERTa-PreLayerNorm](https://huggingface.co/docs/transformers/model_doc/roberta-prelayernorm)** (from Facebook) released with the paper [fairseq: A Fast, Extensible Toolkit for Sequence Modeling](https://arxiv.org/abs/1904.01038) by Myle Ott, Sergey Edunov, Alexei Baevski, Angela Fan, Sam Gross, Nathan Ng, David Grangier, Michael Auli. +1. **[RoCBert](https://huggingface.co/docs/transformers/model_doc/roc_bert)** (from WeChatAI) released with the paper [RoCBert: Robust Chinese Bert with Multimodal Contrastive Pretraining](https://aclanthology.org/2022.acl-long.65.pdf) by Hui Su, Weiwei Shi, Xiaoyu Shen, Xiao Zhou, Tuo Ji, Jiarui Fang, Jie Zhou. +1. **[RoFormer](https://huggingface.co/docs/transformers/model_doc/roformer)** (from ZhuiyiTechnology), released together with the paper [RoFormer: Enhanced Transformer with Rotary Position Embedding](https://arxiv.org/abs/2104.09864) by Jianlin Su and Yu Lu and Shengfeng Pan and Bo Wen and Yunfeng Liu. +1. **[RWKV](https://huggingface.co/docs/transformers/model_doc/rwkv)** (from Bo Peng), released on [this repo](https://github.com/BlinkDL/RWKV-LM) by Bo Peng. +1. **[SeamlessM4T](https://huggingface.co/docs/transformers/model_doc/seamless_m4t)** (from Meta AI) released with the paper [SeamlessM4T — Massively Multilingual & Multimodal Machine Translation](https://dl.fbaipublicfiles.com/seamless/seamless_m4t_paper.pdf) by the Seamless Communication team. +1. **[SeamlessM4Tv2](https://huggingface.co/docs/transformers/model_doc/seamless_m4t_v2)** (from Meta AI) released with the paper [Seamless: Multilingual Expressive and Streaming Speech Translation](https://ai.meta.com/research/publications/seamless-multilingual-expressive-and-streaming-speech-translation/) by the Seamless Communication team. +1. **[SegFormer](https://huggingface.co/docs/transformers/model_doc/segformer)** (from NVIDIA) released with the paper [SegFormer: Simple and Efficient Design for Semantic Segmentation with Transformers](https://arxiv.org/abs/2105.15203) by Enze Xie, Wenhai Wang, Zhiding Yu, Anima Anandkumar, Jose M. Alvarez, Ping Luo. +1. **[SegGPT](https://huggingface.co/docs/transformers/model_doc/seggpt)** (from Beijing Academy of Artificial Intelligence (BAAI)) released with the paper [SegGPT: Segmenting Everything In Context](https://arxiv.org/abs/2304.03284) by Xinlong Wang, Xiaosong Zhang, Yue Cao, Wen Wang, Chunhua Shen, Tiejun Huang. +1. **[Segment Anything](https://huggingface.co/docs/transformers/model_doc/sam)** (from Meta AI) released with the paper [Segment Anything](https://arxiv.org/pdf/2304.02643v1.pdf) by Alexander Kirillov, Eric Mintun, Nikhila Ravi, Hanzi Mao, Chloe Rolland, Laura Gustafson, Tete Xiao, Spencer Whitehead, Alex Berg, Wan-Yen Lo, Piotr Dollar, Ross Girshick. +1. **[SEW](https://huggingface.co/docs/transformers/model_doc/sew)** (from ASAPP) released with the paper [Performance-Efficiency Trade-offs in Unsupervised Pre-training for Speech Recognition](https://arxiv.org/abs/2109.06870) by Felix Wu, Kwangyoun Kim, Jing Pan, Kyu Han, Kilian Q. Weinberger, Yoav Artzi. +1. 
**[SEW-D](https://huggingface.co/docs/transformers/model_doc/sew_d)** (from ASAPP) released with the paper [Performance-Efficiency Trade-offs in Unsupervised Pre-training for Speech Recognition](https://arxiv.org/abs/2109.06870) by Felix Wu, Kwangyoun Kim, Jing Pan, Kyu Han, Kilian Q. Weinberger, Yoav Artzi. +1. **[SigLIP](https://huggingface.co/docs/transformers/model_doc/siglip)** (from Google AI) released with the paper [Sigmoid Loss for Language Image Pre-Training](https://arxiv.org/abs/2303.15343) by Xiaohua Zhai, Basil Mustafa, Alexander Kolesnikov, Lucas Beyer. +1. **[SpeechT5](https://huggingface.co/docs/transformers/model_doc/speecht5)** (from Microsoft Research) released with the paper [SpeechT5: Unified-Modal Encoder-Decoder Pre-Training for Spoken Language Processing](https://arxiv.org/abs/2110.07205) by Junyi Ao, Rui Wang, Long Zhou, Chengyi Wang, Shuo Ren, Yu Wu, Shujie Liu, Tom Ko, Qing Li, Yu Zhang, Zhihua Wei, Yao Qian, Jinyu Li, Furu Wei. +1. **[SpeechToTextTransformer](https://huggingface.co/docs/transformers/model_doc/speech_to_text)** (from Facebook), released together with the paper [fairseq S2T: Fast Speech-to-Text Modeling with fairseq](https://arxiv.org/abs/2010.05171) by Changhan Wang, Yun Tang, Xutai Ma, Anne Wu, Dmytro Okhonko, Juan Pino. +1. **[SpeechToTextTransformer2](https://huggingface.co/docs/transformers/model_doc/speech_to_text_2)** (from Facebook), released together with the paper [Large-Scale Self- and Semi-Supervised Learning for Speech Translation](https://arxiv.org/abs/2104.06678) by Changhan Wang, Anne Wu, Juan Pino, Alexei Baevski, Michael Auli, Alexis Conneau. +1. **[Splinter](https://huggingface.co/docs/transformers/model_doc/splinter)** (from Tel Aviv University), released together with the paper [Few-Shot Question Answering by Pretraining Span Selection](https://arxiv.org/abs/2101.00438) by Ori Ram, Yuval Kirstain, Jonathan Berant, Amir Globerson, Omer Levy. +1. **[SqueezeBERT](https://huggingface.co/docs/transformers/model_doc/squeezebert)** (from Berkeley) released with the paper [SqueezeBERT: What can computer vision teach NLP about efficient neural networks?](https://arxiv.org/abs/2006.11316) by Forrest N. Iandola, Albert E. Shaw, Ravi Krishna, and Kurt W. Keutzer. +1. **[StableLm](https://huggingface.co/docs/transformers/model_doc/stablelm)** (from Stability AI) released with the paper [StableLM 3B 4E1T (Technical Report)](https://stability.wandb.io/stability-llm/stable-lm/reports/StableLM-3B-4E1T--VmlldzoyMjU4?accessToken=u3zujipenkx5g7rtcj9qojjgxpconyjktjkli2po09nffrffdhhchq045vp0wyfo) by Jonathan Tow, Marco Bellagente, Dakota Mahan, Carlos Riquelme Ruiz, Duy Phung, Maksym Zhuravinskyi, Nathan Cooper, Nikhil Pinnaparaju, Reshinth Adithyan, and James Baicoianu. +1. 
**[Starcoder2](https://huggingface.co/docs/transformers/model_doc/starcoder2)** (from BigCode team) released with the paper [StarCoder 2 and The Stack v2: The Next Generation](https://arxiv.org/abs/2402.19173) by Anton Lozhkov, Raymond Li, Loubna Ben Allal, Federico Cassano, Joel Lamy-Poirier, Nouamane Tazi, Ao Tang, Dmytro Pykhtar, Jiawei Liu, Yuxiang Wei, Tianyang Liu, Max Tian, Denis Kocetkov, Arthur Zucker, Younes Belkada, Zijian Wang, Qian Liu, Dmitry Abulkhanov, Indraneil Paul, Zhuang Li, Wen-Ding Li, Megan Risdal, Jia Li, Jian Zhu, Terry Yue Zhuo, Evgenii Zheltonozhskii, Nii Osae Osae Dade, Wenhao Yu, Lucas Krauß, Naman Jain, Yixuan Su, Xuanli He, Manan Dey, Edoardo Abati, Yekun Chai, Niklas Muennighoff, Xiangru Tang, Muhtasham Oblokulov, Christopher Akiki, Marc Marone, Chenghao Mou, Mayank Mishra, Alex Gu, Binyuan Hui, Tri Dao, Armel Zebaze, Olivier Dehaene, Nicolas Patry, Canwen Xu, Julian McAuley, Han Hu, Torsten Scholak, Sebastien Paquet, Jennifer Robinson, Carolyn Jane Anderson, Nicolas Chapados, Mostofa Patwary, Nima Tajbakhsh, Yacine Jernite, Carlos Muñoz Ferrandis, Lingming Zhang, Sean Hughes, Thomas Wolf, Arjun Guha, Leandro von Werra, and Harm de Vries. +1. **[SuperPoint](https://huggingface.co/docs/transformers/model_doc/superpoint)** (from MagicLeap) released with the paper [SuperPoint: Self-Supervised Interest Point Detection and Description](https://arxiv.org/abs/1712.07629) by Daniel DeTone, Tomasz Malisiewicz and Andrew Rabinovich. +1. **[SwiftFormer](https://huggingface.co/docs/transformers/model_doc/swiftformer)** (from MBZUAI) released with the paper [SwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications](https://arxiv.org/abs/2303.15446) by Abdelrahman Shaker, Muhammad Maaz, Hanoona Rasheed, Salman Khan, Ming-Hsuan Yang, Fahad Shahbaz Khan. +1. **[Swin Transformer](https://huggingface.co/docs/transformers/model_doc/swin)** (from Microsoft) released with the paper [Swin Transformer: Hierarchical Vision Transformer using Shifted Windows](https://arxiv.org/abs/2103.14030) by Ze Liu, Yutong Lin, Yue Cao, Han Hu, Yixuan Wei, Zheng Zhang, Stephen Lin, Baining Guo. +1. **[Swin Transformer V2](https://huggingface.co/docs/transformers/model_doc/swinv2)** (from Microsoft) released with the paper [Swin Transformer V2: Scaling Up Capacity and Resolution](https://arxiv.org/abs/2111.09883) by Ze Liu, Han Hu, Yutong Lin, Zhuliang Yao, Zhenda Xie, Yixuan Wei, Jia Ning, Yue Cao, Zheng Zhang, Li Dong, Furu Wei, Baining Guo. +1. **[Swin2SR](https://huggingface.co/docs/transformers/model_doc/swin2sr)** (from University of Würzburg) released with the paper [Swin2SR: SwinV2 Transformer for Compressed Image Super-Resolution and Restoration](https://arxiv.org/abs/2209.11345) by Marcos V. Conde, Ui-Jin Choi, Maxime Burchi, Radu Timofte. +1. **[SwitchTransformers](https://huggingface.co/docs/transformers/model_doc/switch_transformers)** (from Google) released with the paper [Switch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity](https://arxiv.org/abs/2101.03961) by William Fedus, Barret Zoph, Noam Shazeer. +1. **[T5](https://huggingface.co/docs/transformers/model_doc/t5)** (from Google AI) released with the paper [Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer](https://arxiv.org/abs/1910.10683) by Colin Raffel and Noam Shazeer and Adam Roberts and Katherine Lee and Sharan Narang and Michael Matena and Yanqi Zhou and Wei Li and Peter J. Liu. +1. 
**[T5v1.1](https://huggingface.co/docs/transformers/model_doc/t5v1.1)** (from Google AI) released in the repository [google-research/text-to-text-transfer-transformer](https://github.com/google-research/text-to-text-transfer-transformer/blob/main/released_checkpoints.md#t511) by Colin Raffel and Noam Shazeer and Adam Roberts and Katherine Lee and Sharan Narang and Michael Matena and Yanqi Zhou and Wei Li and Peter J. Liu. +1. **[Table Transformer](https://huggingface.co/docs/transformers/model_doc/table-transformer)** (from Microsoft Research) released with the paper [PubTables-1M: Towards Comprehensive Table Extraction From Unstructured Documents](https://arxiv.org/abs/2110.00061) by Brandon Smock, Rohith Pesala, Robin Abraham. +1. **[TAPAS](https://huggingface.co/docs/transformers/model_doc/tapas)** (from Google AI) released with the paper [TAPAS: Weakly Supervised Table Parsing via Pre-training](https://arxiv.org/abs/2004.02349) by Jonathan Herzig, Paweł Krzysztof Nowak, Thomas Müller, Francesco Piccinno and Julian Martin Eisenschlos. +1. **[TAPEX](https://huggingface.co/docs/transformers/model_doc/tapex)** (from Microsoft Research) released with the paper [TAPEX: Table Pre-training via Learning a Neural SQL Executor](https://arxiv.org/abs/2107.07653) by Qian Liu, Bei Chen, Jiaqi Guo, Morteza Ziyadi, Zeqi Lin, Weizhu Chen, Jian-Guang Lou. +1. **[Time Series Transformer](https://huggingface.co/docs/transformers/model_doc/time_series_transformer)** (from HuggingFace). +1. **[TimeSformer](https://huggingface.co/docs/transformers/model_doc/timesformer)** (from Facebook) released with the paper [Is Space-Time Attention All You Need for Video Understanding?](https://arxiv.org/abs/2102.05095) by Gedas Bertasius, Heng Wang, Lorenzo Torresani. +1. **[Trajectory Transformer](https://huggingface.co/docs/transformers/model_doc/trajectory_transformers)** (from the University of California at Berkeley) released with the paper [Offline Reinforcement Learning as One Big Sequence Modeling Problem](https://arxiv.org/abs/2106.02039) by Michael Janner, Qiyang Li, Sergey Levine +1. **[Transformer-XL](https://huggingface.co/docs/transformers/model_doc/transfo-xl)** (from Google/CMU) released with the paper [Transformer-XL: Attentive Language Models Beyond a Fixed-Length Context](https://arxiv.org/abs/1901.02860) by Zihang Dai*, Zhilin Yang*, Yiming Yang, Jaime Carbonell, Quoc V. Le, Ruslan Salakhutdinov. +1. **[TrOCR](https://huggingface.co/docs/transformers/model_doc/trocr)** (from Microsoft), released together with the paper [TrOCR: Transformer-based Optical Character Recognition with Pre-trained Models](https://arxiv.org/abs/2109.10282) by Minghao Li, Tengchao Lv, Lei Cui, Yijuan Lu, Dinei Florencio, Cha Zhang, Zhoujun Li, Furu Wei. +1. **[TVLT](https://huggingface.co/docs/transformers/model_doc/tvlt)** (from UNC Chapel Hill) released with the paper [TVLT: Textless Vision-Language Transformer](https://arxiv.org/abs/2209.14156) by Zineng Tang, Jaemin Cho, Yixin Nie, Mohit Bansal. +1. **[TVP](https://huggingface.co/docs/transformers/model_doc/tvp)** (from Intel) released with the paper [Text-Visual Prompting for Efficient 2D Temporal Video Grounding](https://arxiv.org/abs/2303.04995) by Yimeng Zhang, Xin Chen, Jinghan Jia, Sijia Liu, Ke Ding. +1. 
**[UDOP](https://huggingface.co/docs/transformers/model_doc/udop)** (from Microsoft Research) released with the paper [Unifying Vision, Text, and Layout for Universal Document Processing](https://arxiv.org/abs/2212.02623) by Zineng Tang, Ziyi Yang, Guoxin Wang, Yuwei Fang, Yang Liu, Chenguang Zhu, Michael Zeng, Cha Zhang, Mohit Bansal. +1. **[UL2](https://huggingface.co/docs/transformers/model_doc/ul2)** (from Google Research) released with the paper [Unifying Language Learning Paradigms](https://arxiv.org/abs/2205.05131v1) by Yi Tay, Mostafa Dehghani, Vinh Q. Tran, Xavier Garcia, Dara Bahri, Tal Schuster, Huaixiu Steven Zheng, Neil Houlsby, Donald Metzler. +1. **[UMT5](https://huggingface.co/docs/transformers/model_doc/umt5)** (from Google Research) released with the paper [UniMax: Fairer and More Effective Language Sampling for Large-Scale Multilingual Pretraining](https://openreview.net/forum?id=kXwdL1cWOAi) by Hyung Won Chung, Xavier Garcia, Adam Roberts, Yi Tay, Orhan Firat, Sharan Narang, Noah Constant. +1. **[UniSpeech](https://huggingface.co/docs/transformers/model_doc/unispeech)** (from Microsoft Research) released with the paper [UniSpeech: Unified Speech Representation Learning with Labeled and Unlabeled Data](https://arxiv.org/abs/2101.07597) by Chengyi Wang, Yu Wu, Yao Qian, Kenichi Kumatani, Shujie Liu, Furu Wei, Michael Zeng, Xuedong Huang. +1. **[UniSpeechSat](https://huggingface.co/docs/transformers/model_doc/unispeech-sat)** (from Microsoft Research) released with the paper [UNISPEECH-SAT: UNIVERSAL SPEECH REPRESENTATION LEARNING WITH SPEAKER AWARE PRE-TRAINING](https://arxiv.org/abs/2110.05752) by Sanyuan Chen, Yu Wu, Chengyi Wang, Zhengyang Chen, Zhuo Chen, Shujie Liu, Jian Wu, Yao Qian, Furu Wei, Jinyu Li, Xiangzhan Yu. +1. **[UnivNet](https://huggingface.co/docs/transformers/model_doc/univnet)** (from Kakao Corporation) released with the paper [UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation](https://arxiv.org/abs/2106.07889) by Won Jang, Dan Lim, Jaesam Yoon, Bongwan Kim, and Juntae Kim. +1. **[UPerNet](https://huggingface.co/docs/transformers/model_doc/upernet)** (from Peking University) released with the paper [Unified Perceptual Parsing for Scene Understanding](https://arxiv.org/abs/1807.10221) by Tete Xiao, Yingcheng Liu, Bolei Zhou, Yuning Jiang, Jian Sun. +1. **[VAN](https://huggingface.co/docs/transformers/model_doc/van)** (from Tsinghua University and Nankai University) released with the paper [Visual Attention Network](https://arxiv.org/abs/2202.09741) by Meng-Hao Guo, Cheng-Ze Lu, Zheng-Ning Liu, Ming-Ming Cheng, Shi-Min Hu. +1. **[VideoMAE](https://huggingface.co/docs/transformers/model_doc/videomae)** (from Multimedia Computing Group, Nanjing University) released with the paper [VideoMAE: Masked Autoencoders are Data-Efficient Learners for Self-Supervised Video Pre-Training](https://arxiv.org/abs/2203.12602) by Zhan Tong, Yibing Song, Jue Wang, Limin Wang. +1. **[ViLT](https://huggingface.co/docs/transformers/model_doc/vilt)** (from NAVER AI Lab/Kakao Enterprise/Kakao Brain) released with the paper [ViLT: Vision-and-Language Transformer Without Convolution or Region Supervision](https://arxiv.org/abs/2102.03334) by Wonjae Kim, Bokyung Son, Ildoo Kim. +1. 
**[VipLlava](https://huggingface.co/docs/transformers/model_doc/vipllava)** (from University of Wisconsin–Madison) released with the paper [Making Large Multimodal Models Understand Arbitrary Visual Prompts](https://arxiv.org/abs/2312.00784) by Mu Cai, Haotian Liu, Siva Karthik Mustikovela, Gregory P. Meyer, Yuning Chai, Dennis Park, Yong Jae Lee. +1. **[Vision Transformer (ViT)](https://huggingface.co/docs/transformers/model_doc/vit)** (from Google AI) released with the paper [An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale](https://arxiv.org/abs/2010.11929) by Alexey Dosovitskiy, Lucas Beyer, Alexander Kolesnikov, Dirk Weissenborn, Xiaohua Zhai, Thomas Unterthiner, Mostafa Dehghani, Matthias Minderer, Georg Heigold, Sylvain Gelly, Jakob Uszkoreit, Neil Houlsby. +1. **[VisualBERT](https://huggingface.co/docs/transformers/model_doc/visual_bert)** (from UCLA NLP) released with the paper [VisualBERT: A Simple and Performant Baseline for Vision and Language](https://arxiv.org/pdf/1908.03557) by Liunian Harold Li, Mark Yatskar, Da Yin, Cho-Jui Hsieh, Kai-Wei Chang. +1. **[ViT Hybrid](https://huggingface.co/docs/transformers/model_doc/vit_hybrid)** (from Google AI) released with the paper [An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale](https://arxiv.org/abs/2010.11929) by Alexey Dosovitskiy, Lucas Beyer, Alexander Kolesnikov, Dirk Weissenborn, Xiaohua Zhai, Thomas Unterthiner, Mostafa Dehghani, Matthias Minderer, Georg Heigold, Sylvain Gelly, Jakob Uszkoreit, Neil Houlsby. +1. **[VitDet](https://huggingface.co/docs/transformers/model_doc/vitdet)** (from Meta AI) released with the paper [Exploring Plain Vision Transformer Backbones for Object Detection](https://arxiv.org/abs/2203.16527) by Yanghao Li, Hanzi Mao, Ross Girshick, Kaiming He. +1. **[ViTMAE](https://huggingface.co/docs/transformers/model_doc/vit_mae)** (from Meta AI) released with the paper [Masked Autoencoders Are Scalable Vision Learners](https://arxiv.org/abs/2111.06377) by Kaiming He, Xinlei Chen, Saining Xie, Yanghao Li, Piotr Dollár, Ross Girshick. +1. **[ViTMatte](https://huggingface.co/docs/transformers/model_doc/vitmatte)** (from HUST-VL) released with the paper [ViTMatte: Boosting Image Matting with Pretrained Plain Vision Transformers](https://arxiv.org/abs/2305.15272) by Jingfeng Yao, Xinggang Wang, Shusheng Yang, Baoyuan Wang. +1. **[ViTMSN](https://huggingface.co/docs/transformers/model_doc/vit_msn)** (from Meta AI) released with the paper [Masked Siamese Networks for Label-Efficient Learning](https://arxiv.org/abs/2204.07141) by Mahmoud Assran, Mathilde Caron, Ishan Misra, Piotr Bojanowski, Florian Bordes, Pascal Vincent, Armand Joulin, Michael Rabbat, Nicolas Ballas. +1. **[VITS](https://huggingface.co/docs/transformers/model_doc/vits)** (from Kakao Enterprise) released with the paper [Conditional Variational Autoencoder with Adversarial Learning for End-to-End Text-to-Speech](https://arxiv.org/abs/2106.06103) by Jaehyeon Kim, Jungil Kong, Juhee Son. +1. **[ViViT](https://huggingface.co/docs/transformers/model_doc/vivit)** (from Google Research) released with the paper [ViViT: A Video Vision Transformer](https://arxiv.org/abs/2103.15691) by Anurag Arnab, Mostafa Dehghani, Georg Heigold, Chen Sun, Mario Lučić, Cordelia Schmid. +1. 
**[Wav2Vec2](https://huggingface.co/docs/transformers/model_doc/wav2vec2)** (from Facebook AI) released with the paper [wav2vec 2.0: A Framework for Self-Supervised Learning of Speech Representations](https://arxiv.org/abs/2006.11477) by Alexei Baevski, Henry Zhou, Abdelrahman Mohamed, Michael Auli. +1. **[Wav2Vec2-BERT](https://huggingface.co/docs/transformers/model_doc/wav2vec2-bert)** (from Meta AI) released with the paper [Seamless: Multilingual Expressive and Streaming Speech Translation](https://ai.meta.com/research/publications/seamless-multilingual-expressive-and-streaming-speech-translation/) by the Seamless Communication team. +1. **[Wav2Vec2-Conformer](https://huggingface.co/docs/transformers/model_doc/wav2vec2-conformer)** (from Facebook AI) released with the paper [FAIRSEQ S2T: Fast Speech-to-Text Modeling with FAIRSEQ](https://arxiv.org/abs/2010.05171) by Changhan Wang, Yun Tang, Xutai Ma, Anne Wu, Sravya Popuri, Dmytro Okhonko, Juan Pino. +1. **[Wav2Vec2Phoneme](https://huggingface.co/docs/transformers/model_doc/wav2vec2_phoneme)** (from Facebook AI) released with the paper [Simple and Effective Zero-shot Cross-lingual Phoneme Recognition](https://arxiv.org/abs/2109.11680) by Qiantong Xu, Alexei Baevski, Michael Auli. +1. **[WavLM](https://huggingface.co/docs/transformers/model_doc/wavlm)** (from Microsoft Research) released with the paper [WavLM: Large-Scale Self-Supervised Pre-Training for Full Stack Speech Processing](https://arxiv.org/abs/2110.13900) by Sanyuan Chen, Chengyi Wang, Zhengyang Chen, Yu Wu, Shujie Liu, Zhuo Chen, Jinyu Li, Naoyuki Kanda, Takuya Yoshioka, Xiong Xiao, Jian Wu, Long Zhou, Shuo Ren, Yanmin Qian, Yao Qian, Jian Wu, Michael Zeng, Furu Wei. +1. **[Whisper](https://huggingface.co/docs/transformers/model_doc/whisper)** (from OpenAI) released with the paper [Robust Speech Recognition via Large-Scale Weak Supervision](https://cdn.openai.com/papers/whisper.pdf) by Alec Radford, Jong Wook Kim, Tao Xu, Greg Brockman, Christine McLeavey, Ilya Sutskever. +1. **[X-CLIP](https://huggingface.co/docs/transformers/model_doc/xclip)** (from Microsoft Research) released with the paper [Expanding Language-Image Pretrained Models for General Video Recognition](https://arxiv.org/abs/2208.02816) by Bolin Ni, Houwen Peng, Minghao Chen, Songyang Zhang, Gaofeng Meng, Jianlong Fu, Shiming Xiang, Haibin Ling. +1. **[X-MOD](https://huggingface.co/docs/transformers/model_doc/xmod)** (from Meta AI) released with the paper [Lifting the Curse of Multilinguality by Pre-training Modular Transformers](http://dx.doi.org/10.18653/v1/2022.naacl-main.255) by Jonas Pfeiffer, Naman Goyal, Xi Lin, Xian Li, James Cross, Sebastian Riedel, Mikel Artetxe. +1. **[XGLM](https://huggingface.co/docs/transformers/model_doc/xglm)** (from Facebook AI) released with the paper [Few-shot Learning with Multilingual Language Models](https://arxiv.org/abs/2112.10668) by Xi Victoria Lin, Todor Mihaylov, Mikel Artetxe, Tianlu Wang, Shuohui Chen, Daniel Simig, Myle Ott, Naman Goyal, Shruti Bhosale, Jingfei Du, Ramakanth Pasunuru, Sam Shleifer, Punit Singh Koura, Vishrav Chaudhary, Brian O'Horo, Jeff Wang, Luke Zettlemoyer, Zornitsa Kozareva, Mona Diab, Veselin Stoyanov, Xian Li. +1. **[XLM](https://huggingface.co/docs/transformers/model_doc/xlm)** (from Facebook) released together with the paper [Cross-lingual Language Model Pretraining](https://arxiv.org/abs/1901.07291) by Guillaume Lample and Alexis Conneau. +1. 
**[XLM-ProphetNet](https://huggingface.co/docs/transformers/model_doc/xlm-prophetnet)** (from Microsoft Research) released with the paper [ProphetNet: Predicting Future N-gram for Sequence-to-Sequence Pre-training](https://arxiv.org/abs/2001.04063) by Yu Yan, Weizhen Qi, Yeyun Gong, Dayiheng Liu, Nan Duan, Jiusheng Chen, Ruofei Zhang and Ming Zhou. +1. **[XLM-RoBERTa](https://huggingface.co/docs/transformers/model_doc/xlm-roberta)** (from Facebook AI), released together with the paper [Unsupervised Cross-lingual Representation Learning at Scale](https://arxiv.org/abs/1911.02116) by Alexis Conneau*, Kartikay Khandelwal*, Naman Goyal, Vishrav Chaudhary, Guillaume Wenzek, Francisco Guzmán, Edouard Grave, Myle Ott, Luke Zettlemoyer and Veselin Stoyanov. +1. **[XLM-RoBERTa-XL](https://huggingface.co/docs/transformers/model_doc/xlm-roberta-xl)** (from Facebook AI), released together with the paper [Larger-Scale Transformers for Multilingual Masked Language Modeling](https://arxiv.org/abs/2105.00572) by Naman Goyal, Jingfei Du, Myle Ott, Giri Anantharaman, Alexis Conneau. +1. **[XLM-V](https://huggingface.co/docs/transformers/model_doc/xlm-v)** (from Meta AI) released with the paper [XLM-V: Overcoming the Vocabulary Bottleneck in Multilingual Masked Language Models](https://arxiv.org/abs/2301.10472) by Davis Liang, Hila Gonen, Yuning Mao, Rui Hou, Naman Goyal, Marjan Ghazvininejad, Luke Zettlemoyer, Madian Khabsa. +1. **[XLNet](https://huggingface.co/docs/transformers/model_doc/xlnet)** (from Google/CMU) released with the paper [XLNet: Generalized Autoregressive Pretraining for Language Understanding](https://arxiv.org/abs/1906.08237) by Zhilin Yang*, Zihang Dai*, Yiming Yang, Jaime Carbonell, Ruslan Salakhutdinov, Quoc V. Le. +1. **[XLS-R](https://huggingface.co/docs/transformers/model_doc/xls_r)** (from Facebook AI) released with the paper [XLS-R: Self-supervised Cross-lingual Speech Representation Learning at Scale](https://arxiv.org/abs/2111.09296) by Arun Babu, Changhan Wang, Andros Tjandra, Kushal Lakhotia, Qiantong Xu, Naman Goyal, Kritika Singh, Patrick von Platen, Yatharth Saraf, Juan Pino, Alexei Baevski, Alexis Conneau, Michael Auli. +1. **[XLSR-Wav2Vec2](https://huggingface.co/docs/transformers/model_doc/xlsr_wav2vec2)** (from Facebook AI) released with the paper [Unsupervised Cross-Lingual Representation Learning For Speech Recognition](https://arxiv.org/abs/2006.13979) by Alexis Conneau, Alexei Baevski, Ronan Collobert, Abdelrahman Mohamed, Michael Auli. +1. **[YOLOS](https://huggingface.co/docs/transformers/model_doc/yolos)** (from Huazhong University of Science & Technology) released with the paper [You Only Look at One Sequence: Rethinking Transformer in Vision through Object Detection](https://arxiv.org/abs/2106.00666) by Yuxin Fang, Bencheng Liao, Xinggang Wang, Jiemin Fang, Jiyang Qi, Rui Wu, Jianwei Niu, Wenyu Liu. +1. **[YOSO](https://huggingface.co/docs/transformers/model_doc/yoso)** (from the University of Wisconsin - Madison) released with the paper [You Only Sample (Almost) Once: Linear Cost Self-Attention Via Bernoulli Sampling](https://arxiv.org/abs/2111.09714) by Zhanpeng Zeng, Yunyang Xiong, Sathya N. Ravi, Shailesh Acharya, Glenn Fung, Vikas Singh. +1. Want to contribute a new model? We have added a **detailed guide and templates** to guide you in the process of adding a new model. You can find them in the [`templates`](./templates) folder of the repository. 
Be sure to check the [contributing guidelines](./CONTRIBUTING.md) and contact the maintainers or open an issue to collect feedback before starting your PR. + +To check if each model has an implementation in Flax, PyTorch or TensorFlow, or has an associated tokenizer backed by the 🤗 Tokenizers library, refer to [this table](https://huggingface.co/docs/transformers/index#supported-frameworks). + +These implementations have been tested on several datasets (see the example scripts) and should match the performance of the original implementations. You can find more details on performance in the Examples section of the [documentation](https://github.com/huggingface/transformers/tree/main/examples). + + +## Learn more + +| Section | Description | +|-|-| +| [Documentation](https://huggingface.co/docs/transformers/) | Full API documentation and tutorials | +| [Task summary](https://huggingface.co/docs/transformers/task_summary) | Tasks supported by 🤗 Transformers | +| [Preprocessing tutorial](https://huggingface.co/docs/transformers/preprocessing) | Using the `Tokenizer` class to prepare data for the models | +| [Training and fine-tuning](https://huggingface.co/docs/transformers/training) | Using the models provided by 🤗 Transformers in a PyTorch/TensorFlow training loop and the `Trainer` API | +| [Quick tour: Fine-tuning/usage scripts](https://github.com/huggingface/transformers/tree/main/examples) | Example scripts for fine-tuning models on a wide range of tasks | +| [Model sharing and uploading](https://huggingface.co/docs/transformers/model_sharing) | Upload and share your fine-tuned models with the community | + +## Citation + +We now have a [paper](https://www.aclweb.org/anthology/2020.emnlp-demos.6/) you can cite for the 🤗 Transformers library: +```bibtex +@inproceedings{wolf-etal-2020-transformers, + title = "Transformers: State-of-the-Art Natural Language Processing", + author = "Thomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. 
Rush", + booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations", + month = oct, + year = "2020", + address = "Online", + publisher = "Association for Computational Linguistics", + url = "https://www.aclweb.org/anthology/2020.emnlp-demos.6", + pages = "38--45" +} +``` diff --git a/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/RECORD b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..86a4f959a8ef89e7553b22e7de3b31b66a737537 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/RECORD @@ -0,0 +1,3309 @@ +../../../bin/transformers-cli,sha256=r4qn7Bu1z7LxaIeESRDyPhXpIsVjJU4m7DIk01dJNc4,271 +transformers-4.40.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +transformers-4.40.1.dist-info/LICENSE,sha256=d_1HEN757DwPYiWADgI18VpCWr1KiwNVkSf814JhIEk,11418 +transformers-4.40.1.dist-info/METADATA,sha256=Dh24LVBcPQN9VxgenNOjQPbsSU0ty_YErjGeWqrR8nA,137978 +transformers-4.40.1.dist-info/RECORD,, +transformers-4.40.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +transformers-4.40.1.dist-info/entry_points.txt,sha256=kgdW_0F_tXNrWKSZXKWKeUD_LqVgcji9j7atGXve8z4,81 +transformers-4.40.1.dist-info/top_level.txt,sha256=GLBaeTo_CSdhnHvbxQ0kzpEHdlLuA_33foIogaWxntI,13 +transformers/__init__.py,sha256=I9TlFMq2iw-kseOF5e2ojv2eT-9RX_1Ppyph-Z33pVc,331699 +transformers/__pycache__/__init__.cpython-310.pyc,, +transformers/__pycache__/activations.cpython-310.pyc,, +transformers/__pycache__/activations_tf.cpython-310.pyc,, +transformers/__pycache__/audio_utils.cpython-310.pyc,, +transformers/__pycache__/cache_utils.cpython-310.pyc,, +transformers/__pycache__/configuration_utils.cpython-310.pyc,, +transformers/__pycache__/convert_graph_to_onnx.cpython-310.pyc,, +transformers/__pycache__/convert_pytorch_checkpoint_to_tf2.cpython-310.pyc,, +transformers/__pycache__/convert_slow_tokenizer.cpython-310.pyc,, +transformers/__pycache__/convert_slow_tokenizers_checkpoints_to_fast.cpython-310.pyc,, +transformers/__pycache__/convert_tf_hub_seq_to_seq_bert_to_pytorch.cpython-310.pyc,, +transformers/__pycache__/debug_utils.cpython-310.pyc,, +transformers/__pycache__/deepspeed.cpython-310.pyc,, +transformers/__pycache__/dependency_versions_check.cpython-310.pyc,, +transformers/__pycache__/dependency_versions_table.cpython-310.pyc,, +transformers/__pycache__/dynamic_module_utils.cpython-310.pyc,, +transformers/__pycache__/feature_extraction_sequence_utils.cpython-310.pyc,, +transformers/__pycache__/feature_extraction_utils.cpython-310.pyc,, +transformers/__pycache__/file_utils.cpython-310.pyc,, +transformers/__pycache__/hf_argparser.cpython-310.pyc,, +transformers/__pycache__/hyperparameter_search.cpython-310.pyc,, +transformers/__pycache__/image_processing_utils.cpython-310.pyc,, +transformers/__pycache__/image_transforms.cpython-310.pyc,, +transformers/__pycache__/image_utils.cpython-310.pyc,, +transformers/__pycache__/keras_callbacks.cpython-310.pyc,, +transformers/__pycache__/modelcard.cpython-310.pyc,, +transformers/__pycache__/modeling_attn_mask_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_outputs.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_flax_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_outputs.cpython-310.pyc,, 
+transformers/__pycache__/modeling_tf_outputs.cpython-310.pyc,, +transformers/__pycache__/modeling_tf_pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_tf_utils.cpython-310.pyc,, +transformers/__pycache__/modeling_utils.cpython-310.pyc,, +transformers/__pycache__/optimization.cpython-310.pyc,, +transformers/__pycache__/optimization_tf.cpython-310.pyc,, +transformers/__pycache__/processing_utils.cpython-310.pyc,, +transformers/__pycache__/pytorch_utils.cpython-310.pyc,, +transformers/__pycache__/safetensors_conversion.cpython-310.pyc,, +transformers/__pycache__/testing_utils.cpython-310.pyc,, +transformers/__pycache__/tf_utils.cpython-310.pyc,, +transformers/__pycache__/time_series_utils.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils_base.cpython-310.pyc,, +transformers/__pycache__/tokenization_utils_fast.cpython-310.pyc,, +transformers/__pycache__/trainer.cpython-310.pyc,, +transformers/__pycache__/trainer_callback.cpython-310.pyc,, +transformers/__pycache__/trainer_pt_utils.cpython-310.pyc,, +transformers/__pycache__/trainer_seq2seq.cpython-310.pyc,, +transformers/__pycache__/trainer_utils.cpython-310.pyc,, +transformers/__pycache__/training_args.cpython-310.pyc,, +transformers/__pycache__/training_args_seq2seq.cpython-310.pyc,, +transformers/__pycache__/training_args_tf.cpython-310.pyc,, +transformers/activations.py,sha256=EMN-kVzitS1TmltS7Kr2ROKwxW0oLbAHeAmNdDQuvu4,8177 +transformers/activations_tf.py,sha256=u2Y9dgDRgW-YbN_J-xmd05EK4p24rV8ZkzrQzpz4lCI,4689 +transformers/audio_utils.py,sha256=QhEp44hIpjSaSR3hPUKpEmyNXhgcxK8-2kd9Wt5BjdU,36788 +transformers/benchmark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/benchmark/__pycache__/__init__.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args_tf.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_args_utils.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_tf.cpython-310.pyc,, +transformers/benchmark/__pycache__/benchmark_utils.cpython-310.pyc,, +transformers/benchmark/benchmark.py,sha256=q2Jk1RyHOtzNe7vDSVjkL9Kf1jkMiGZsJPDmsACnxxY,10752 +transformers/benchmark/benchmark_args.py,sha256=djFAjBC11MnI-auxByCWSVVAqRqXGV650Leosd60VmA,4050 +transformers/benchmark/benchmark_args_tf.py,sha256=bAcsgf7bOUyoo8AGFSiQhciR8S5wMJqnL5iVlvbQzow,4735 +transformers/benchmark/benchmark_args_utils.py,sha256=pkgvor3IuC5v9BubOCFVuwbgGHsoGkNp1CDdgJlyBi4,6499 +transformers/benchmark/benchmark_tf.py,sha256=aEjclKepsQhn6vjxVJ5l2ho0ptUJuvaSYfuP4rJE6MQ,13251 +transformers/benchmark/benchmark_utils.py,sha256=f9fv_EF1GwfK6A9wS6O-AYDrjI_cBflTbffL32iFTY0,37600 +transformers/cache_utils.py,sha256=KktrOY-OqGMUUKhxsKG7xrlyIUiXBkjGeHRaBlc78Yw,20155 +transformers/commands/__init__.py,sha256=aFO3I7C6G9OLA9JZSc_yMaZl0glOQtjNPjqMFfu9wfQ,923 +transformers/commands/__pycache__/__init__.cpython-310.pyc,, +transformers/commands/__pycache__/add_new_model.cpython-310.pyc,, +transformers/commands/__pycache__/add_new_model_like.cpython-310.pyc,, +transformers/commands/__pycache__/convert.cpython-310.pyc,, +transformers/commands/__pycache__/download.cpython-310.pyc,, +transformers/commands/__pycache__/env.cpython-310.pyc,, +transformers/commands/__pycache__/lfs.cpython-310.pyc,, +transformers/commands/__pycache__/pt_to_tf.cpython-310.pyc,, 
+transformers/commands/__pycache__/run.cpython-310.pyc,, +transformers/commands/__pycache__/serving.cpython-310.pyc,, +transformers/commands/__pycache__/train.cpython-310.pyc,, +transformers/commands/__pycache__/transformers_cli.cpython-310.pyc,, +transformers/commands/__pycache__/user.cpython-310.pyc,, +transformers/commands/add_new_model.py,sha256=H8_UkJ8TYyC8sEMqE4Iu-Izq3lJi93l813oU-LI2XyY,11062 +transformers/commands/add_new_model_like.py,sha256=kI87CNAiy_WPsOMJopWmS3ZHQtw95ULORRCJcFRDcww,70870 +transformers/commands/convert.py,sha256=lHz2sQti9HubMNwObLCc_sw9Y7L-IPcaYJMSJR_AVWM,7068 +transformers/commands/download.py,sha256=GKPadx-YGBL7dHJSEcUp-QNOP3R2L71-gPGP0z6NNQI,2395 +transformers/commands/env.py,sha256=q21O011lwdgGX862xAxH1Pjhd53uuxgB3g6C8cfNGV4,5316 +transformers/commands/lfs.py,sha256=4QDGBbJxBcRpgmhHXvigZQUsXuTPwrRY60t1qGjzfWU,8001 +transformers/commands/pt_to_tf.py,sha256=qVXHzdjjik3n_y8Ci8A6Wg6ag0eX0T6Dj36-sSv18Xg,20540 +transformers/commands/run.py,sha256=nyEe2lOoj6e0EOxjKeF08hdW9WVWa101r9hWXl9v3Jo,4249 +transformers/commands/serving.py,sha256=CnNHFVM_SK_-aNxEJnq7vJK5dBqDBw7bxxQiv5truEU,8027 +transformers/commands/train.py,sha256=FKlH-IYr3mVc7_mS5ObCyJaHs9JincYLg3Zt6WQz1ag,6341 +transformers/commands/transformers_cli.py,sha256=QimzKwJXAzZ9da0NDFrupqnATqP8MQ7upoj9TspwnKA,2047 +transformers/commands/user.py,sha256=t35-l945UBen5uYR_KsbhtNOqdHXrfdpHrhTbR3-YXc,7124 +transformers/configuration_utils.py,sha256=P3_uwVe3AUONwXqEYMuhFPxosvPEnGs6H9M4r9FmM0c,56494 +transformers/convert_graph_to_onnx.py,sha256=rJmIK0Rs5WPsOiRGWmgN9q4A5W5gqUDB7OmcRkTqvJY,20151 +transformers/convert_pytorch_checkpoint_to_tf2.py,sha256=bxCJ6CUpi63x9K-Hr2up0W0VPIEhuuceM0siBse8brs,14655 +transformers/convert_slow_tokenizer.py,sha256=Prb4ZdYi1_Gw6HVxPgPGqSfEIVdCA2UNuZEtbuDgUPM,56210 +transformers/convert_slow_tokenizers_checkpoints_to_fast.py,sha256=mIX3e0r7Dci5lahBf0iO4C2rvj0OzwkJbmw5lmgiG0Q,4982 +transformers/convert_tf_hub_seq_to_seq_bert_to_pytorch.py,sha256=so9OnNT3TmdTbRMGbuepLY0zCMNfB6huaLg38aDVWOU,2911 +transformers/data/__init__.py,sha256=JWIY7GLKedWilK2mpd_qtVeXLQK2ZXki6ISkRUua09Y,1423 +transformers/data/__pycache__/__init__.cpython-310.pyc,, +transformers/data/__pycache__/data_collator.cpython-310.pyc,, +transformers/data/data_collator.py,sha256=EQgDVvrLxXzDZqoMAHwVd6wkFMf0pjdCYERwlEb_L-w,78254 +transformers/data/datasets/__init__.py,sha256=PGzUJjdmTPOPMyjV4-Tj3sNrmmh-lspjyxrVbrfJoX8,909 +transformers/data/datasets/__pycache__/__init__.cpython-310.pyc,, +transformers/data/datasets/__pycache__/glue.cpython-310.pyc,, +transformers/data/datasets/__pycache__/language_modeling.cpython-310.pyc,, +transformers/data/datasets/__pycache__/squad.cpython-310.pyc,, +transformers/data/datasets/glue.py,sha256=K3h2KxjIg0kWegPCw6ikbOL-lCFbKoQewb7R8wLZoIc,6163 +transformers/data/datasets/language_modeling.py,sha256=E-VGwuyb09J4KmV8v37bNH5in90wDPuZHCYsqGdT7W0,23721 +transformers/data/datasets/squad.py,sha256=OUTQDd687SQns7HRWDCgAjnuo_ZXihifLS6jF2bhUhc,9219 +transformers/data/metrics/__init__.py,sha256=o9t_VTQtqU3lEhqvocDzFMm7OvAKD-uxrjPWy0r74BI,3632 +transformers/data/metrics/__pycache__/__init__.cpython-310.pyc,, +transformers/data/metrics/__pycache__/squad_metrics.cpython-310.pyc,, +transformers/data/metrics/squad_metrics.py,sha256=pMwqcTg9KnCvmhLzAy1VJHRgJOEx6lLD105d-JcnWfg,29698 +transformers/data/processors/__init__.py,sha256=lvN5mp9mdrr5v6QvZT6VcoZ78zZUvXiumTm6Gdvlgvo,1014 +transformers/data/processors/__pycache__/__init__.cpython-310.pyc,, +transformers/data/processors/__pycache__/glue.cpython-310.pyc,, 
+transformers/data/processors/__pycache__/squad.cpython-310.pyc,, +transformers/data/processors/__pycache__/utils.cpython-310.pyc,, +transformers/data/processors/__pycache__/xnli.cpython-310.pyc,, +transformers/data/processors/glue.py,sha256=hhY12jdX1WnZ3_E3vSv-0rmF53F56c_2gQeW8dTwYb4,23219 +transformers/data/processors/squad.py,sha256=_4WNLcZA6TAy7uNZO46948tmL5ngVF0LSB0y8nUn6rs,33153 +transformers/data/processors/utils.py,sha256=GSaZbJ--XYq57vqyRVx_5LHSR4tklzFyR7ZKHGWsTAs,13829 +transformers/data/processors/xnli.py,sha256=i03-c8vaQVYKpR7r4B8PsF6_CXXHxB7N-YHdzxs-APU,3489 +transformers/debug_utils.py,sha256=6q8ArB104GdcIC2qfBQzKLxO7PfXmHEKdYtfL2FOK2w,12907 +transformers/deepspeed.py,sha256=6C1uUQ84ImJPYu3WqZ-o6uOGPa7IHzD0MkP7DgnQxJY,1478 +transformers/dependency_versions_check.py,sha256=6HbgtT2Wp-QZGOAdyUOklHvNA4rOVITGHrX34dtMOqg,2115 +transformers/dependency_versions_table.py,sha256=cGScoQVuynb1ell9UhMZAaebjVrwEiUPCQ9yAobKUxE,3182 +transformers/dynamic_module_utils.py,sha256=oG4PmP0MBRal3PhhOYaw2nWroucDhawU77s0vgdKUbM,27468 +transformers/feature_extraction_sequence_utils.py,sha256=dPKvTC29tNn8xK_dxZSeDbhNRK2s8VHu2EZIEKesEAs,18307 +transformers/feature_extraction_utils.py,sha256=XaRKR3ez3AyK67ntVMsBTHUPdvv5p7YLF9vk7SvrZMM,29527 +transformers/file_utils.py,sha256=qI7cWTYpFy0v9HZSRBASv2yvD2U1OJgYShIOsQ7cCUg,3744 +transformers/generation/__init__.py,sha256=Ox2TuVV2Eg-5Lir9UztnDLf-lyYhh_RevPAtIQbkajs,11214 +transformers/generation/__pycache__/__init__.cpython-310.pyc,, +transformers/generation/__pycache__/beam_constraints.cpython-310.pyc,, +transformers/generation/__pycache__/beam_search.cpython-310.pyc,, +transformers/generation/__pycache__/candidate_generator.cpython-310.pyc,, +transformers/generation/__pycache__/configuration_utils.cpython-310.pyc,, +transformers/generation/__pycache__/flax_logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/flax_utils.cpython-310.pyc,, +transformers/generation/__pycache__/logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/stopping_criteria.cpython-310.pyc,, +transformers/generation/__pycache__/streamers.cpython-310.pyc,, +transformers/generation/__pycache__/tf_logits_process.cpython-310.pyc,, +transformers/generation/__pycache__/tf_utils.cpython-310.pyc,, +transformers/generation/__pycache__/utils.cpython-310.pyc,, +transformers/generation/beam_constraints.py,sha256=GefqriO2jWruyhdZI9pyGz4yZ-W9AYmzZueSWITgok4,19105 +transformers/generation/beam_search.py,sha256=d6ZduwortYoRu6d0uCWfz1ivHqeQAxdA_lDrRA0kUOU,48812 +transformers/generation/candidate_generator.py,sha256=uyroHZgYZAD3zxat1XYH_dvD7nDql1c4xei5KcYPDXQ,20792 +transformers/generation/configuration_utils.py,sha256=vatVMXQvHtDOQW_4gJ7jrADmvbE8yaADlsZaJp0pUsc,57916 +transformers/generation/flax_logits_process.py,sha256=JBFbiYJANPD_2LeY2hghPHfrerBwr4ZEGds4kcmaS1k,23005 +transformers/generation/flax_utils.py,sha256=x4J5blTRgFVp8o0lK-UvjOYzpeTP54kdy5m5eK8apzQ,50078 +transformers/generation/logits_process.py,sha256=XFBNkklG1DhG-Py1rffCj0WnnnO7fgn7h0Dd3WEXAw0,106675 +transformers/generation/stopping_criteria.py,sha256=jMkMaMeBKO7Vw-Cn8or1kd7PlRsNV-pwnBRPcU2fZZs,8590 +transformers/generation/streamers.py,sha256=ArJCKAVRKIKALqdGBAsQu038-BwZbo05tzOXZWP9yng,9213 +transformers/generation/tf_logits_process.py,sha256=ZsIBDrFJ3egkk8aWYKtCvqH4M7INnlBa2zoCAIT5MR0,28114 +transformers/generation/tf_utils.py,sha256=dUFykUJNLGm5gYMadkcJgoHK5y1zw2pCa3Vm0HcdRbI,175623 +transformers/generation/utils.py,sha256=sO4vvRA950NtMdxcPRA0_2w4rLdpQYidV6-OrtM2bSM,271310 
+transformers/hf_argparser.py,sha256=t6EC7gJ6yWJPCDScgrppfgXOAkjZxEJJO6pe8W-aK_0,19823 +transformers/hyperparameter_search.py,sha256=wmfAWk_NTUQj3MezO_6CaDaJyUt9pbARcs-tbo_BdeM,4171 +transformers/image_processing_utils.py,sha256=YrlSb_pIVAneGj-YaKjAiO6h5XSGIfQ93biaGCnXT-k,36375 +transformers/image_transforms.py,sha256=q1pV5pPS32f9i5m2NXObPp50nqAMiARQduiuvHgCruY,34203 +transformers/image_utils.py,sha256=o2ajRGC2m0wQsu1UljF0d2l_TJ_G0RQtglI2lTHC-0M,30007 +transformers/integrations/__init__.py,sha256=fyK711qayQzwCj1pXHOgDi3aBFWaLThXWrV1bQkbAVc,4832 +transformers/integrations/__pycache__/__init__.cpython-310.pyc,, +transformers/integrations/__pycache__/aqlm.cpython-310.pyc,, +transformers/integrations/__pycache__/awq.cpython-310.pyc,, +transformers/integrations/__pycache__/bitsandbytes.cpython-310.pyc,, +transformers/integrations/__pycache__/deepspeed.cpython-310.pyc,, +transformers/integrations/__pycache__/integration_utils.cpython-310.pyc,, +transformers/integrations/__pycache__/peft.cpython-310.pyc,, +transformers/integrations/__pycache__/quanto.cpython-310.pyc,, +transformers/integrations/__pycache__/tpu.cpython-310.pyc,, +transformers/integrations/aqlm.py,sha256=wpVq2OAdGDMTywT-_rpH6vpRQEhUH4hLTi13jACFDCg,4462 +transformers/integrations/awq.py,sha256=vA5OO8hjOL1LxTnCDFBnZGLUe8HgcH51O5WQ8eNYMek,18380 +transformers/integrations/bitsandbytes.py,sha256=i0oRUxEbpqlsFA3-3K0G2A_mCPeZ9zFZINloQvr-jZE,15305 +transformers/integrations/deepspeed.py,sha256=toOGbMcsGg_TACcGQ5dEqjS9x34Ix-7uhjhelC2dxHA,18703 +transformers/integrations/integration_utils.py,sha256=gMUtD9L91UScWCjvUqmp9STBlmCTBLKMzoyL8mQLW30,85781 +transformers/integrations/peft.py,sha256=_1zABToVWSH9U7XoPG5cJVmAT_5jbSbMDUADHvGiAXE,22620 +transformers/integrations/quanto.py,sha256=VR7GV9KG6mFweixYDaUdhYzfGoLZVurwXWU24Idg32w,4250 +transformers/integrations/tpu.py,sha256=Y8YMwIrEgh1s-OCNbOQZFD1_3Tvqpo3e1H6eECTceSU,1392 +transformers/keras_callbacks.py,sha256=i95nrEd_QsEo10x3T9RqZf3xGzfPiMOhmU1Ef_HvnGE,20675 +transformers/kernels/deformable_detr/cpu/ms_deform_attn_cpu.cpp,sha256=VcCGm9IrvgVvmyZt0KyP16Q-ONmbeg6bKwccP6KadL0,1255 +transformers/kernels/deformable_detr/cpu/ms_deform_attn_cpu.h,sha256=nvVsKj9nabQ7IaNY4di5xVx6u-0lIifQvLg2JCoxiik,1138 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.cu,sha256=l7UQ6zn1qbeve1meY0QLq2RKk3X6fGpp2UfKt4aEYJ4,7466 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.cuh,sha256=HD7bMWLoGrDKw7XUPPgILCAdOSo1IC8RIv_KyKAnLb0,61539 +transformers/kernels/deformable_detr/cuda/ms_deform_attn_cuda.h,sha256=xxP17aer-SiU9J5ASLHdtLIyhFmHC5iLcPIPNW2xkrg,1694 +transformers/kernels/deformable_detr/cuda/ms_deform_im2col_cuda.cuh,sha256=BRN8-yfSHY8ChLij8jFl2_z2LL0LEFKuVF6Byi-YLAY,54695 +transformers/kernels/deformable_detr/ms_deform_attn.h,sha256=H2bBXGyl0R-v2DqGVz11asoRvxbjZ9iWB9djomZTpgY,1837 +transformers/kernels/deformable_detr/vision.cpp,sha256=8RvZy7P_MMx5QEszo_MwNODddJLQ8mKcmmMfgLYC_HA,798 +transformers/kernels/deta/cpu/ms_deform_attn_cpu.cpp,sha256=VcCGm9IrvgVvmyZt0KyP16Q-ONmbeg6bKwccP6KadL0,1255 +transformers/kernels/deta/cpu/ms_deform_attn_cpu.h,sha256=nvVsKj9nabQ7IaNY4di5xVx6u-0lIifQvLg2JCoxiik,1138 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cu,sha256=M5-bW9g5z-upTFMNPIfnyLAqKTxGMCjAPqBr0GmWHX8,7360 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.cuh,sha256=hygB20Vh3RttOSdCuTFz8V0d3CXNp-Q89x22rYmD258,61433 +transformers/kernels/deta/cuda/ms_deform_attn_cuda.h,sha256=rPWOOMo3QyFdB5kMiexpApLFZ4dnRtx4CluEAGwsfO8,1139 
+transformers/kernels/deta/cuda/ms_deform_im2col_cuda.cuh,sha256=BRN8-yfSHY8ChLij8jFl2_z2LL0LEFKuVF6Byi-YLAY,54695 +transformers/kernels/deta/ms_deform_attn.h,sha256=H2bBXGyl0R-v2DqGVz11asoRvxbjZ9iWB9djomZTpgY,1837 +transformers/kernels/deta/vision.cpp,sha256=8RvZy7P_MMx5QEszo_MwNODddJLQ8mKcmmMfgLYC_HA,798 +transformers/kernels/mra/cuda_kernel.cu,sha256=LxxRYTymSoBEQpWXHA0PMzwZwpolcwX7mFAjwU8-ZMc,11678 +transformers/kernels/mra/cuda_kernel.h,sha256=UJvYq_MDzhcp07bZpYcOBn8ZGFcf_Ax1dynuiVTBvmA,1682 +transformers/kernels/mra/cuda_launch.cu,sha256=Ox5MTACriC30CGyn-g1Kb5EgQSMAZSaN6fpit3xLFWc,4072 +transformers/kernels/mra/cuda_launch.h,sha256=RVCkN_euasvgPK0zADNRvRYGWd4ah5l9X-7UG_AcdH8,707 +transformers/kernels/mra/torch_extension.cpp,sha256=N0YdBLVX0lZabckJzV_RYTHS2atCNvn13E4Ivobt25g,1405 +transformers/kernels/rwkv/wkv_cuda.cu,sha256=EvaUrEnw_qr2EjMKP-Pq7VPzFfGlMJnFhdHNLtn1fPU,6219 +transformers/kernels/rwkv/wkv_cuda_bf16.cu,sha256=DG9hTtOAlrnpDFahjt-MmnOxjMuhGU55GPsmV21HtrQ,6633 +transformers/kernels/rwkv/wkv_op.cpp,sha256=qSExhKdT6p3hyaTv5SypCnH_c7EmaX6HbhTcCntvZWg,4022 +transformers/kernels/yoso/common.h,sha256=Tq2rOUtE8Y4DRAUrRISvwIwVI3u8JBf21WgWSAYiDlQ,273 +transformers/kernels/yoso/common_cuda.h,sha256=Sji70AuVcuZSotLF7Gotmun9MJuOHo8wEkxizKXLRtc,258 +transformers/kernels/yoso/common_cuda_device.h,sha256=y6WUgAiapnMKqthRMS5s-DMSWNVkar_i8g4KPFvqiuk,2063 +transformers/kernels/yoso/fast_lsh_cumulation.cu,sha256=LA4LGNgyXT3osIyQtFBcRanSyNQWm8yqmpz7AeLP7cw,19061 +transformers/kernels/yoso/fast_lsh_cumulation.h,sha256=1cTWZjOm751HGiEB5P-UPJ8SE1VO7XRyXmBgyxYDyjI,1575 +transformers/kernels/yoso/fast_lsh_cumulation_cuda.cu,sha256=HKGLWl-WFz5BXjaAPHTNTbG6IUkJjhBdvFf2K7hrDVQ,32870 +transformers/kernels/yoso/fast_lsh_cumulation_cuda.h,sha256=_KGI8HQbVFtCN5KAcSGpyiJ2foGi26RKen138CUc2fY,5490 +transformers/kernels/yoso/fast_lsh_cumulation_torch.cpp,sha256=-Rh7o39Z3rtOPwNnEM-c51TCqywpVdK0WVaA7VRrXbQ,3154 +transformers/modelcard.py,sha256=zeGRoH_h9x3BNmXiG_YhZ69pCxp8YSgzt2tMooaszGQ,35155 +transformers/modeling_attn_mask_utils.py,sha256=-hc3S9YP3oQb2NZd4HLQhVBSQ8yHXYgphfzTYizwjls,22276 +transformers/modeling_flax_outputs.py,sha256=wXse1g9VyQyVOZ9DrbPALeoZBdS45fsBA9fNrGnwaZc,41961 +transformers/modeling_flax_pytorch_utils.py,sha256=UL5zridIWWbmo5vZ6uVoRcF6kIuEN4jthQ4q8uRKgRQ,21886 +transformers/modeling_flax_utils.py,sha256=UCYFom8AM-0nN0o6jwheWrXEVs9nGmtYTrb0_3q6kBs,61404 +transformers/modeling_outputs.py,sha256=CYpjijqZNOVUc-kixDLI-jMFru9MhpDQvnncSfp0wb4,112567 +transformers/modeling_tf_outputs.py,sha256=nXCMOmFZ7IZFVuiQr7EU2ciV9QqwOYPYld_r2jBxVpE,56074 +transformers/modeling_tf_pytorch_utils.py,sha256=5V6hH7KnfdP1f-R8s09GEyOKXSuoZVwDISxPmFZnhjw,27908 +transformers/modeling_tf_utils.py,sha256=muR9u6vciEekJG939anukWeBROWD6YeAkfUZEwtqnDM,166736 +transformers/modeling_utils.py,sha256=upsOIqBVhgHmVpHfsQIL492QL4dbwgcjHUmL6IBSxlE,237486 +transformers/models/__init__.py,sha256=G2S4oftKfd2YTDX4Q8QcGC1ISDnw6-SdVMTjKzx6eL8,4222 +transformers/models/__pycache__/__init__.cpython-310.pyc,, +transformers/models/albert/__init__.py,sha256=eXW8msH9V8No-Tb5R28tdpXQbOnnSG77L_TVEwCRf9o,5482 +transformers/models/albert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/albert/__pycache__/configuration_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/convert_albert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/albert/__pycache__/modeling_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/modeling_flax_albert.cpython-310.pyc,, 
+transformers/models/albert/__pycache__/modeling_tf_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/tokenization_albert.cpython-310.pyc,, +transformers/models/albert/__pycache__/tokenization_albert_fast.cpython-310.pyc,, +transformers/models/albert/configuration_albert.py,sha256=orosaIXnT6NdUCmKUv9J3BPByewA1BKe8OD15DLoMWA,8184 +transformers/models/albert/convert_albert_original_tf_checkpoint_to_pytorch.py,sha256=nTwtVg0AZgG4QnG9K361HM37gxGegQvD-ymZWuhic7s,2162 +transformers/models/albert/modeling_albert.py,sha256=PhiNBl2jRMs8OJdhL3Go8Nx2ry5qKeYHvIqd9X9f-bg,60519 +transformers/models/albert/modeling_flax_albert.py,sha256=u2EEkckxVFt5WA8oQNbLJGcV5mhHGIJ6DMS867O150U,40739 +transformers/models/albert/modeling_tf_albert.py,sha256=CYfemwBDzPw9fjsy36jrmAjJMI7yzBFsiF8RBOdfjJg,68950 +transformers/models/albert/tokenization_albert.py,sha256=ysc5uU3xkgZFpAS4EGyYjEvfw9MHy6bx4kbsIfL-fnE,14423 +transformers/models/albert/tokenization_albert_fast.py,sha256=FhSf6cK3YdDHdHJqtar_mMPeSkoOCCEJouFMl6jWwwU,8832 +transformers/models/align/__init__.py,sha256=DWtMJsXbmRuoSAwLLOy6aXKY65IT1TDV4ifwBmApkM0,2064 +transformers/models/align/__pycache__/__init__.cpython-310.pyc,, +transformers/models/align/__pycache__/configuration_align.cpython-310.pyc,, +transformers/models/align/__pycache__/convert_align_tf_to_hf.cpython-310.pyc,, +transformers/models/align/__pycache__/modeling_align.cpython-310.pyc,, +transformers/models/align/__pycache__/processing_align.cpython-310.pyc,, +transformers/models/align/configuration_align.py,sha256=a8fS9v05sS50o11bk0aRY-RD3FbhXYqpElyew61WwWw,18194 +transformers/models/align/convert_align_tf_to_hf.py,sha256=tzPoEMyLV_ckVngYdvJ6uAFZ6RgsuX55JYjEkIMtPTg,15536 +transformers/models/align/modeling_align.py,sha256=Z27Ux-Mym96AbnHZoiZPcQhG0DKnSHz71NPJi5PiQXs,71777 +transformers/models/align/processing_align.py,sha256=nd9rZAv_C3Xz7Zbv3SBkmCy-pZ6ChimNzEBf33zzdrM,6110 +transformers/models/altclip/__init__.py,sha256=bvOH6rQhnWm4shjpJ51SPs0uxlDdPrViBxQqTt3gRik,2126 +transformers/models/altclip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/altclip/__pycache__/configuration_altclip.cpython-310.pyc,, +transformers/models/altclip/__pycache__/modeling_altclip.cpython-310.pyc,, +transformers/models/altclip/__pycache__/processing_altclip.cpython-310.pyc,, +transformers/models/altclip/configuration_altclip.py,sha256=cDETvjuzn-FFtHMIZMACD-HYCRYB8eCnniWdKkLQxj4,19801 +transformers/models/altclip/modeling_altclip.py,sha256=4VuL9K0aaKbigpYaqRmRC_4l05mDR-lEyyNG1rIv9ww,78250 +transformers/models/altclip/processing_altclip.py,sha256=LHCFcwZHPlRnVYzIONbEtm60ZRHzJeTQq2o7akvAM_g,6396 +transformers/models/audio_spectrogram_transformer/__init__.py,sha256=-LyBP9am8Di97o7CZupQyqD1-2bYHKLcUqVWTZBHVs8,2159 +transformers/models/audio_spectrogram_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/configuration_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/convert_audio_spectrogram_transformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/feature_extraction_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/__pycache__/modeling_audio_spectrogram_transformer.cpython-310.pyc,, +transformers/models/audio_spectrogram_transformer/configuration_audio_spectrogram_transformer.py,sha256=xEHkE6ckUKtuSLQrhSnmh8wVTu0oDsf_zBVxFYscP1Y,5549 
+transformers/models/audio_spectrogram_transformer/convert_audio_spectrogram_transformer_original_to_pytorch.py,sha256=Csn0NnGlPMLUehRWvgU1cW49EzTNZ7p0COxWNIqQIp8,11052 +transformers/models/audio_spectrogram_transformer/feature_extraction_audio_spectrogram_transformer.py,sha256=CLMcdUUk8ehA2PC9wEBwvWd68tIMFtZswNhVbVwXWc8,9908 +transformers/models/audio_spectrogram_transformer/modeling_audio_spectrogram_transformer.py,sha256=FRTHbROpF95XpMUfzrucRZ1_RWYSLSEXd5Blf2EzKq0,25924 +transformers/models/auto/__init__.py,sha256=pkAEEIEmLLFzRM_jTAP42u15RL8dwmJc4xY2op7NwPg,16840 +transformers/models/auto/__pycache__/__init__.cpython-310.pyc,, +transformers/models/auto/__pycache__/auto_factory.cpython-310.pyc,, +transformers/models/auto/__pycache__/configuration_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/feature_extraction_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/image_processing_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_flax_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/modeling_tf_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/processing_auto.cpython-310.pyc,, +transformers/models/auto/__pycache__/tokenization_auto.cpython-310.pyc,, +transformers/models/auto/auto_factory.py,sha256=m9d07NNVM9JDRHyRCsi4q5DhwgbNvrhIozsbSa948-k,43218 +transformers/models/auto/configuration_auto.py,sha256=mB9qTmRzBjXePSpnpwq8CK7PfxbzlQS2ppT4wYb_zwg,38372 +transformers/models/auto/feature_extraction_auto.py,sha256=YfgaeHgaDj8qtVEHSLdq9Xjit6c_O5c1wO_pRykwGrY,19509 +transformers/models/auto/image_processing_auto.py,sha256=IpCXZv1iwq2hAQLYdbE1bey1ASOdkD8mEGtoTl7e2vE,21868 +transformers/models/auto/modeling_auto.py,sha256=nePbz4WQBdzaaQB9lmX6Adi54VGndxuvPk9PGaYoCbk,68407 +transformers/models/auto/modeling_flax_auto.py,sha256=WKcWOmDTq2kwtFYGHccSyV3o8yUtvHlCgVRlh_5K2OI,14475 +transformers/models/auto/modeling_tf_auto.py,sha256=fB3ufe0eyB2DzDupxt_EBfDUybgUz3HdT6qhF7DAUu8,28077 +transformers/models/auto/processing_auto.py,sha256=0u7tVknEh1L6WIyY_ztD5Zi-6K4E8YP3KbGQBKloCZg,17056 +transformers/models/auto/tokenization_auto.py,sha256=kLADVhqr2UbmfMBvJuOixPwFS-0N7EOBYCrvcNfpDHk,46856 +transformers/models/autoformer/__init__.py,sha256=wNFDMEr-Yo9Bt33bP5qqiC5dWKXOnWQPFg4C_ewyfGU,1914 +transformers/models/autoformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/autoformer/__pycache__/configuration_autoformer.cpython-310.pyc,, +transformers/models/autoformer/__pycache__/modeling_autoformer.cpython-310.pyc,, +transformers/models/autoformer/configuration_autoformer.py,sha256=j1rKPbxRBrljEb0k9f0LrcW56s98dadHTWfd__cbTlU,12244 +transformers/models/autoformer/modeling_autoformer.py,sha256=vs6vneh5RLDK4pC6s2BrkoWmIzl0hZCaGYGPOhyEOTQ,108848 +transformers/models/bark/__init__.py,sha256=o6hWj_LrFLp-JSNY04tbWewQyrA44B0mhLUDpyv4jVw,2212 +transformers/models/bark/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bark/__pycache__/configuration_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/convert_suno_to_hf.cpython-310.pyc,, +transformers/models/bark/__pycache__/generation_configuration_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/modeling_bark.cpython-310.pyc,, +transformers/models/bark/__pycache__/processing_bark.cpython-310.pyc,, +transformers/models/bark/configuration_bark.py,sha256=nDioA5tQVccq1raX9PUewVvbAbVGXMpOlAY9XUpE6Fk,12836 
+transformers/models/bark/convert_suno_to_hf.py,sha256=O1OYzKyTr-9snPYUAw09GmVwb76UmiQGi3C2WfEIwTw,9373 +transformers/models/bark/generation_configuration_bark.py,sha256=80ZI8x5r8JH26siXfm_c8NkuaRTUUzcxiMrtfIKDoSg,14992 +transformers/models/bark/modeling_bark.py,sha256=DKdikVinEBLfbcNvr2OoRdx3rgGEbGdBLB92xSWlTms,86628 +transformers/models/bark/processing_bark.py,sha256=PgoptE_6V_ESvgXhGrRfVa68pTjJHXv1j9YwV24W9HA,13312 +transformers/models/bart/__init__.py,sha256=FH8iETt_U4YAIIjo-Oap-WtQsBZqsaxGr9028KnrDEQ,4397 +transformers/models/bart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bart/__pycache__/configuration_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/convert_bart_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_flax_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/modeling_tf_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/tokenization_bart.cpython-310.pyc,, +transformers/models/bart/__pycache__/tokenization_bart_fast.cpython-310.pyc,, +transformers/models/bart/configuration_bart.py,sha256=TDtjKSQs7UaY6TZFDDhTqIbdqXa-lTdtNrm2LWikJv0,18783 +transformers/models/bart/convert_bart_original_pytorch_checkpoint_to_pytorch.py,sha256=VIRm-jWP4PNWN0Japr8yCJAJAAPVkJpJmEzYnHexU88,6055 +transformers/models/bart/modeling_bart.py,sha256=iPA4Z8NMV5OLezna3y8BTzCAm9-a6AjTkP-Gzf3zJc4,109215 +transformers/models/bart/modeling_flax_bart.py,sha256=JH4YXctmpkynng1wP-50Vn4t8vEuhEmFfsfQZu1-lFI,82707 +transformers/models/bart/modeling_tf_bart.py,sha256=SCaGH910Egz8gtbPF8Kg38uTG5KwPilRQTG4CMLvTaU,80773 +transformers/models/bart/tokenization_bart.py,sha256=fqHZCAZwmM4QoFAk6unDGwrbXtZkV1kPWMsVIu7HrPg,16250 +transformers/models/bart/tokenization_bart_fast.py,sha256=h-DIyLB7ii7QjTUFF78nU8e5fPVEMUCXaPKkHqMI71E,11723 +transformers/models/barthez/__init__.py,sha256=7IXg6okZoJ10NCYRWn0GvoWWUvGUN27eIw7CzJ5CVGA,1848 +transformers/models/barthez/__pycache__/__init__.cpython-310.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez.cpython-310.pyc,, +transformers/models/barthez/__pycache__/tokenization_barthez_fast.cpython-310.pyc,, +transformers/models/barthez/tokenization_barthez.py,sha256=i_gcpfOaZ8IY9l5kqNMhChOo336CEyJ2i7ldKn3e_kg,12066 +transformers/models/barthez/tokenization_barthez_fast.py,sha256=HQ-622iS3S_XjkSTWyx-gfMhpFAOfHe7POWqqnALrhA,7838 +transformers/models/bartpho/__init__.py,sha256=Q0mAOPJGQaHHigdajLg5-2TPOw9NWw5uIRQlmfhh8Ds,1362 +transformers/models/bartpho/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bartpho/__pycache__/tokenization_bartpho.cpython-310.pyc,, +transformers/models/bartpho/tokenization_bartpho.py,sha256=1qUC54eDZVK7cpBqlkFSM5xM8c6JCFBPKXxFMPKitC0,13525 +transformers/models/beit/__init__.py,sha256=T88Lwe4Y0tQmdrOpVnewjuHJoW_DZEbRmbTZDU2oAR0,3339 +transformers/models/beit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/beit/__pycache__/configuration_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/convert_beit_unilm_to_pytorch.cpython-310.pyc,, +transformers/models/beit/__pycache__/feature_extraction_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/image_processing_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/modeling_beit.cpython-310.pyc,, +transformers/models/beit/__pycache__/modeling_flax_beit.cpython-310.pyc,, +transformers/models/beit/configuration_beit.py,sha256=Gmv_5upSDT5CQV_fFqa5R1pJztvBWdpxCSEjZLjUS9A,11698 
+transformers/models/beit/convert_beit_unilm_to_pytorch.py,sha256=CndMgSTJoOik5LPH3YVLnQ6IR7IqfCsEN0KPUR43jHA,16578 +transformers/models/beit/feature_extraction_beit.py,sha256=C9wchKLt3K__wzqOkDWsbK0hMPzVn9HZtm5KPI5Oq2s,1172 +transformers/models/beit/image_processing_beit.py,sha256=y83OF3kyhKd6ODDq2IkZAyZhGNw7SzBhvz5v0k7U37U,25074 +transformers/models/beit/modeling_beit.py,sha256=67Sc74br06LoRjUnREs1bWdQdvQl6JSL1CXnBCNhBzU,59791 +transformers/models/beit/modeling_flax_beit.py,sha256=9_xkFN7xtiLrxbShhpX8EgpY8kuOKIui-OlRidmNUAI,36996 +transformers/models/bert/__init__.py,sha256=Tj3tueT-1FoWBmNNZXGGnytzeoLeEcjviP32uyfU1rw,6057 +transformers/models/bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert/__pycache__/configuration_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_original_tf2_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_pytorch_checkpoint_to_original_tf.cpython-310.pyc,, +transformers/models/bert/__pycache__/convert_bert_token_dropping_original_tf2_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_flax_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/modeling_tf_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_fast.cpython-310.pyc,, +transformers/models/bert/__pycache__/tokenization_bert_tf.cpython-310.pyc,, +transformers/models/bert/configuration_bert.py,sha256=7NgpZT7tKTkmYNrewwywV36M8vWmvHiAJV0D9MJk7O0,7340 +transformers/models/bert/convert_bert_original_tf2_checkpoint_to_pytorch.py,sha256=niQmTMwlmUA0aII1Zzg2OiJSpFljzwLCeJYotJ4tKOY,10490 +transformers/models/bert/convert_bert_original_tf_checkpoint_to_pytorch.py,sha256=Hq-TMOnQnfpZOh0m9GHoykkogg0-HgLAmSiFvK8E6K4,2159 +transformers/models/bert/convert_bert_pytorch_checkpoint_to_original_tf.py,sha256=6nISsCdgO_sJFFiLpnkGGsmTqC9Yp-gzDPDM-EafVXA,4112 +transformers/models/bert/convert_bert_token_dropping_original_tf2_checkpoint_to_pytorch.py,sha256=5kYqUUc-RGck4D0OUTlLDnyIPb_OIJ1NWboYRJ-7H0c,7606 +transformers/models/bert/modeling_bert.py,sha256=Hw94jzZlKj33oYz1_VFCJ5s4-M0fuM2QfxH9jHhd7Rg,83243 +transformers/models/bert/modeling_flax_bert.py,sha256=UMRUMxvvwu8oIzkLfVjXWP9Y47WolZPtZFELypsG-pg,63672 +transformers/models/bert/modeling_tf_bert.py,sha256=wgt4VwesdaX5kXwSuBCLMTA1nqAEUxmANmEmDygSlo4,94392 +transformers/models/bert/tokenization_bert.py,sha256=5PvCwO7TdeghUBTOMEoBEiQ2WEawchcHLHrRykJaGjI,20528 +transformers/models/bert/tokenization_bert_fast.py,sha256=FoAutpMtmt_D77Z82RtBcttl8Cl5P2Rdt_HFIKUT2m8,7652 +transformers/models/bert/tokenization_bert_tf.py,sha256=1zWzz3FPrh5zWqRG7YVY_wIVCzzB8iNGR6MGx48ke3c,11895 +transformers/models/bert_generation/__init__.py,sha256=2XUvSVePne5Hspjzn6l_PonKfZ9WXjRBub9bevOv8R4,2275 +transformers/models/bert_generation/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/configuration_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/modeling_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/__pycache__/tokenization_bert_generation.cpython-310.pyc,, +transformers/models/bert_generation/configuration_bert_generation.py,sha256=DIEAcuNI_Ufp7hPLN-nDuvJLYDYgr9gNphiroKv-4qY,6342 
+transformers/models/bert_generation/modeling_bert_generation.py,sha256=XwCC1kp-Sr2QssLGXpH4wds3Y8J80Xz8MNHHj2_w9j4,48087 +transformers/models/bert_generation/tokenization_bert_generation.py,sha256=jOLb4GKOuFKlqxBDgtJ3Ii4iH3ovUGONeoUCwEHElck,7078 +transformers/models/bert_japanese/__init__.py,sha256=6prQNXS2J4cWXqAqkqDyxNmzx-vaFQtOjJQio-ZUc4g,1053 +transformers/models/bert_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bert_japanese/__pycache__/tokenization_bert_japanese.cpython-310.pyc,, +transformers/models/bert_japanese/tokenization_bert_japanese.py,sha256=tc6yQ6PwwyXpwsgQC61VePBfSQ-GwwG_eLiT91o-gCo,39028 +transformers/models/bertweet/__init__.py,sha256=sXE2NweoWp8UIaJkuSaLSw4EaSEzpWwBe3pegec_Kj0,959 +transformers/models/bertweet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bertweet/__pycache__/tokenization_bertweet.cpython-310.pyc,, +transformers/models/bertweet/tokenization_bertweet.py,sha256=lC_rUAk5FTs4tgDyIjBoe6Eel_DlrsV3GW3pa7hnE9I,26988 +transformers/models/big_bird/__init__.py,sha256=XaBDMkK9Dhqc9pVSqqn2xFCNYInFMsBpPOP8GZ0F04Q,4574 +transformers/models/big_bird/__pycache__/__init__.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/configuration_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/convert_bigbird_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/modeling_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/modeling_flax_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/tokenization_big_bird.cpython-310.pyc,, +transformers/models/big_bird/__pycache__/tokenization_big_bird_fast.cpython-310.pyc,, +transformers/models/big_bird/configuration_big_bird.py,sha256=9go6AGwK90fiINxUr2QVpRa84NmdtBMdE1yRaQdeIFE,7932 +transformers/models/big_bird/convert_bigbird_original_tf_checkpoint_to_pytorch.py,sha256=Y75oSwtX-d2wwOSwLo6LlUlZ9uzSEVtWwzwiJYcrXyg,2493 +transformers/models/big_bird/modeling_big_bird.py,sha256=mwU2ApXuiKEQ4pGw41M9FTzFYf95-X5Asi1Vlh_do_E,142329 +transformers/models/big_bird/modeling_flax_big_bird.py,sha256=ePVW-6VwD8sgJYIlX4eWv0EVNaInVosJW_CtqlyzpGs,109510 +transformers/models/big_bird/tokenization_big_bird.py,sha256=ieqxWuknmGjrFCdbn4eHBtW20D-8X1RTEuhy8ExX5A0,14218 +transformers/models/big_bird/tokenization_big_bird_fast.py,sha256=IdSpX06f6d92UnjbR5oXHaoUQTit4yVeYps8ebvLaKc,10168 +transformers/models/bigbird_pegasus/__init__.py,sha256=lTnaYtQ3nRjYYND5G3wilFyh6VOOWlKjNXbsmJTo-A4,2316 +transformers/models/bigbird_pegasus/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/configuration_bigbird_pegasus.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/convert_bigbird_pegasus_tf_to_pytorch.cpython-310.pyc,, +transformers/models/bigbird_pegasus/__pycache__/modeling_bigbird_pegasus.cpython-310.pyc,, +transformers/models/bigbird_pegasus/configuration_bigbird_pegasus.py,sha256=9MOM5DGsV6qMwCgBTlw766O26R-WlpQn_6akpBHgdR4,19323 +transformers/models/bigbird_pegasus/convert_bigbird_pegasus_tf_to_pytorch.py,sha256=Wc7aoNvtzxt-DPi655Kl30CgDgq_hp08psISb8dWpLU,6288 +transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py,sha256=M9Xr4ukUftzShmCZZT7lJjtmcuHj2aHhmwOWZaEADcM,145911 +transformers/models/biogpt/__init__.py,sha256=dV4wh5lT3U-EYdvjCy6b9lI4Lr2zIN1RqSs6Rsuc6Sg,2058 +transformers/models/biogpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/configuration_biogpt.cpython-310.pyc,, 
+transformers/models/biogpt/__pycache__/convert_biogpt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/modeling_biogpt.cpython-310.pyc,, +transformers/models/biogpt/__pycache__/tokenization_biogpt.cpython-310.pyc,, +transformers/models/biogpt/configuration_biogpt.py,sha256=k2iyeXyZa50vG6BDeZCoryOL8R5gjRupfny3O4qSRtk,6277 +transformers/models/biogpt/convert_biogpt_original_pytorch_checkpoint_to_pytorch.py,sha256=5zNYzaEy7QPc99LCHTcofXSCI3tr0pzlIpFpwT1ZgN0,10578 +transformers/models/biogpt/modeling_biogpt.py,sha256=uK6gC28UpvxBf283QmsQ68QbCjuW3y-R0r1JyOp9bls,41084 +transformers/models/biogpt/tokenization_biogpt.py,sha256=xrM8Q7_nUFI8MFELCXSiQspn1y7pM5-hHy2hvGQBSKU,13256 +transformers/models/bit/__init__.py,sha256=g9Upc1daCF75FealBk9SK9FMQ-wkJMQxtjoN5mDk4cI,2244 +transformers/models/bit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bit/__pycache__/configuration_bit.cpython-310.pyc,, +transformers/models/bit/__pycache__/convert_bit_to_pytorch.cpython-310.pyc,, +transformers/models/bit/__pycache__/image_processing_bit.cpython-310.pyc,, +transformers/models/bit/__pycache__/modeling_bit.cpython-310.pyc,, +transformers/models/bit/configuration_bit.py,sha256=upReAwVSescKNWbsk0naktI-fR8anH1SfBhiWUYYA1g,6365 +transformers/models/bit/convert_bit_to_pytorch.py,sha256=Z50gXtfe6Tj44cPdIvrFRqjHPdWHdeka5oAqsTuK_ig,5955 +transformers/models/bit/image_processing_bit.py,sha256=NjlrvLfIuCExl48RLRO-5kft5NwqwhZPjex7qBjDSr8,16395 +transformers/models/bit/modeling_bit.py,sha256=yrYTWfA4I4amTBbf0LEx1B9JYQHuEqiduj-P769tyv8,31814 +transformers/models/blenderbot/__init__.py,sha256=nB9V1KQEetB0dazUyJ_KWDJscltclpJ6fJ746wy6zuU,4031 +transformers/models/blenderbot/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/configuration_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/convert_blenderbot_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_flax_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/modeling_tf_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/tokenization_blenderbot.cpython-310.pyc,, +transformers/models/blenderbot/__pycache__/tokenization_blenderbot_fast.cpython-310.pyc,, +transformers/models/blenderbot/configuration_blenderbot.py,sha256=JvjK0b-KKOwlYH3CEVrqh60eFiiM50kDJDo8wY84EpQ,18884 +transformers/models/blenderbot/convert_blenderbot_original_pytorch_checkpoint_to_pytorch.py,sha256=86QBWYTeyJvxMUOfxqmGHwpDneadfqbEGSujMYw3yuU,3702 +transformers/models/blenderbot/modeling_blenderbot.py,sha256=p3OFM5CHEgSFexKbmvtA9vUB4QsGmdxeI0NspQPFzxU,75689 +transformers/models/blenderbot/modeling_flax_blenderbot.py,sha256=-2C6LxBSnWTRtoaOHDJrt9pGPLqo-7nGwCYQkJdQ4Js,64985 +transformers/models/blenderbot/modeling_tf_blenderbot.py,sha256=YROTUbcA-LZRlKB0Fuo1_glkTd-Vuu45h6YOrx9ti4U,72696 +transformers/models/blenderbot/tokenization_blenderbot.py,sha256=ts-1ntfTkffcPvfKhfs7V6R8S3wfTe7mz1_HSQHbJyU,19075 +transformers/models/blenderbot/tokenization_blenderbot_fast.py,sha256=RJXNwnxtwIAPAR4LAFfhDRhW4-Ecm5RcH7copzj93ys,13877 +transformers/models/blenderbot_small/__init__.py,sha256=O-iMMZ9xZdyvP2PV4QYvxFcCaY6jEpKt5iyDzI_mrfM,4263 +transformers/models/blenderbot_small/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/configuration_blenderbot_small.cpython-310.pyc,, 
+transformers/models/blenderbot_small/__pycache__/modeling_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/modeling_flax_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/modeling_tf_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small.cpython-310.pyc,, +transformers/models/blenderbot_small/__pycache__/tokenization_blenderbot_small_fast.cpython-310.pyc,, +transformers/models/blenderbot_small/configuration_blenderbot_small.py,sha256=GspvDUXyjyfh4a0lyxXtl2sJQJyNP-epvHgo-703ay0,18321 +transformers/models/blenderbot_small/modeling_blenderbot_small.py,sha256=WxrZrwkUa8jovo31-ZhueZ9T-kD-Mz1kaMY-2RjkhCk,74555 +transformers/models/blenderbot_small/modeling_flax_blenderbot_small.py,sha256=7S4Aw5OKwRuUErJrna1O5LNERPCtclQ4p_bFbApnLOI,65946 +transformers/models/blenderbot_small/modeling_tf_blenderbot_small.py,sha256=fJBdZGkwA1VejvUF8iPZj8gUFN6Bt3knuxU-C6NsGQI,71608 +transformers/models/blenderbot_small/tokenization_blenderbot_small.py,sha256=NXWfTtB-D9UmwpaGFsMt22UqGG5_RUVXmpR9QyhGo-8,8911 +transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py,sha256=jVbE-wwb3zhY2uCp27EgcXoyLhNP57iTMCx73Wwb39A,4309 +transformers/models/blip/__init__.py,sha256=1OJOhjlrdGG1mkS-46qni8DdTosNMNVWZlR9QTe1K2I,3692 +transformers/models/blip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blip/__pycache__/configuration_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/convert_blip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/blip/__pycache__/image_processing_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_blip_text.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_tf_blip.cpython-310.pyc,, +transformers/models/blip/__pycache__/modeling_tf_blip_text.cpython-310.pyc,, +transformers/models/blip/__pycache__/processing_blip.cpython-310.pyc,, +transformers/models/blip/configuration_blip.py,sha256=BM2UyTXiZNMMHkyfOQyMoirYEuTPTLzDDTU9BgOFZlQ,16571 +transformers/models/blip/convert_blip_original_pytorch_to_hf.py,sha256=olLA10DbRUnCUOY2uHxF70u3W9wY2EBwm7eyAGfm8nM,6992 +transformers/models/blip/image_processing_blip.py,sha256=hn7D0Svr6hfuggT9wPSgXuHkEg_bjjpHZL4oVM7d1So,15692 +transformers/models/blip/modeling_blip.py,sha256=qBARXCfB0l-rfqpZtKIkxYwbs65V9Yzeir4L3yKmdZ8,61466 +transformers/models/blip/modeling_blip_text.py,sha256=MMmp7Is3B_dluI1QIqGI6_yQ8EQHY34_cJBB-aQN4kE,43781 +transformers/models/blip/modeling_tf_blip.py,sha256=oq2iOlJnUyv0K7znD8gqi7EBID54YHH3bkJOZ6yb3Uo,71414 +transformers/models/blip/modeling_tf_blip_text.py,sha256=iJiYcnZpqJhoNrfUcxPxtokT_qMJGgLyz1hAcAWZ-t4,49972 +transformers/models/blip/processing_blip.py,sha256=oU2XUYUq7FZy_9TiJFzlsojF0P-hTd9o93f4TNtSxxo,6205 +transformers/models/blip_2/__init__.py,sha256=uEo0Z9nF4AxtGnnMSZPEvbdImyy24KR_F1YtOJj_mvY,2153 +transformers/models/blip_2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/configuration_blip_2.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/convert_blip_2_original_to_pytorch.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/modeling_blip_2.cpython-310.pyc,, +transformers/models/blip_2/__pycache__/processing_blip_2.cpython-310.pyc,, +transformers/models/blip_2/configuration_blip_2.py,sha256=8T-Go6bUcycWouvObZoGz7hrTZ2MMJDtggmoLHF70oc,16587 
+transformers/models/blip_2/convert_blip_2_original_to_pytorch.py,sha256=0343xouUoM4JqP29bgDyCbNIJfSl8BO-e278133ytSA,12276 +transformers/models/blip_2/modeling_blip_2.py,sha256=8LRioqvJpRuMiP_bpdQT1fAvffoMz9wikgbs2olfIH4,82756 +transformers/models/blip_2/processing_blip_2.py,sha256=4HnjqBRHKwuEH6NKGv0s27Tx3-alA0DYoWXJtM2gZ2I,6699 +transformers/models/bloom/__init__.py,sha256=21dUYJI8_NttCwbHTXqYSl6VcqLj_PoHPPr5NRRu49E,3098 +transformers/models/bloom/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bloom/__pycache__/configuration_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/convert_bloom_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/bloom/__pycache__/modeling_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/modeling_flax_bloom.cpython-310.pyc,, +transformers/models/bloom/__pycache__/tokenization_bloom_fast.cpython-310.pyc,, +transformers/models/bloom/configuration_bloom.py,sha256=wAgm59dCFBCemfBFmREfAbVtw-ACvEXagW3nlS94LAk,10235 +transformers/models/bloom/convert_bloom_original_checkpoint_to_pytorch.py,sha256=WvxNS5YRu84Ek1ieKkyHRKcakRbZFJr5989nEjI6qQs,10302 +transformers/models/bloom/modeling_bloom.py,sha256=Hv_vRyXVDoc4UG2veZ5R82XCLRZ18cVx6awFYfGZUrE,54994 +transformers/models/bloom/modeling_flax_bloom.py,sha256=zBWwHZI6OBs9S1h9JSSAaEnskPKpa8jHn5AROhbLXpw,30092 +transformers/models/bloom/tokenization_bloom_fast.py,sha256=7kdn8u4ZTwk7IKEK8HTpJ-XtTtDy9pYmE7cE8ZYWybA,7033 +transformers/models/bridgetower/__init__.py,sha256=hqrBKe3gtOVATPn1QP5BEpqSVNhJZ2x_Cg11t0Bv-lc,2864 +transformers/models/bridgetower/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/configuration_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/image_processing_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/modeling_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/__pycache__/processing_bridgetower.cpython-310.pyc,, +transformers/models/bridgetower/configuration_bridgetower.py,sha256=ugoxQU3dN5BvV-xCraSR_f_a5gNZ1EMYXaABP6Gz2sI,16313 +transformers/models/bridgetower/image_processing_bridgetower.py,sha256=MS7LDFMYTUJHF-WIbxpcJAVPuhZGFkQCC8f7qgKkLxk,26821 +transformers/models/bridgetower/modeling_bridgetower.py,sha256=-zRrxV9HZvoleRqP5WC9U_d05_UL1RpzvhhpBKi3qRA,88197 +transformers/models/bridgetower/processing_bridgetower.py,sha256=FriChYR6CPgyDBUwOJrDlCJBuHo9RBIWXwN_NxgSGN8,5057 +transformers/models/bros/__init__.py,sha256=T1UKhF6X3-gs8q9-oIzspFbX-kmnMVirfNN1yZyCT2o,2445 +transformers/models/bros/__pycache__/__init__.cpython-310.pyc,, +transformers/models/bros/__pycache__/configuration_bros.cpython-310.pyc,, +transformers/models/bros/__pycache__/convert_bros_to_pytorch.cpython-310.pyc,, +transformers/models/bros/__pycache__/modeling_bros.cpython-310.pyc,, +transformers/models/bros/__pycache__/processing_bros.cpython-310.pyc,, +transformers/models/bros/configuration_bros.py,sha256=bsY0G9Wq7fUrRNbPMcq6JDyTfv2VIYkJXUsTZfXnJ6k,6488 +transformers/models/bros/convert_bros_to_pytorch.py,sha256=kxZDGzvIYxz9hbIzzJOfOj5tixji5efb2884rqwoY6A,4871 +transformers/models/bros/modeling_bros.py,sha256=ffxJcV4jX4dmo0dl053rRtB9ujwPi3j0ij5NQd4fcrQ,57936 +transformers/models/bros/processing_bros.py,sha256=FQUu5czHHvQzZ1P5N9GhfjZu4cmZw_mYKuX0VNjrB54,4193 +transformers/models/byt5/__init__.py,sha256=06YhQd8TFNbc9lU5qzERZUdcSWIFxOeBOaqQh6S4WC4,942 +transformers/models/byt5/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/byt5/__pycache__/convert_byt5_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/byt5/__pycache__/tokenization_byt5.cpython-310.pyc,, +transformers/models/byt5/convert_byt5_original_tf_checkpoint_to_pytorch.py,sha256=83tKCwYRSRW7zXtm9cmszqtPhpw44cH8Cj0SWUSBgN0,2120 +transformers/models/byt5/tokenization_byt5.py,sha256=DF8GtvaS6EpR1UqaQEh6IRaT0lRQD3CKineT6ngRy_4,10031 +transformers/models/camembert/__init__.py,sha256=UBlxBknmDgdOkelwnQSGkAejq1meoGd2CgmQtGayhII,4443 +transformers/models/camembert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/camembert/__pycache__/configuration_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/modeling_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/modeling_tf_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/tokenization_camembert.cpython-310.pyc,, +transformers/models/camembert/__pycache__/tokenization_camembert_fast.cpython-310.pyc,, +transformers/models/camembert/configuration_camembert.py,sha256=PUXoVUL_wD0GzQZF1tPUvevuMpg1KZMYw8z3MemsGJw,7451 +transformers/models/camembert/modeling_camembert.py,sha256=YOSDDRPyeOqyCIi3iW6j6hsGuzKzzkVAmhzKZRrbi-k,72509 +transformers/models/camembert/modeling_tf_camembert.py,sha256=dZ22NfFnEGrgnmudjH6PQ04cp_yD4FnHpPT596Ljpgg,81636 +transformers/models/camembert/tokenization_camembert.py,sha256=RbjLk4_erty0RUfNn__WEA6q5xmOECFmyJ3eEHLuWxk,13978 +transformers/models/camembert/tokenization_camembert_fast.py,sha256=SxE-LECgWeoJoHax_4ivwPTM2Cmbd7_5IqrzSF-T8rc,8274 +transformers/models/canine/__init__.py,sha256=7AYQEAa5qVyCZ73fkPg0yXl5-YpLg55i3RpY1J3KulM,2272 +transformers/models/canine/__pycache__/__init__.cpython-310.pyc,, +transformers/models/canine/__pycache__/configuration_canine.cpython-310.pyc,, +transformers/models/canine/__pycache__/convert_canine_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/canine/__pycache__/modeling_canine.cpython-310.pyc,, +transformers/models/canine/__pycache__/tokenization_canine.cpython-310.pyc,, +transformers/models/canine/configuration_canine.py,sha256=VtShXIcJvDsUTXigsxBaok_t2UPv7fB49uVWslR_c28,6654 +transformers/models/canine/convert_canine_original_tf_checkpoint_to_pytorch.py,sha256=vGfFFo49PfyXtZdgIQHRcqMPcbmF8aMEC9DiHMyEsn0,2117 +transformers/models/canine/modeling_canine.py,sha256=2x2L3uE9QUKekoDb5NQFnl0Ysv2Da3cH75nnoo7EVWU,73494 +transformers/models/canine/tokenization_canine.py,sha256=bLQPsvTpk8GFuH3rsUR6o0l6f9Ldvb4S3Os0H-oFQQc,9287 +transformers/models/chinese_clip/__init__.py,sha256=SNfgqh2dGAcoNXXZx-8XFNO3UDriK_yV7vf-M23Qnfk,2919 +transformers/models/chinese_clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/configuration_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/convert_chinese_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/feature_extraction_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/image_processing_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/modeling_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/__pycache__/processing_chinese_clip.cpython-310.pyc,, +transformers/models/chinese_clip/configuration_chinese_clip.py,sha256=Hh_PJMLRsFI22qEPskV-vsW4iIzrv5nWlkmfPW0rsec,22431 +transformers/models/chinese_clip/convert_chinese_clip_original_pytorch_to_hf.py,sha256=-0bnVcdXxStmygkyj6S1hIGCVbpEbe3cM7AoshHH5ZE,5069 
+transformers/models/chinese_clip/feature_extraction_chinese_clip.py,sha256=znduyOyJ-Qdx4MC5CPb6MFZ-Wrb5PLgHWRh0xfoULR0,1247 +transformers/models/chinese_clip/image_processing_chinese_clip.py,sha256=eIjF9ejRpZBkmGpzNSopH8FicTbd_5GuzvnA1vY0ia4,15946 +transformers/models/chinese_clip/modeling_chinese_clip.py,sha256=ix32T8Zqg4ccIRxU5A6dBsCAJ_sPklKsI8vVvtRWaBE,73081 +transformers/models/chinese_clip/processing_chinese_clip.py,sha256=xeAbYW_LoAVP3wwtnOdjoJ3kvCPUVoE8OFWMldm_QXY,6706 +transformers/models/clap/__init__.py,sha256=MOoheQt_0P8KCRlN4QiWyzrskH9dUUfSSF_pZpJEchw,2322 +transformers/models/clap/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clap/__pycache__/configuration_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/convert_clap_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clap/__pycache__/feature_extraction_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/modeling_clap.cpython-310.pyc,, +transformers/models/clap/__pycache__/processing_clap.cpython-310.pyc,, +transformers/models/clap/configuration_clap.py,sha256=Y1vAbl7FYI2UehNFgLNwETR0CrD-72ymSkPT-igvhkU,20382 +transformers/models/clap/convert_clap_original_pytorch_to_hf.py,sha256=FqHoVAYXIzfUY9342azwlm9zfSP7QdS8p-u9Q6RE_K4,5149 +transformers/models/clap/feature_extraction_clap.py,sha256=rN5ZDLkqtfddEsT6kcFW2OVe7nehoPUE4HM7T3ua5us,18692 +transformers/models/clap/modeling_clap.py,sha256=pSoYaFvFpFL877F0Yz8Rp8WStKred_14115B2vMWL6A,104794 +transformers/models/clap/processing_clap.py,sha256=QpXK1vA69fFLzQesu-qetj22YiV_BiO-0cpatq8ViKo,5705 +transformers/models/clip/__init__.py,sha256=4_WowO4qRlP_COGzdscG6QH0pZU-Q5a38GsrtBTlSHs,5193 +transformers/models/clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clip/__pycache__/configuration_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/convert_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clip/__pycache__/feature_extraction_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/image_processing_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_flax_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/modeling_tf_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/processing_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/tokenization_clip.cpython-310.pyc,, +transformers/models/clip/__pycache__/tokenization_clip_fast.cpython-310.pyc,, +transformers/models/clip/configuration_clip.py,sha256=vMFHNjjfLiqeJTtQ6jN-mB8tOQU2QlixyX5VGXZo52Y,20990 +transformers/models/clip/convert_clip_original_pytorch_to_hf.py,sha256=3_eKm-gpqB5DNvL8b3OKSUrjG7YFxqrQl1DBdL_IboA,5306 +transformers/models/clip/feature_extraction_clip.py,sha256=hgRfD-s9DoI7tzDLAJ0EW3rSbkY9dOiGqoGClOiRiBM,1172 +transformers/models/clip/image_processing_clip.py,sha256=zmKMxx_qthrWGJBOO9aeVvVaB7syFsdNLHyZYxo0LQA,16512 +transformers/models/clip/modeling_clip.py,sha256=wNUb3ENle3SltgMs3ntlNf0baktqeonmoz9zVrM35jM,61190 +transformers/models/clip/modeling_flax_clip.py,sha256=4uabm9t6i4bnqRR3DZrGk7X1NcaV78L6b6E6i0Gkl2U,50517 +transformers/models/clip/modeling_tf_clip.py,sha256=4DBx81Dc8OmozGPQKeUOZqpvXUIVSrCHooJVQollyHw,60461 +transformers/models/clip/processing_clip.py,sha256=xXp4RfloqWH1K1dFCL81jGvaOowCNQ2s0CU1vz2ClP8,7148 +transformers/models/clip/tokenization_clip.py,sha256=lDCHtIoqfquNg1n69Eok2jnV1mv5ebZStvt32cnowRU,20584 
+transformers/models/clip/tokenization_clip_fast.py,sha256=vrfnIp8ZOxNHwzEkCJKSAMHaY7Jc8puUHIuudJ7Iuj0,6555 +transformers/models/clipseg/__init__.py,sha256=XmEjQiZo2l7fQvPX8Tm_rsd3wItyBrBg3gtvDAkOTZM,2179 +transformers/models/clipseg/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/configuration_clipseg.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/convert_clipseg_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/modeling_clipseg.cpython-310.pyc,, +transformers/models/clipseg/__pycache__/processing_clipseg.cpython-310.pyc,, +transformers/models/clipseg/configuration_clipseg.py,sha256=ra61UArge-I-7ojfmu8bDzIOcYeePhhi5RyPVFB-zJo,21029 +transformers/models/clipseg/convert_clipseg_original_pytorch_to_hf.py,sha256=kYyPxdpdtt6nSxD65tXUTMbN0xPyyzjfTOOMbQ8OL0Y,11114 +transformers/models/clipseg/modeling_clipseg.py,sha256=74i474c_eJ-9FK1q4siNMd-5q-h-Xg_5eupowsmwutg,64514 +transformers/models/clipseg/processing_clipseg.py,sha256=dm7u-6S5Hg1ITAc0lYzXRJssiR92LOMkWbnR7p4eHzE,7790 +transformers/models/clvp/__init__.py,sha256=VUtmHMpw33TwZIXIYxV_ImQSKobm9ItMAZnw87Ke4Dg,2396 +transformers/models/clvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/clvp/__pycache__/configuration_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/convert_clvp_to_hf.cpython-310.pyc,, +transformers/models/clvp/__pycache__/feature_extraction_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/modeling_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/number_normalizer.cpython-310.pyc,, +transformers/models/clvp/__pycache__/processing_clvp.cpython-310.pyc,, +transformers/models/clvp/__pycache__/tokenization_clvp.cpython-310.pyc,, +transformers/models/clvp/configuration_clvp.py,sha256=1uV0hAVyFwCcZYgDmnHMaog7sh_CiRhdWM_nA-GnCec,21029 +transformers/models/clvp/convert_clvp_to_hf.py,sha256=1WYf_vwj1CeQ_VU9iMqu7Grr_MmlAsaKEK1Lojk6yM4,9326 +transformers/models/clvp/feature_extraction_clvp.py,sha256=rq0Ygr1pCT1DK4mMzv6f4b06zgXeAwT29GYSzu1Fprw,10935 +transformers/models/clvp/modeling_clvp.py,sha256=XtbPX-UX87uEnxLrAVopZPT_cRwAUTzqwQ5e7PRvf7A,91213 +transformers/models/clvp/number_normalizer.py,sha256=gJb8KFEdsDWgzubs6cTn1i2q2R1fHCYs9C3k2hBoCyU,8857 +transformers/models/clvp/processing_clvp.py,sha256=zn13cG8abp5_ZFhoL_QQxcoTRS57rLKXBh9H5KAUBxk,3605 +transformers/models/clvp/tokenization_clvp.py,sha256=dNbrXIhYcqum_vonAZ7xsxvKimu1to6CdDDu5T5-0XA,14800 +transformers/models/code_llama/__init__.py,sha256=S1xpVZ6cLZxN1ADmRNp7dCsoKQKnb3-Tw-HkHjHcnBY,1882 +transformers/models/code_llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/code_llama/__pycache__/tokenization_code_llama.cpython-310.pyc,, +transformers/models/code_llama/__pycache__/tokenization_code_llama_fast.cpython-310.pyc,, +transformers/models/code_llama/tokenization_code_llama.py,sha256=_cpVOfwPY1I39Jyea7FLoXDam-W7l3-nJp6zoCJ5nUc,22959 +transformers/models/code_llama/tokenization_code_llama_fast.py,sha256=REf6FgNg7WbBovoDFKJey0VekXWMFVJGMVfOwHUCZaU,19758 +transformers/models/codegen/__init__.py,sha256=Zb96Hyd6W5WaIc7l-psLnEhYjANmwxzZlAR-g37xKkI,2443 +transformers/models/codegen/__pycache__/__init__.cpython-310.pyc,, +transformers/models/codegen/__pycache__/configuration_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/modeling_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/tokenization_codegen.cpython-310.pyc,, +transformers/models/codegen/__pycache__/tokenization_codegen_fast.cpython-310.pyc,, 
+transformers/models/codegen/configuration_codegen.py,sha256=yDXZ9gsIW7pl5Tjah0XxgsLAfRr3zPoe2SlLEiltFTk,9591 +transformers/models/codegen/modeling_codegen.py,sha256=6Y_fXx2eRskk3_lmsWf_iUJ7bn0iwHppfCzVdyhUj3Y,31320 +transformers/models/codegen/tokenization_codegen.py,sha256=_-xlrNZM80xsLxqdH16T-TCDr_2Uq_dWfwT-vDvnhbo,16531 +transformers/models/codegen/tokenization_codegen_fast.py,sha256=PS7r7kD-hdmuBOPgAlWJG9IFFAWC0e6IYkNMfqBQ6sw,11433 +transformers/models/cohere/__init__.py,sha256=JRbmLNV1IKapV0NxDyyYL9-ZNPuHIWkYpBPbyUCwKAI,2214 +transformers/models/cohere/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cohere/__pycache__/configuration_cohere.cpython-310.pyc,, +transformers/models/cohere/__pycache__/modeling_cohere.cpython-310.pyc,, +transformers/models/cohere/__pycache__/tokenization_cohere_fast.cpython-310.pyc,, +transformers/models/cohere/configuration_cohere.py,sha256=rCmrKme3Qh74CFYClgeGUgqbDLWYs9DtzEIlrDxaIrA,7361 +transformers/models/cohere/modeling_cohere.py,sha256=M_TDYvfnEfWV8qwnSNMhQLRQesU_GJ1rL7hPgXAvysU,58569 +transformers/models/cohere/tokenization_cohere_fast.py,sha256=QNO7uiaMfmvHeyIfFPoUewJ0sz3LzltSpkCP8NeffJA,41992 +transformers/models/conditional_detr/__init__.py,sha256=aFyaZb6RKCOPPf_kPK83WhyaDO5NFiox70ZbMe5gxvw,2828 +transformers/models/conditional_detr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/configuration_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/convert_conditional_detr_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/feature_extraction_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/image_processing_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/__pycache__/modeling_conditional_detr.cpython-310.pyc,, +transformers/models/conditional_detr/configuration_conditional_detr.py,sha256=x1o4bO0eaiPvhZcA0ZtIwdsKf3IZOzZ3iNEtd4_hb4I,13306 +transformers/models/conditional_detr/convert_conditional_detr_original_pytorch_checkpoint_to_pytorch.py,sha256=O0da9fOwcPhpQSaa0Ci34txn-9YF9fAMGvRHK0dCk3Q,15930 +transformers/models/conditional_detr/feature_extraction_conditional_detr.py,sha256=opHXZebd-6cMJnO6RbrAdmVYmnkNzK1up_fPlHTSLrk,1553 +transformers/models/conditional_detr/image_processing_conditional_detr.py,sha256=4ixTeCxDOS9bz4gyUTVE1GjAJeDCK8srbaKRbt0t-LM,81280 +transformers/models/conditional_detr/modeling_conditional_detr.py,sha256=BA-OJXHlNglkGfMq15IyO4_nqv3LQbe8ENhjAYhmBHc,132187 +transformers/models/convbert/__init__.py,sha256=wkLfe2pjkQmfQ0sd28ixnL1__YYimYDtT5FP1bRD0YE,4069 +transformers/models/convbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/convbert/__pycache__/configuration_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/convert_convbert_original_tf1_checkpoint_to_pytorch_and_tf2.cpython-310.pyc,, +transformers/models/convbert/__pycache__/modeling_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/modeling_tf_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/tokenization_convbert.cpython-310.pyc,, +transformers/models/convbert/__pycache__/tokenization_convbert_fast.cpython-310.pyc,, +transformers/models/convbert/configuration_convbert.py,sha256=_Ryl9j6dcPjzID_tAPrzgJgwf3NU7xNcJU0haut1lo8,6934 +transformers/models/convbert/convert_convbert_original_tf1_checkpoint_to_pytorch_and_tf2.py,sha256=vTZyGhG9v7o4rDuP9-xM26gX1EzlCda7Sn_ELT9n3Gk,2108 
+transformers/models/convbert/modeling_convbert.py,sha256=8DZ6IVR32VDfq8ALr_nZcJ0wLh5ml9i8s3_CT_-UQgM,58380 +transformers/models/convbert/modeling_tf_convbert.py,sha256=5E36T-ANwFpieUU0uGWImtpTsyauum9N2ldRE9TFBdw,61468 +transformers/models/convbert/tokenization_convbert.py,sha256=_FUqWP5wYDZ5Z0A8Y84Ptc0c5iVAUlq0dCU9Z7prvAs,20970 +transformers/models/convbert/tokenization_convbert_fast.py,sha256=XyV1ssOlvc2Ln3rgXZ9_eb7cDzYr_x0nETPei-AtclE,7780 +transformers/models/convnext/__init__.py,sha256=K8TKvIQuVogfZPifZjZeCwGJKA_vnASMr7LWx4CggqA,3150 +transformers/models/convnext/__pycache__/__init__.cpython-310.pyc,, +transformers/models/convnext/__pycache__/configuration_convnext.cpython-310.pyc,, +transformers/models/convnext/__pycache__/convert_convnext_to_pytorch.cpython-310.pyc,, +transformers/models/convnext/__pycache__/feature_extraction_convnext.cpython-310.pyc,, +transformers/models/convnext/__pycache__/image_processing_convnext.cpython-310.pyc,, +transformers/models/convnext/__pycache__/modeling_convnext.cpython-310.pyc,, +transformers/models/convnext/__pycache__/modeling_tf_convnext.cpython-310.pyc,, +transformers/models/convnext/configuration_convnext.py,sha256=DKHCH1QXO8vjBJOWa0a1JRBqlbNWqmZrQ1BSisMlp5M,6227 +transformers/models/convnext/convert_convnext_to_pytorch.py,sha256=6QenssUB5Op--7nvPTPjRUEozX-4kljweJvc-blSpnQ,10220 +transformers/models/convnext/feature_extraction_convnext.py,sha256=TyFMochXYlN3vKH7Ud0nXagzxGhio2Bfma4ofceR_zA,1200 +transformers/models/convnext/image_processing_convnext.py,sha256=JIXegI7ZMZUxJApMMoY4JTmA5iNxJm9FN3UQnQwRpNc,16288 +transformers/models/convnext/modeling_convnext.py,sha256=2Bz-3mive7eeRtCSDPWV4E2MvXkIJ5-YwN6xMTiWuTU,21883 +transformers/models/convnext/modeling_tf_convnext.py,sha256=E21qdpGpPYVH4xJcMyjw5tdTCpoVobcjEcqhhtSID90,27195 +transformers/models/convnextv2/__init__.py,sha256=JmOrlR6-q7yFZqSG7obPonJSuSpLVhTOIax7X-3FDwY,2825 +transformers/models/convnextv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/configuration_convnextv2.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/convert_convnextv2_to_pytorch.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/modeling_convnextv2.cpython-310.pyc,, +transformers/models/convnextv2/__pycache__/modeling_tf_convnextv2.cpython-310.pyc,, +transformers/models/convnextv2/configuration_convnextv2.py,sha256=8zueCHJbSR0BqWzIqn7G8p49usf0QVUHQJVjGC9IFQE,5525 +transformers/models/convnextv2/convert_convnextv2_to_pytorch.py,sha256=Yswl5UwLP0t0tC8O2b8wix2beNaMtPy7areKFCuEccg,12473 +transformers/models/convnextv2/modeling_convnextv2.py,sha256=VT0O_uhD08q8fJKv9wKMW5zWXIcWDzt8IHcLgcToIrM,23655 +transformers/models/convnextv2/modeling_tf_convnextv2.py,sha256=RAbH2qhb2IPA9QYlqofUQ6ACGoqaKAAr6s1LeUeGgs4,27595 +transformers/models/cpm/__init__.py,sha256=9SmT0nL5DgGjXxmPaQFi9GGPXWuhFic2DX2GsF-BynQ,1816 +transformers/models/cpm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cpm/__pycache__/tokenization_cpm.cpython-310.pyc,, +transformers/models/cpm/__pycache__/tokenization_cpm_fast.cpython-310.pyc,, +transformers/models/cpm/tokenization_cpm.py,sha256=CS26Yw8B-Jo_HIwVz6C-VR12d4MVxNh-M0mM3qTlYDE,15026 +transformers/models/cpm/tokenization_cpm_fast.py,sha256=Rt2x767ZSd-A_Khz5itY9iw4AQXtLhKCxP8w-sUz1wU,10425 +transformers/models/cpmant/__init__.py,sha256=5hTyJtQwoONrf9-BMvt_nT_bovkj9avoSk9UdLCvW4w,2117 +transformers/models/cpmant/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cpmant/__pycache__/configuration_cpmant.cpython-310.pyc,, 
+transformers/models/cpmant/__pycache__/modeling_cpmant.cpython-310.pyc,, +transformers/models/cpmant/__pycache__/tokenization_cpmant.cpython-310.pyc,, +transformers/models/cpmant/configuration_cpmant.py,sha256=F69mQQvTS_eskh5_8-4mmtcgHMnuNqumodJHABqyRkk,5215 +transformers/models/cpmant/modeling_cpmant.py,sha256=AwKfTJbnqiNX4IssjQtdy1yA1tZHfaByKYIVhoja7uw,37512 +transformers/models/cpmant/tokenization_cpmant.py,sha256=cfySlpG9_Ula60VT3BxPJlcFxmrQ--CQxGswVZNgWGk,9711 +transformers/models/ctrl/__init__.py,sha256=-Sa7nUQv3Cxj4KLXFaBtnkG_r3uIdpbU_Q_TmMl1lKM,2688 +transformers/models/ctrl/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/configuration_ctrl.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/modeling_ctrl.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/modeling_tf_ctrl.cpython-310.pyc,, +transformers/models/ctrl/__pycache__/tokenization_ctrl.cpython-310.pyc,, +transformers/models/ctrl/configuration_ctrl.py,sha256=0r-lMZF0FCvkxDSSut0UoFqlJkWIap3cWIChSdq0-ao,4754 +transformers/models/ctrl/modeling_ctrl.py,sha256=zWef4WjFz6iro6fxfYkPP6y90BEO5QmYsOyuQTv1U5k,35697 +transformers/models/ctrl/modeling_tf_ctrl.py,sha256=54SBEtzFfUjGTtluw7RnFH1GOYXTZaPyazTqghxU2aI,39730 +transformers/models/ctrl/tokenization_ctrl.py,sha256=EGNrlwwhfT39EWpoLFk8VvclzCnpfgttJHwM7VUSoqw,8058 +transformers/models/cvt/__init__.py,sha256=dk1C0zaBDT0dl7BYLe1mRb85Dp_a_IHomekjOjYPHJ8,2434 +transformers/models/cvt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/cvt/__pycache__/configuration_cvt.cpython-310.pyc,, +transformers/models/cvt/__pycache__/convert_cvt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/cvt/__pycache__/modeling_cvt.cpython-310.pyc,, +transformers/models/cvt/__pycache__/modeling_tf_cvt.cpython-310.pyc,, +transformers/models/cvt/configuration_cvt.py,sha256=VZLhB8Ol81xijVNMgrTY2ves1pzVQ5hAyYWuZsyQAJw,6754 +transformers/models/cvt/convert_cvt_original_pytorch_checkpoint_to_pytorch.py,sha256=miqNzPWIAjwl5rtkWOmRUJl-18X-9cRXXWb9M3ScHI4,13570 +transformers/models/cvt/modeling_cvt.py,sha256=ErKQINT0sm045FNLxrrI_mu6jc2AEeAavKU130VPdtE,28764 +transformers/models/cvt/modeling_tf_cvt.py,sha256=aOCKA0ON6OY3acBGGRwpM_zZcOY72YeBCINi-8R1IT4,43563 +transformers/models/data2vec/__init__.py,sha256=1Pq8n8wNccLQ76e8oNDwOemqh-E0eMKpr6tdt2ata8w,4933 +transformers/models/data2vec/__pycache__/__init__.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/configuration_data2vec_audio.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/configuration_data2vec_text.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/configuration_data2vec_vision.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/convert_data2vec_audio_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/convert_data2vec_text_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_data2vec_audio.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_data2vec_text.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_data2vec_vision.cpython-310.pyc,, +transformers/models/data2vec/__pycache__/modeling_tf_data2vec_vision.cpython-310.pyc,, +transformers/models/data2vec/configuration_data2vec_audio.py,sha256=XsmBloKL5Bop4r7O4ag5B2R_h94EIIwdJRUzT82rKKE,16322 
+transformers/models/data2vec/configuration_data2vec_text.py,sha256=B0L25th5BhuUQ4g1Jz0Ac-XqptaMMZjxs9hiJo_AaFs,7380 +transformers/models/data2vec/configuration_data2vec_vision.py,sha256=8WDKE8oQ9InFj3SMpKXdAoFCXRrBe-AfRLtGhkVvZtI,9347 +transformers/models/data2vec/convert_data2vec_audio_original_pytorch_checkpoint_to_pytorch.py,sha256=czYaA_tlF-uCDMFV1RFaL5g8QJRozBiVUCu9nuhLcZU,10858 +transformers/models/data2vec/convert_data2vec_text_original_pytorch_checkpoint_to_pytorch.py,sha256=4scSS9J1m1xG6sy_BLvjbCeEL8Ke2RhNtNqsVt2zUCI,9580 +transformers/models/data2vec/convert_data2vec_vision_original_pytorch_checkpoint_to_pytorch.py,sha256=qKjV-jqIgL-6i17m4yQLW_93SbPpGxQnvHjuy1xVxQU,15340 +transformers/models/data2vec/modeling_data2vec_audio.py,sha256=q7ZgT0uIF90cblhwXUkcXJUHfr2EV4u37Z5Lmvet7ks,65405 +transformers/models/data2vec/modeling_data2vec_text.py,sha256=zGEJKXfSx2dzYaTz7yW8BnkEo4wgbzTNFQWuheSuzVE,71278 +transformers/models/data2vec/modeling_data2vec_vision.py,sha256=lpCgPsKjll09wAmVI8C-CrrRRUgp2X21b4Fj8tmdCYs,53758 +transformers/models/data2vec/modeling_tf_data2vec_vision.py,sha256=NptTURm89gID4Xn7M5whOHxMX2RWHVBWDWcca2G0srA,73356 +transformers/models/dbrx/__init__.py,sha256=n36C-BWFjJ9wkgBAv764sGksJFOL_fkME1fe1cTm-sg,1513 +transformers/models/dbrx/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dbrx/__pycache__/configuration_dbrx.cpython-310.pyc,, +transformers/models/dbrx/__pycache__/modeling_dbrx.cpython-310.pyc,, +transformers/models/dbrx/configuration_dbrx.py,sha256=5A7PHWzLZ7VQ7rPVXbfV0Ix3SAQ72DWdXNVZHvMmmtk,11054 +transformers/models/dbrx/modeling_dbrx.py,sha256=4YYQHqZLZcn1v8ETXIlBfl8GLe3QWOlHWHye5heek3A,70159 +transformers/models/deberta/__init__.py,sha256=azYcZaZso6o7T3SDyUrczkAZ4ZzgDh4hcPoT0bgPRSE,3677 +transformers/models/deberta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deberta/__pycache__/configuration_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/modeling_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/modeling_tf_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/tokenization_deberta.cpython-310.pyc,, +transformers/models/deberta/__pycache__/tokenization_deberta_fast.cpython-310.pyc,, +transformers/models/deberta/configuration_deberta.py,sha256=vKByhl26MNUkc2PbGuAjJ7jIa_wS3YRd0iozdrjFxmU,8782 +transformers/models/deberta/modeling_deberta.py,sha256=AiY5atrWFBRFdyZxYLRgCcENxvBiAGTqC2carfQ09vs,57918 +transformers/models/deberta/modeling_tf_deberta.py,sha256=Z0GnuxXSoGQtgvjkd8MFvP7pGZ_4GaXHtsnwv-iwf9s,68935 +transformers/models/deberta/tokenization_deberta.py,sha256=7ikisAJGKrVD1bhhsYlcN4MHhP0R_UqUEHCu5MzpDrc,17052 +transformers/models/deberta/tokenization_deberta_fast.py,sha256=MfbMLRaqECwKDRvpJ6dAr317gKN0q6TbjjTSe5JTL0M,10722 +transformers/models/deberta_v2/__init__.py,sha256=afG1pzu0TIczwpL6vPJXnwkO5Sn9R5qrMvjaTzysH1U,3981 +transformers/models/deberta_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/configuration_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/modeling_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/modeling_tf_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2.cpython-310.pyc,, +transformers/models/deberta_v2/__pycache__/tokenization_deberta_v2_fast.cpython-310.pyc,, +transformers/models/deberta_v2/configuration_deberta_v2.py,sha256=Hp9o7TDKpc3kQ7c_rsSsUvXJIqKyR3M9M0sJ87UGPN0,8721 
+transformers/models/deberta_v2/modeling_deberta_v2.py,sha256=E-bduoiC6I0Wozr0m9VkyggI9yvZjUaELHBm0-1CjpI,67493 +transformers/models/deberta_v2/modeling_tf_deberta_v2.py,sha256=B6ETqQpXYwLr1-zBHWis1GsuLvue9OMdQ2cjj9MTXus,81234 +transformers/models/deberta_v2/tokenization_deberta_v2.py,sha256=j0JyIHf3JHcrO_mpGz_W_igf6GJS2XWADOp3CwsEkUw,20704 +transformers/models/deberta_v2/tokenization_deberta_v2_fast.py,sha256=qwgQDjV0k_3d40RgzlUiIDBqwRO1v_dbqwtcf2WiZ-M,9758 +transformers/models/decision_transformer/__init__.py,sha256=geVmBybTFepK0keGuRrLYl6hwZhT5I2BK4dfeYFDqWw,2124 +transformers/models/decision_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/decision_transformer/__pycache__/configuration_decision_transformer.cpython-310.pyc,, +transformers/models/decision_transformer/__pycache__/modeling_decision_transformer.cpython-310.pyc,, +transformers/models/decision_transformer/configuration_decision_transformer.py,sha256=7Z0GZiYd90utw6yBr5xUqIT-iyH4eiq5LGDq3ALkCzw,7099 +transformers/models/decision_transformer/modeling_decision_transformer.py,sha256=QcIDiihZ3Ct8Ku08wSkHkfLj6YsCiDd8QyMW6D7ILHA,43049 +transformers/models/deformable_detr/__init__.py,sha256=jwNDOMAnuD5Efvu3FYvA1H9JJB9QBb6NpoaoCCJU1Ns,2599 +transformers/models/deformable_detr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/configuration_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/convert_deformable_detr_to_pytorch.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/feature_extraction_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/image_processing_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/load_custom.cpython-310.pyc,, +transformers/models/deformable_detr/__pycache__/modeling_deformable_detr.cpython-310.pyc,, +transformers/models/deformable_detr/configuration_deformable_detr.py,sha256=5n4pHUoiDWEUeEGObAyY3loaKNLUfPBZDqEayBWMHsM,14522 +transformers/models/deformable_detr/convert_deformable_detr_to_pytorch.py,sha256=264dW2XMu4QcgO6IaMa4eOjrIHErz-RLw_9FLD6C46Q,9477 +transformers/models/deformable_detr/feature_extraction_deformable_detr.py,sha256=GwYaT6B6-Fu2Jbl8CALodb7Lz4gr9jSRfq01QfLQc7Y,1546 +transformers/models/deformable_detr/image_processing_deformable_detr.py,sha256=JizyRpHgEX8ZNruNiFth0a_Co-D0M3cFEbi8At8jCrU,68712 +transformers/models/deformable_detr/load_custom.py,sha256=0jENX1Mkz0bYlyUYYgp1YYEpQ8r32degzoL4CmVGe3w,1559 +transformers/models/deformable_detr/modeling_deformable_detr.py,sha256=pcwxvcPrryk9Dl0tDsXXU8CAOZTHQEodnai-u7QB1Lw,121344 +transformers/models/deit/__init__.py,sha256=ZVWuhflGzxt-AZ2wcCTX0JfXBY3puVD_O9WkNqfOH1A,3486 +transformers/models/deit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deit/__pycache__/configuration_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/convert_deit_timm_to_pytorch.cpython-310.pyc,, +transformers/models/deit/__pycache__/feature_extraction_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/image_processing_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/modeling_deit.cpython-310.pyc,, +transformers/models/deit/__pycache__/modeling_tf_deit.cpython-310.pyc,, +transformers/models/deit/configuration_deit.py,sha256=5DiDN1-_FHZX59MM9n5gITzkt8mDtN5Td2497ZQUBaU,5792 +transformers/models/deit/convert_deit_timm_to_pytorch.py,sha256=JMCXzccvcbz1euXpqx-pb86V2PVDLKl-OYbFDLvvSZU,9217 
+transformers/models/deit/feature_extraction_deit.py,sha256=1j_aV0oAZUofSYJGCEFRo0WNd_zVEXjj3SFlTQSuV1E,1172 +transformers/models/deit/image_processing_deit.py,sha256=VgMa1Wp87jIbbkcfqNUj_61sapJtXzOFZ0vFIbKpcdA,15720 +transformers/models/deit/modeling_deit.py,sha256=wV7Ag5ODDECuJz8rJWuRby3RFZqEhQRmuXc1T6DzBjw,38183 +transformers/models/deit/modeling_tf_deit.py,sha256=k0x2P8L_4SZ9CEGbDZ7_ArofzLRNdfsL7-E9GwwCCrw,49513 +transformers/models/deprecated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/deprecated/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/__pycache__/_archive_maps.cpython-310.pyc,, +transformers/models/deprecated/_archive_maps.py,sha256=OsadX80pmOWHO2xgu8WBe8vgHxwko-MDcglgM_jG_30,124695 +transformers/models/deprecated/bort/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/deprecated/bort/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/bort/__pycache__/convert_bort_original_gluonnlp_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/bort/convert_bort_original_gluonnlp_checkpoint_to_pytorch.py,sha256=y0wlQneBswkzekq70fW2-mqsn9RuITThO1AKV_8Cn5I,14068 +transformers/models/deprecated/mctct/__init__.py,sha256=Rbzjcs6HiXhpUeaKRE6Qtj9XsIRLkUrFAiQnbOerMrM,1892 +transformers/models/deprecated/mctct/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/configuration_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/feature_extraction_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/modeling_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/__pycache__/processing_mctct.cpython-310.pyc,, +transformers/models/deprecated/mctct/configuration_mctct.py,sha256=sRUJF1RcoVRib5IAxg4u1sBLj_XxH-11hry18Tfl8ok,9159 +transformers/models/deprecated/mctct/feature_extraction_mctct.py,sha256=JsaSE20NeqBX8Uw-07Y5HdUcQtbYZqCrTN18Wu2B4rI,13460 +transformers/models/deprecated/mctct/modeling_mctct.py,sha256=Xmh-PfvY2jJ0cAgcuuaWPsW0FvmZJQ-FFglPr_XgiPI,32881 +transformers/models/deprecated/mctct/processing_mctct.py,sha256=0ejBpQWA6YVuU0A7hrFg797hFZnOO7GexVU5Da7xLP0,5930 +transformers/models/deprecated/mmbt/__init__.py,sha256=0CCmesCwGIMNFlf2oDsL0gYaCSpsfAC1_bMOXRcAgF4,1480 +transformers/models/deprecated/mmbt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/mmbt/__pycache__/configuration_mmbt.cpython-310.pyc,, +transformers/models/deprecated/mmbt/__pycache__/modeling_mmbt.cpython-310.pyc,, +transformers/models/deprecated/mmbt/configuration_mmbt.py,sha256=agMAOVRnUrMlA8C6adBRLTuLmt8qG4lm4ykjGwS-qs4,1606 +transformers/models/deprecated/mmbt/modeling_mmbt.py,sha256=daov1Smf2qd_BhebAOQiyN53C-8oZZary9m7iZV-nuU,18914 +transformers/models/deprecated/open_llama/__init__.py,sha256=Mlmat1Ln8JLYZcldnGrMfBdgOwM01CmsoQEFedbJ24g,2788 +transformers/models/deprecated/open_llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/open_llama/__pycache__/configuration_open_llama.cpython-310.pyc,, +transformers/models/deprecated/open_llama/__pycache__/modeling_open_llama.cpython-310.pyc,, +transformers/models/deprecated/open_llama/configuration_open_llama.py,sha256=xnOSXytwBznDjv6AQGEyOMQXGExxoq48MbY0lWRzyHk,7964 +transformers/models/deprecated/open_llama/modeling_open_llama.py,sha256=wawujfN1QFBKngsUWOZq6dFc3DHycnn4m_Za3jJBXgc,43896 +transformers/models/deprecated/retribert/__init__.py,sha256=yMGneTgD7_VaMhXG00Liyvt4digAfyQ_j6Ou55p8iEU,2351 
+transformers/models/deprecated/retribert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/configuration_retribert.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/modeling_retribert.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/tokenization_retribert.cpython-310.pyc,, +transformers/models/deprecated/retribert/__pycache__/tokenization_retribert_fast.cpython-310.pyc,, +transformers/models/deprecated/retribert/configuration_retribert.py,sha256=F9252tGauNQByeRMSgeJ1qPr4iJ5OL4U06WZ2rzCs34,5290 +transformers/models/deprecated/retribert/modeling_retribert.py,sha256=MIXbAXvuAGiOEMzpYEsUGoLKpPmh6xgRRrIodcg78iA,9388 +transformers/models/deprecated/retribert/tokenization_retribert.py,sha256=Va7tovgQiEME_QhTpbZNz9htj6M4dZbBWmwJv8MouUY,22090 +transformers/models/deprecated/retribert/tokenization_retribert_fast.py,sha256=Xd0oASIwAqMU-OzoGBzUFV1Lx5gx8AtKgvDpmpMNjXk,8251 +transformers/models/deprecated/tapex/__init__.py,sha256=lQutKYtwbU8ztPva0tyRnnV-zOWw6rxkGyoOUSuvnUo,926 +transformers/models/deprecated/tapex/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/tapex/__pycache__/tokenization_tapex.cpython-310.pyc,, +transformers/models/deprecated/tapex/tokenization_tapex.py,sha256=MPuB1JknrO9WY_j-Hgy8JWGNKvcowBDrjhFi-bCGALw,64347 +transformers/models/deprecated/trajectory_transformer/__init__.py,sha256=NZl7qNHOSc-VlOFIvhh4iSpn_fyGHZ8k7a9WXXG5HGg,2077 +transformers/models/deprecated/trajectory_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/trajectory_transformer/__pycache__/configuration_trajectory_transformer.cpython-310.pyc,, +transformers/models/deprecated/trajectory_transformer/__pycache__/convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/trajectory_transformer/__pycache__/modeling_trajectory_transformer.cpython-310.pyc,, +transformers/models/deprecated/trajectory_transformer/configuration_trajectory_transformer.py,sha256=HO7LnfesbLDrz16K1zfNH2yboJJHsSpPzAIR7DUksDY,7165 +transformers/models/deprecated/trajectory_transformer/convert_trajectory_transformer_original_pytorch_checkpoint_to_pytorch.py,sha256=9jmCO1yueIbzUUvOHCl62XDCG4ExTkvsgRVCe-aBG7U,3139 +transformers/models/deprecated/trajectory_transformer/modeling_trajectory_transformer.py,sha256=Ez9W4hVDl3eKn3wG3Mw6rtxcVxQwpyN1Cbbm5ootOdY,25697 +transformers/models/deprecated/transfo_xl/__init__.py,sha256=bO5xiMeUsfu9k2nqJ4N2qTGvSniyD9oA8rHEn46ne-0,3183 +transformers/models/deprecated/transfo_xl/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/configuration_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/convert_transfo_xl_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_tf_transfo_xl_utilities.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/modeling_transfo_xl_utilities.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/__pycache__/tokenization_transfo_xl.cpython-310.pyc,, +transformers/models/deprecated/transfo_xl/configuration_transfo_xl.py,sha256=cAICvGEF8Q8UKNrTekcZ2Sb08j6ZvCsdFdK5LYJQh5Q,7966 
+transformers/models/deprecated/transfo_xl/convert_transfo_xl_original_tf_checkpoint_to_pytorch.py,sha256=cUL10fYCG-kWYI3BHuKto2AIxb0V2pgPQ3Z8JU9G-Sg,4938 +transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl.py,sha256=gxO81tAVwqcgFIZkEtX1WILtMng7pR9wo19ARAj4o8U,45995 +transformers/models/deprecated/transfo_xl/modeling_tf_transfo_xl_utilities.py,sha256=Kd2QFblDU3C5U0uqrkCIg1U3vytu9a8VLccyomBUu2o,7635 +transformers/models/deprecated/transfo_xl/modeling_transfo_xl.py,sha256=k9_cCEAI9yZjCDmOAbph5ipRDIwRlEXL7aT825wC06s,55979 +transformers/models/deprecated/transfo_xl/modeling_transfo_xl_utilities.py,sha256=oZAsrKz41ek-kSV2rvFHyCHfkAM6e5NyqbGCZSxIML4,10861 +transformers/models/deprecated/transfo_xl/tokenization_transfo_xl.py,sha256=GlTLQX9uKzmx2I63lbcBSNQdnbISt2Rw5EiZ6-eJGDY,31973 +transformers/models/deprecated/van/__init__.py,sha256=LfVeE-QGxQJS0QZhWPmPD9s2yX5Pk9iA5NK90CkoyQQ,1728 +transformers/models/deprecated/van/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deprecated/van/__pycache__/configuration_van.cpython-310.pyc,, +transformers/models/deprecated/van/__pycache__/convert_van_to_pytorch.cpython-310.pyc,, +transformers/models/deprecated/van/__pycache__/modeling_van.cpython-310.pyc,, +transformers/models/deprecated/van/configuration_van.py,sha256=MMRnXRxKSfu67Ep6Mhoz7-ru0FRiTk7vS8XmnzmmyMM,4742 +transformers/models/deprecated/van/convert_van_to_pytorch.py,sha256=KW-0r4GVcmH_EzxC-qsdUn5TJw4TEl0wmUKPnJPYZaw,10374 +transformers/models/deprecated/van/modeling_van.py,sha256=DT_vMwDxQPVJBf2YaPCVhlL_ImiRHqnS18ypDAxK5zw,21383 +transformers/models/depth_anything/__init__.py,sha256=nSTo0y3RhnvBAua09yiGxbsVy8YKNb6x7Hl-jaM3Sro,1858 +transformers/models/depth_anything/__pycache__/__init__.cpython-310.pyc,, +transformers/models/depth_anything/__pycache__/configuration_depth_anything.cpython-310.pyc,, +transformers/models/depth_anything/__pycache__/convert_depth_anything_to_hf.cpython-310.pyc,, +transformers/models/depth_anything/__pycache__/modeling_depth_anything.cpython-310.pyc,, +transformers/models/depth_anything/configuration_depth_anything.py,sha256=PpAcQ3jdwqBaGnkvelCP0FnNHII_RR9UYhOFZ43_x0c,6609 +transformers/models/depth_anything/convert_depth_anything_to_hf.py,sha256=N2RCeVAiH6pzGmUZnHq0FPoCHD-EkrMviOqof1Qd7Ww,13710 +transformers/models/depth_anything/modeling_depth_anything.py,sha256=QVPwdQDTE1bsycXM-OrxUi5XznZj6iy6MQISwdrB2sw,18119 +transformers/models/deta/__init__.py,sha256=eHgP2aY7a0Of2OkxgCPavzEYvqk2etS3aqXD23Zd3Rc,2205 +transformers/models/deta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/deta/__pycache__/configuration_deta.cpython-310.pyc,, +transformers/models/deta/__pycache__/convert_deta_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/deta/__pycache__/convert_deta_swin_to_pytorch.cpython-310.pyc,, +transformers/models/deta/__pycache__/image_processing_deta.cpython-310.pyc,, +transformers/models/deta/__pycache__/modeling_deta.cpython-310.pyc,, +transformers/models/deta/configuration_deta.py,sha256=UIY42yFQMrXj9SotRVh41ZcLOGN7MDWr1pW1LPGSN_4,14043 +transformers/models/deta/convert_deta_resnet_to_pytorch.py,sha256=r-beTAdmCNONvgIPQmIf890KgDQmdi8mRoDkSWoumJg,16833 +transformers/models/deta/convert_deta_swin_to_pytorch.py,sha256=WL18erfLKYr7-pmcHC5i5t6it7EnSagPsuHs5VEgLEA,19031 +transformers/models/deta/image_processing_deta.py,sha256=32jbJymBXq0aWrMx2bUV22d6GssnIufh_emIddCWBIw,52396 +transformers/models/deta/modeling_deta.py,sha256=P0zogPkirwOJ_CtbiBos9u0SXJ6PliBkNidy_2f6j_w,139637 
+transformers/models/detr/__init__.py,sha256=dWemW6cL_QLOXK3i2uoP6ywKNrjVkpw8IXeQYbs0HfA,2438 +transformers/models/detr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/detr/__pycache__/configuration_detr.cpython-310.pyc,, +transformers/models/detr/__pycache__/convert_detr_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/detr/__pycache__/convert_detr_to_pytorch.cpython-310.pyc,, +transformers/models/detr/__pycache__/feature_extraction_detr.cpython-310.pyc,, +transformers/models/detr/__pycache__/image_processing_detr.cpython-310.pyc,, +transformers/models/detr/__pycache__/modeling_detr.cpython-310.pyc,, +transformers/models/detr/configuration_detr.py,sha256=I8DS6B1dlh4gGU6vYeiyh3ZiRvwpecxul6JpwTRXF1E,13515 +transformers/models/detr/convert_detr_original_pytorch_checkpoint_to_pytorch.py,sha256=_4fQ1N3Zat1x1r-Gr3FosWuV3pW3yFKQQgM9MKujmbY,13561 +transformers/models/detr/convert_detr_to_pytorch.py,sha256=_E63l9rWZUfwSHCfJbz-HoIDT4hxAwoHRKXj1Ni03AA,18993 +transformers/models/detr/feature_extraction_detr.py,sha256=gMyG16pNJKoimImXOyqi589hGj37OYGWb7ZoTx84d5I,1474 +transformers/models/detr/image_processing_detr.py,sha256=vESOCWU_TLQwivn38arGL_SMCZD8LUKFfbT6U84DEvs,89208 +transformers/models/detr/modeling_detr.py,sha256=aQ9ck_gmiIbAKfyFu0MnCKMsnADZcosneDliUnKGYs0,116496 +transformers/models/dialogpt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/dialogpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dialogpt/__pycache__/convert_dialogpt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/dialogpt/convert_dialogpt_original_pytorch_checkpoint_to_pytorch.py,sha256=Zp59TmLBKEs-x1-quZZeqARhpS3cTnnmgT4nCI0zsHY,1537 +transformers/models/dinat/__init__.py,sha256=Jt3EAbCCZcBjJD_sEane9NU0btqsFkOTqz6JkUtmY_4,1812 +transformers/models/dinat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dinat/__pycache__/configuration_dinat.cpython-310.pyc,, +transformers/models/dinat/__pycache__/modeling_dinat.cpython-310.pyc,, +transformers/models/dinat/configuration_dinat.py,sha256=scl3Gvwml-AnQ8CvX3TRz_4r-wGLU3jbx_Rbxdi2-RY,7426 +transformers/models/dinat/modeling_dinat.py,sha256=MbU2PyIs2OFxHaK2MtZuger03MXCiF2i5Q3GJQ5DRYg,41719 +transformers/models/dinov2/__init__.py,sha256=vQdLyp1VnVfmx0Vdvwvgvk9bsWCUArt-hPzzoDsA20I,1890 +transformers/models/dinov2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dinov2/__pycache__/configuration_dinov2.cpython-310.pyc,, +transformers/models/dinov2/__pycache__/convert_dinov2_to_hf.cpython-310.pyc,, +transformers/models/dinov2/__pycache__/modeling_dinov2.cpython-310.pyc,, +transformers/models/dinov2/configuration_dinov2.py,sha256=XQYd81rCTmpBnJxcbpPkJmX8Gnafr8NUrrcYZ0WIvjs,8140 +transformers/models/dinov2/convert_dinov2_to_hf.py,sha256=g4wmiqVdUlNbRoy_GbEws3DQaXfUA1I9Qh6bHhL6yZk,11964 +transformers/models/dinov2/modeling_dinov2.py,sha256=6aG391Nec6OaDkXXoOkHZol_frdtM2TqyVVvmRVmA-M,36270 +transformers/models/distilbert/__init__.py,sha256=64w_AOUP-vupRT6bGlQF7Ak24rJB5AX58n1V8V_aHM0,5167 +transformers/models/distilbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/configuration_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/modeling_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/modeling_flax_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/modeling_tf_distilbert.cpython-310.pyc,, 
+transformers/models/distilbert/__pycache__/tokenization_distilbert.cpython-310.pyc,, +transformers/models/distilbert/__pycache__/tokenization_distilbert_fast.cpython-310.pyc,, +transformers/models/distilbert/configuration_distilbert.py,sha256=tIVlhKDhh0YOcubJcMJ9fk4M_LspEC1KvoeVPJdFXos,6091 +transformers/models/distilbert/modeling_distilbert.py,sha256=qkIYcvR8n8qG7m9_zfmYuiWoqPDwRXBsHs1gFyrJTm4,61606 +transformers/models/distilbert/modeling_flax_distilbert.py,sha256=cBRX7sUX2G9aSX6_I15sZ_H1yTXOMvwM7Gw3xbgOL6Q,32629 +transformers/models/distilbert/modeling_tf_distilbert.py,sha256=DhZ2Ap7aE3P9Gg7yGoxkBD5o3pZFM1xb6qqwriMgCqU,48952 +transformers/models/distilbert/tokenization_distilbert.py,sha256=Y9TPnqWaB-k8PbebKeZRoTyRFk2tgwySxcyZ_zybNOY,21906 +transformers/models/distilbert/tokenization_distilbert_fast.py,sha256=oO1CanamHXMuG6eJH8LOwRBEgK2yCSe80lF2fDGz5zo,8037 +transformers/models/dit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +transformers/models/dit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dit/__pycache__/convert_dit_unilm_to_pytorch.cpython-310.pyc,, +transformers/models/dit/convert_dit_unilm_to_pytorch.py,sha256=qoCC3Hm-enjzLj5LoxjbpP8EaIsyhi3U3PERYYeSt7c,9420 +transformers/models/donut/__init__.py,sha256=VraCMZ5ZG0WtYvLmZv-B-gIH5joEM_QdAkiH2iDjLls,2455 +transformers/models/donut/__pycache__/__init__.cpython-310.pyc,, +transformers/models/donut/__pycache__/configuration_donut_swin.cpython-310.pyc,, +transformers/models/donut/__pycache__/convert_donut_to_pytorch.cpython-310.pyc,, +transformers/models/donut/__pycache__/feature_extraction_donut.cpython-310.pyc,, +transformers/models/donut/__pycache__/image_processing_donut.cpython-310.pyc,, +transformers/models/donut/__pycache__/modeling_donut_swin.cpython-310.pyc,, +transformers/models/donut/__pycache__/processing_donut.cpython-310.pyc,, +transformers/models/donut/configuration_donut_swin.py,sha256=VW7HBmDWZwdrop5HHQ92YsRwVjtW_XHqwecMWBTqaVY,5856 +transformers/models/donut/convert_donut_to_pytorch.py,sha256=0IgQ3V9hNWPOJ6KtOfowhVMfTh1m4WEVLOAQSMEGjJE,9316 +transformers/models/donut/feature_extraction_donut.py,sha256=jBSpDfoiCg_IWr4gcphIcxs7DA760JnH6V6hAfaoYPM,1179 +transformers/models/donut/image_processing_donut.py,sha256=vL7BsBj43uQsQEHJXXw3oMHmjOGFVH_IXRxQXzbzfK4,22310 +transformers/models/donut/modeling_donut_swin.py,sha256=jU9ovPUk-S7HI0rTUKSVIkc7R0tyb57LrIdiSicdAyk,43458 +transformers/models/donut/processing_donut.py,sha256=FxN4s7YYI0Yv_RyaBeYCnwx2Ljji37j3kJHvp75W0iI,8220 +transformers/models/dpr/__init__.py,sha256=qc_Fe-hF94ZxS9cfEXCp9h7-tkmi9Tj4KV9h_wg6yhs,4535 +transformers/models/dpr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dpr/__pycache__/configuration_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/convert_dpr_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/dpr/__pycache__/modeling_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/modeling_tf_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/tokenization_dpr.cpython-310.pyc,, +transformers/models/dpr/__pycache__/tokenization_dpr_fast.cpython-310.pyc,, +transformers/models/dpr/configuration_dpr.py,sha256=hAE_Dth18kXhXn6qavKhMv2p7LdtwXeK-VhJIwNvAFk,6486 +transformers/models/dpr/convert_dpr_original_checkpoint_to_pytorch.py,sha256=XsxG5FBg46-EHlDsMq4w21C9W4wl8RZ6GZvx5coBmfk,6132 +transformers/models/dpr/modeling_dpr.py,sha256=oIoBMXJXKVb0lzDe5Aa5whyyKwfHJWU8cc6Hqo-RIeE,28591 
+transformers/models/dpr/modeling_tf_dpr.py,sha256=fEwY6IjRaFbGN2JyxKmQERALN1VilZtm9LnRPIUtJww,33927 +transformers/models/dpr/tokenization_dpr.py,sha256=S4RPwKqgXSxgUy1CvdMrF4kWrC6_xiq0xeejbQNeXpo,15726 +transformers/models/dpr/tokenization_dpr_fast.py,sha256=OcoTaeO_t5AQ-SX9MKIUhLEU2gJNHkxdJbkmUCP81vQ,16112 +transformers/models/dpt/__init__.py,sha256=WoC0ADjpTTkspHtgIX_TtHXXG-4t8S-NGgJaAUiG-q4,2444 +transformers/models/dpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/dpt/__pycache__/configuration_dpt.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dinov2_depth_to_hf.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_beit_to_hf.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_hybrid_to_pytorch.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_swinv2_to_hf.cpython-310.pyc,, +transformers/models/dpt/__pycache__/convert_dpt_to_pytorch.cpython-310.pyc,, +transformers/models/dpt/__pycache__/feature_extraction_dpt.cpython-310.pyc,, +transformers/models/dpt/__pycache__/image_processing_dpt.cpython-310.pyc,, +transformers/models/dpt/__pycache__/modeling_dpt.cpython-310.pyc,, +transformers/models/dpt/configuration_dpt.py,sha256=B4KgYJ8CFBN-PY4iTUD9hO6r3iWF7-8C22WEWPs0yeE,14504 +transformers/models/dpt/convert_dinov2_depth_to_hf.py,sha256=azN2ivIGa-g5fe6kdkQ0kJbgKitt10k8C2R3x3ff6FI,16935 +transformers/models/dpt/convert_dpt_beit_to_hf.py,sha256=VeC3Jpf_BVCkTdFJQHhrJPTgyRIibPzC32Isrd5iBPg,14347 +transformers/models/dpt/convert_dpt_hybrid_to_pytorch.py,sha256=czo2aHnDSZZqv2qwpx48s1dRTg25v-R5giSg4seNebE,12994 +transformers/models/dpt/convert_dpt_swinv2_to_hf.py,sha256=rFZSF_WFfMcVxXz815SX0THuTfg0juJBy6qCy8yT6QY,15176 +transformers/models/dpt/convert_dpt_to_pytorch.py,sha256=-SpPQGZ5tD6g0g5fQpSbMmUDK9xc1OFIInk9yyjkahE,11894 +transformers/models/dpt/feature_extraction_dpt.py,sha256=ZgBcSKNDX0_Fstv94sp1r9jpr9zvXCLPwvIek76Fkso,1165 +transformers/models/dpt/image_processing_dpt.py,sha256=--rXjjVmmW9b18QuLvkYd2dwSTCotaO7BDwMzyAfp2Q,23020 +transformers/models/dpt/modeling_dpt.py,sha256=y_FBw3uZvuTcIvpE84idIua7eZmXm_3y-8-gXiw9Hr4,57342 +transformers/models/efficientformer/__init__.py,sha256=hFVX-KUt3FRIjqb_MzHVif_h8r9FFezpRtRwFKLBKuY,3550 +transformers/models/efficientformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/efficientformer/__pycache__/configuration_efficientformer.cpython-310.pyc,, +transformers/models/efficientformer/__pycache__/convert_efficientformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/efficientformer/__pycache__/image_processing_efficientformer.cpython-310.pyc,, +transformers/models/efficientformer/__pycache__/modeling_efficientformer.cpython-310.pyc,, +transformers/models/efficientformer/__pycache__/modeling_tf_efficientformer.cpython-310.pyc,, +transformers/models/efficientformer/configuration_efficientformer.py,sha256=7zYu__djyFxNr5IOTXzcvNO5ZcLS8uWVBd0-XfKzGJk,7825 +transformers/models/efficientformer/convert_efficientformer_original_pytorch_checkpoint_to_pytorch.py,sha256=1ni0wyhRjTbF8U4BZ_FXU-_9Jzy43HMLKI3vGlyPjFc,9381 +transformers/models/efficientformer/image_processing_efficientformer.py,sha256=3D0DAnOMi7M3cb7UtjlOp7XDp96SL5XPdFumWLaZTQc,15694 +transformers/models/efficientformer/modeling_efficientformer.py,sha256=KwyEoMNevv3Jkz0U-E4kX9FNhFeC03TDHd9ryHu2h0w,33794 +transformers/models/efficientformer/modeling_tf_efficientformer.py,sha256=3ZEu1wKr37UyFMvH3fPJOeKnYlIrexx-GW7uHag1wvQ,49300 
+transformers/models/efficientnet/__init__.py,sha256=mS43eilPqqiySKV0CZ34jg1SPUJa2zc6qyCwwRoJQFM,2670 +transformers/models/efficientnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/configuration_efficientnet.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/convert_efficientnet_to_pytorch.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/image_processing_efficientnet.cpython-310.pyc,, +transformers/models/efficientnet/__pycache__/modeling_efficientnet.cpython-310.pyc,, +transformers/models/efficientnet/configuration_efficientnet.py,sha256=UoppUIDRRJh9GpqU6zSCuLWRrDTIf5HxyR0fx2QhhoU,7701 +transformers/models/efficientnet/convert_efficientnet_to_pytorch.py,sha256=e2Na1xvNc7z9XvvI7v6v1V2uFWr88MSTN3JPKR5GstM,12756 +transformers/models/efficientnet/image_processing_efficientnet.py,sha256=t2SCJE3ChbM8bkQzd2QvxAsQ2SGB-sv8ywxw-MyGVc8,18848 +transformers/models/efficientnet/modeling_efficientnet.py,sha256=cqtMmoJwTG6xts8VOw5aZ1fmhFagro3h3aHCKAOCX5I,24052 +transformers/models/electra/__init__.py,sha256=UVRK4T71rPHmZYRbrQ_-5eu98Gfrkp6I9SA3KVVCcYQ,5257 +transformers/models/electra/__pycache__/__init__.cpython-310.pyc,, +transformers/models/electra/__pycache__/configuration_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/convert_electra_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/electra/__pycache__/modeling_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/modeling_flax_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/modeling_tf_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/tokenization_electra.cpython-310.pyc,, +transformers/models/electra/__pycache__/tokenization_electra_fast.cpython-310.pyc,, +transformers/models/electra/configuration_electra.py,sha256=RYzIRYomSpcKP2RVySm3QPlDnSaGhtrWkThCE89mfk8,9194 +transformers/models/electra/convert_electra_original_tf_checkpoint_to_pytorch.py,sha256=iwbjp9v26TfI9iIRdR4KWv-zsrxVNbfgkUwn9N1WHaM,2862 +transformers/models/electra/modeling_electra.py,sha256=z1Yc3xI0QQuTasP4KEEiuT5V4gqFS6HhhH93Ok1-K2w,75835 +transformers/models/electra/modeling_flax_electra.py,sha256=S5TkUbjF-9GNOxeiGfXTjc3tnINV18R8CLLFf30A9zU,62268 +transformers/models/electra/modeling_tf_electra.py,sha256=UMST0sC4TTX-9NTjtH8U8c4FpT46MSbt44Nq5cBPdPA,78437 +transformers/models/electra/tokenization_electra.py,sha256=m1-PY3o6pZVr2xcqP4tfv8oMZHKzrHNF0x6HqAOsGUo,20909 +transformers/models/electra/tokenization_electra_fast.py,sha256=zPqzst_6dX5eiFgR2iVsZuzYIS-KTe8BKDkh3fsPTQo,7685 +transformers/models/encodec/__init__.py,sha256=LVz0exnSENNu1jnGsAoPoS7LfXgC-H7s3_lbwNEX_Dw,1910 +transformers/models/encodec/__pycache__/__init__.cpython-310.pyc,, +transformers/models/encodec/__pycache__/configuration_encodec.cpython-310.pyc,, +transformers/models/encodec/__pycache__/convert_encodec_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/encodec/__pycache__/feature_extraction_encodec.cpython-310.pyc,, +transformers/models/encodec/__pycache__/modeling_encodec.cpython-310.pyc,, +transformers/models/encodec/configuration_encodec.py,sha256=aKzD7Pnk3pdmRo2IBjonAJyXCGvMcQg6ICe_6SScJSQ,8596 +transformers/models/encodec/convert_encodec_checkpoint_to_pytorch.py,sha256=zF2ZSOCFsiMNvtIvRhjoucoF2G3m0nW-cHXimF_2uwQ,15253 +transformers/models/encodec/feature_extraction_encodec.py,sha256=luYd1uGvvQC_mDYlUsnMtSBn_S0dhbazYJ9zYGuQ1Kc,9873 +transformers/models/encodec/modeling_encodec.py,sha256=gI0-5Ebv6DyR_sS-48EBR4AHrKEPSxHJfISKcYsKOww,33493 
+transformers/models/encoder_decoder/__init__.py,sha256=bR1yPbuqKHUYXaxI_QuDz6ccBSWpCr0THhPBM3lnttA,2451 +transformers/models/encoder_decoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/configuration_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_flax_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/__pycache__/modeling_tf_encoder_decoder.cpython-310.pyc,, +transformers/models/encoder_decoder/configuration_encoder_decoder.py,sha256=HaF1rtwzf_tDXJYrfycr4ktA8-LlBia_RdAWD60RTu8,4362 +transformers/models/encoder_decoder/modeling_encoder_decoder.py,sha256=bV7b45U8L6TR6JNtKWfa_s01S47TGu06XpyiJv40QXk,35783 +transformers/models/encoder_decoder/modeling_flax_encoder_decoder.py,sha256=geeWvUTNF1OprImdmwdPclf2qUpHGQ_Z0TZzMMbqSsc,43529 +transformers/models/encoder_decoder/modeling_tf_encoder_decoder.py,sha256=pVGR6W436j6W2QhrlcyRLJji_wP8nJi3vyrqW0Lv3xQ,34308 +transformers/models/ernie/__init__.py,sha256=s0oBhpPU0MdftoAKWUbo3VR2D9VPTvjPde4NBylw5qI,2331 +transformers/models/ernie/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ernie/__pycache__/configuration_ernie.cpython-310.pyc,, +transformers/models/ernie/__pycache__/modeling_ernie.cpython-310.pyc,, +transformers/models/ernie/configuration_ernie.py,sha256=vrAn42VLrtzQkHf2blk2XkqouHtltRV_liEyyfYpR1c,7744 +transformers/models/ernie/modeling_ernie.py,sha256=FUwuyEzmohwLeYzgMhsgf_ZuHSlxKd18WCSjnApp13k,83926 +transformers/models/ernie_m/__init__.py,sha256=0neb_RuFu2HBnM3QZ5XRTBI9j8jzppR90ssXHH9LpGA,2637 +transformers/models/ernie_m/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ernie_m/__pycache__/configuration_ernie_m.cpython-310.pyc,, +transformers/models/ernie_m/__pycache__/modeling_ernie_m.cpython-310.pyc,, +transformers/models/ernie_m/__pycache__/tokenization_ernie_m.cpython-310.pyc,, +transformers/models/ernie_m/configuration_ernie_m.py,sha256=O8S19tEevivnWWwXwlLLi6hw6dRYO3gil9oOgmGDd-M,5982 +transformers/models/ernie_m/modeling_ernie_m.py,sha256=gcNj64kS1S1pxYXAuOupUHWanVQEJVjHe0i-GOY2SCU,47920 +transformers/models/ernie_m/tokenization_ernie_m.py,sha256=H2w93i-UQZpvEK8r-bl74x8ZxJ5mT3_wjUj2XKajDF4,16167 +transformers/models/esm/__init__.py,sha256=IfHOSRyzJHTD8eVSelVu_ijHcYnRp0Umm6hZGsoFYHQ,2978 +transformers/models/esm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/esm/__pycache__/configuration_esm.cpython-310.pyc,, +transformers/models/esm/__pycache__/convert_esm.cpython-310.pyc,, +transformers/models/esm/__pycache__/modeling_esm.cpython-310.pyc,, +transformers/models/esm/__pycache__/modeling_esmfold.cpython-310.pyc,, +transformers/models/esm/__pycache__/modeling_tf_esm.cpython-310.pyc,, +transformers/models/esm/__pycache__/tokenization_esm.cpython-310.pyc,, +transformers/models/esm/configuration_esm.py,sha256=3KH-ti_vdKcNC235XlA0k_thhnkMUjDCXQkB6hSZJXA,14454 +transformers/models/esm/convert_esm.py,sha256=x0dfu2oexN80cndU3Zn81oVynsRuzfEtJZF20TK1y3k,18470 +transformers/models/esm/modeling_esm.py,sha256=PeAg54V-K3j95Ff77QfrNioNiw8YEBmRVK91_gW6hfo,55663 +transformers/models/esm/modeling_esmfold.py,sha256=GgMkBeEhTvZBj61fNGqDkZsWTGeRwrhkHSGYa0otbJ4,86908 +transformers/models/esm/modeling_tf_esm.py,sha256=q8Nh_eDqeI9xBSWmutfaeB3rgxkUDdhF_mzafFdAFc8,68965 +transformers/models/esm/openfold_utils/__init__.py,sha256=Xy2uqvFsLC8Ax-OOce5PgoBDiZgEJgJPqs__p5SBWUY,446 
+transformers/models/esm/openfold_utils/__pycache__/__init__.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/chunk_utils.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/data_transforms.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/feats.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/loss.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/protein.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/residue_constants.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/rigid_utils.cpython-310.pyc,, +transformers/models/esm/openfold_utils/__pycache__/tensor_utils.cpython-310.pyc,, +transformers/models/esm/openfold_utils/chunk_utils.py,sha256=eyd0NSdGIVBr9gLuI-3VI5cjJr46wYa9hlYBq1L1gCU,14392 +transformers/models/esm/openfold_utils/data_transforms.py,sha256=F4wGANRhKLd6MLHrwg2IxpqCxCJEx8aFSxqAdsXsBMo,3764 +transformers/models/esm/openfold_utils/feats.py,sha256=dgLcLJriW-eDIBdc0MyKPDT5w0POab9QLuN56qE8wsk,8376 +transformers/models/esm/openfold_utils/loss.py,sha256=wY2ONqbuRvWMomjkpfPwfoa7dqCO2vFkM-kmNfhjivo,3705 +transformers/models/esm/openfold_utils/protein.py,sha256=x9NK6bryLs9vNi3j8OfOlw0Jb1cFrwMhCi6JdxkDdQw,11490 +transformers/models/esm/openfold_utils/residue_constants.py,sha256=KDcdOt5wkJ7cO7p-LtmS8sLIzfQ2ej7p40Re8EsTkv0,37993 +transformers/models/esm/openfold_utils/rigid_utils.py,sha256=EF79POBO-abRsdXrfdKLaqJUVIPp4EOMFVt5oOjx504,41122 +transformers/models/esm/openfold_utils/tensor_utils.py,sha256=A07D5psNs5lGgWJp_kzJgrY8cmWmaL3odDgKXN1NVAE,4798 +transformers/models/esm/tokenization_esm.py,sha256=kthSlBphrNeHRinLA0-FdVQ0_olxhQhViQ2_3K_DBo8,5355 +transformers/models/falcon/__init__.py,sha256=Sf4eyG7aJ4pQoqLJXStTSTxP7iEHks73GWe9QjAnU3w,2067 +transformers/models/falcon/__pycache__/__init__.cpython-310.pyc,, +transformers/models/falcon/__pycache__/configuration_falcon.cpython-310.pyc,, +transformers/models/falcon/__pycache__/convert_custom_code_checkpoint.cpython-310.pyc,, +transformers/models/falcon/__pycache__/modeling_falcon.cpython-310.pyc,, +transformers/models/falcon/configuration_falcon.py,sha256=2epVIM91N3mJKEvwjAMO698OA30XyxfLkHy89BfnY5w,9614 +transformers/models/falcon/convert_custom_code_checkpoint.py,sha256=XPJ1owRjRno_Y1AD5UeoPE4oo6a-SeQR9w9u-EIUktE,3061 +transformers/models/falcon/modeling_falcon.py,sha256=CnaGsYFw3XXSXx0RVf6bVuvu4eL7u0fDaP3XG_6s5wo,75692 +transformers/models/fastspeech2_conformer/__init__.py,sha256=eAZrmrz-mhay_crQQcN59ra1YBH341kxCGvR2h__YBE,2770 +transformers/models/fastspeech2_conformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/configuration_fastspeech2_conformer.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/convert_fastspeech2_conformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/convert_hifigan.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/convert_model_with_hifigan.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/modeling_fastspeech2_conformer.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/__pycache__/tokenization_fastspeech2_conformer.cpython-310.pyc,, +transformers/models/fastspeech2_conformer/configuration_fastspeech2_conformer.py,sha256=DoA6gWj-XXM_UuCBIwiDdOwCaSm6q-RSwLpPhoydaYA,24662 
+transformers/models/fastspeech2_conformer/convert_fastspeech2_conformer_original_pytorch_checkpoint_to_pytorch.py,sha256=-ToJHpwI-xoLLMzLYdqFrBL6j6nsSPlNbkQ3pfTgJ6Y,8939 +transformers/models/fastspeech2_conformer/convert_hifigan.py,sha256=RC1PaVnl1cLx8c2LdYycNti7iYRhUM7_KrX2mF5WyCM,5431 +transformers/models/fastspeech2_conformer/convert_model_with_hifigan.py,sha256=wT4pQGgEHVFoWI1Lb71L7_i6ujfNrSMDGYuDGb4oeh8,3471 +transformers/models/fastspeech2_conformer/modeling_fastspeech2_conformer.py,sha256=Btj_h3RCbp0lGpc06W3kzlC3M459OyfuB3eeKoAhKtc,77676 +transformers/models/fastspeech2_conformer/tokenization_fastspeech2_conformer.py,sha256=aM39xyh9UuqEsplBCWrbBtRYNj3BEw6V1QmGkTqCyAU,6218 +transformers/models/flaubert/__init__.py,sha256=neN63qn5CVIfPSr50g0WhbrcKDT7w0qIljyqSCxbqLI,3488 +transformers/models/flaubert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/flaubert/__pycache__/configuration_flaubert.cpython-310.pyc,, +transformers/models/flaubert/__pycache__/modeling_flaubert.cpython-310.pyc,, +transformers/models/flaubert/__pycache__/modeling_tf_flaubert.cpython-310.pyc,, +transformers/models/flaubert/__pycache__/tokenization_flaubert.cpython-310.pyc,, +transformers/models/flaubert/configuration_flaubert.py,sha256=hCD-A6OTCJmxfMMIPDogR2Der27ch4hy9F-lKjlV6GE,11288 +transformers/models/flaubert/modeling_flaubert.py,sha256=5_dfp-KEyaBaDpT1pJY5PDhx5i-_-AgWF6wVBt_jJ-Q,57599 +transformers/models/flaubert/modeling_tf_flaubert.py,sha256=RH236R8k0gr1m5k3pXhj5AiILVFNgoxgrgGtpGpRb2k,57184 +transformers/models/flaubert/tokenization_flaubert.py,sha256=SmfPpRAS5kQXm9WvpRjS6QL7lA2JQZr4Vs3GbhXP-Xo,22136 +transformers/models/flava/__init__.py,sha256=TtPrEOob3V4Lk_NK3rgacXw0jJ2ABWKPnLP8x4uSs4I,3030 +transformers/models/flava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/flava/__pycache__/configuration_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/convert_dalle_to_flava_codebook.cpython-310.pyc,, +transformers/models/flava/__pycache__/convert_flava_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/flava/__pycache__/feature_extraction_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/image_processing_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/modeling_flava.cpython-310.pyc,, +transformers/models/flava/__pycache__/processing_flava.cpython-310.pyc,, +transformers/models/flava/configuration_flava.py,sha256=3t2E9ekLNZMSAgo6djFRubfLOfbqDMyw2uSx3yjjyhw,37184 +transformers/models/flava/convert_dalle_to_flava_codebook.py,sha256=iEJM9W_cKk3HK0gKS6i2ygEMeyymWCMl18LDaQXRAhY,3428 +transformers/models/flava/convert_flava_original_pytorch_to_hf.py,sha256=LilQpbe6qeN2P_uXljae6zEPx_KoepoRv4uvCEAo0QA,4372 +transformers/models/flava/feature_extraction_flava.py,sha256=mA1uAn29yv9PV7gYXauz0VTAJDgcpl9DPHvH99Ed__s,1201 +transformers/models/flava/image_processing_flava.py,sha256=88KY6CipM_6HkY3SKu9dfdv_KhSAoYrFZRkKqrpal7A,38581 +transformers/models/flava/modeling_flava.py,sha256=obSU7TGFe-_XjLMyGSkDrTiiP1dGFXDcfq1HkQ4sjtE,96774 +transformers/models/flava/processing_flava.py,sha256=fj9uFlMerVGFnB9hV1XJ61c3q82qstjPwmWUdMiL46U,6832 +transformers/models/fnet/__init__.py,sha256=spzYrdM_-MVYRr6Axeh_adtgX1pCDAsUJEpR-cPdxgE,3179 +transformers/models/fnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fnet/__pycache__/configuration_fnet.cpython-310.pyc,, +transformers/models/fnet/__pycache__/convert_fnet_original_flax_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/fnet/__pycache__/modeling_fnet.cpython-310.pyc,, 
+transformers/models/fnet/__pycache__/tokenization_fnet.cpython-310.pyc,, +transformers/models/fnet/__pycache__/tokenization_fnet_fast.cpython-310.pyc,, +transformers/models/fnet/configuration_fnet.py,sha256=Bo0KeJXuaW22kjeS0SFRpDtAnqSVoIjTZLzEY5094Jo,5637 +transformers/models/fnet/convert_fnet_original_flax_checkpoint_to_pytorch.py,sha256=bxrdtJbyINwJtiIpagL3Ttkq0D5ujBK1Wi72fIR2vss,6912 +transformers/models/fnet/modeling_fnet.py,sha256=7xVl8KRse3ugfF0URJ7Xv9oURNVEEKp3JOFx90_lC_k,49043 +transformers/models/fnet/tokenization_fnet.py,sha256=DBOFFYsJ2W3z2tMKVFC3r1Hw48OdNKSAOqt--aRk15M,14549 +transformers/models/fnet/tokenization_fnet_fast.py,sha256=comrMbQXVPEhm8EGUiqgrvItcDiFLPwTm_YupT3S4fI,8064 +transformers/models/focalnet/__init__.py,sha256=RPvCimVzndLWR8r1MfUbrAiQTJEvJ6VGTM1OFmAS9-A,1989 +transformers/models/focalnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/focalnet/__pycache__/configuration_focalnet.cpython-310.pyc,, +transformers/models/focalnet/__pycache__/convert_focalnet_to_hf_format.cpython-310.pyc,, +transformers/models/focalnet/__pycache__/modeling_focalnet.cpython-310.pyc,, +transformers/models/focalnet/configuration_focalnet.py,sha256=cEGW_Mlrb6aCQyORrNzXSDzy6pRpwwnGr8XfSv-x3yk,8127 +transformers/models/focalnet/convert_focalnet_to_hf_format.py,sha256=xBoop7K4unfPawCbmlv7BTQHpbJkaUWasrwsw8dW_KI,9450 +transformers/models/focalnet/modeling_focalnet.py,sha256=4LWagJUt4Zu0XJccaaaKpPcfiorL1XUudUG-yADj608,43186 +transformers/models/fsmt/__init__.py,sha256=e0xh51cBRMFkSYEcmZzyINHoXBKwgonWv3zEPqZuMYE,1675 +transformers/models/fsmt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/configuration_fsmt.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/convert_fsmt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/modeling_fsmt.cpython-310.pyc,, +transformers/models/fsmt/__pycache__/tokenization_fsmt.cpython-310.pyc,, +transformers/models/fsmt/configuration_fsmt.py,sha256=GZrow13QeYa3jZ-olYXGlVdMN3nqRbnU4_i-fdeJd_g,10161 +transformers/models/fsmt/convert_fsmt_original_pytorch_checkpoint_to_pytorch.py,sha256=BWtn90XQAuWGp8k9zns5St9On_os395ESNgkaXy6y2g,11264 +transformers/models/fsmt/modeling_fsmt.py,sha256=Qo_LDfeYujqdZs99eeb6LElHYMFtZd4e2Q7Fd96M8I0,58402 +transformers/models/fsmt/tokenization_fsmt.py,sha256=uqmF6EjUAT7OQ3ZA0Ioj_m0eINcJZOGiMEEmHC67_Vo,19261 +transformers/models/funnel/__init__.py,sha256=QQgGGD4BfFL3j1qtC1oNuuagXUPYWw0KJ4XVKTzMvW0,4126 +transformers/models/funnel/__pycache__/__init__.cpython-310.pyc,, +transformers/models/funnel/__pycache__/configuration_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/convert_funnel_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/funnel/__pycache__/modeling_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/modeling_tf_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/tokenization_funnel.cpython-310.pyc,, +transformers/models/funnel/__pycache__/tokenization_funnel_fast.cpython-310.pyc,, +transformers/models/funnel/configuration_funnel.py,sha256=k7ff4rr8IweCknqduGwjl2aNmrPMFK2oMLk_GntnTEE,7750 +transformers/models/funnel/convert_funnel_original_tf_checkpoint_to_pytorch.py,sha256=fdaL7-j0ZWjCKvvpS_gFYHBthQ8TFbGmkOmfd53enaI,2335 +transformers/models/funnel/modeling_funnel.py,sha256=wWxW9cODrXeCguOvdu-1U0gsJ4ID8k4M-Nbcw6MsWpM,69551 +transformers/models/funnel/modeling_tf_funnel.py,sha256=zCzTxVUVSxbxwJGhS7KkdLxFoFsN-I8_o8kyHukK2UE,80267 
+transformers/models/funnel/tokenization_funnel.py,sha256=A7Xmjp3TgF-jjk6Lr2W_xfaqFWcbKAe8DlEGwhN8rTA,22369 +transformers/models/funnel/tokenization_funnel_fast.py,sha256=KpC1leC5Wi7e22-orBKLLIFeMt7tb3Y8N98-jpOV7TU,8644 +transformers/models/fuyu/__init__.py,sha256=SLRcFqITZh127We258kiNPRKoegottQTbpuCZ72dTBU,2184 +transformers/models/fuyu/__pycache__/__init__.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/configuration_fuyu.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/convert_fuyu_model_weights_to_hf.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/image_processing_fuyu.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/modeling_fuyu.cpython-310.pyc,, +transformers/models/fuyu/__pycache__/processing_fuyu.cpython-310.pyc,, +transformers/models/fuyu/configuration_fuyu.py,sha256=SaL8tMrWsMdBW9chJe7-RIxyUtagYM2ChS8ssRLiUxE,10155 +transformers/models/fuyu/convert_fuyu_model_weights_to_hf.py,sha256=c8A4qiUY47MfPeEG518qofxFdzut0me3EtFNizEHv6Q,4847 +transformers/models/fuyu/image_processing_fuyu.py,sha256=jYB8EWiRio_c5g4EkReAxLFFrv7fdoONlKGVGZnadxM,33810 +transformers/models/fuyu/modeling_fuyu.py,sha256=hto2ZclUlvFlg-OPGnjRyJwbtZB5VtyNjR0qfUll39o,17758 +transformers/models/fuyu/processing_fuyu.py,sha256=AQW_0vMDaOfOlOVjmmr4utHA-pT9Gvnc6ITlBpLrSa8,31896 +transformers/models/gemma/__init__.py,sha256=boIWLnLMFp69VbfjGEcoCMTSObbY_0OevWvwBOa29Xg,3339 +transformers/models/gemma/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gemma/__pycache__/configuration_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/convert_gemma_weights_to_hf.cpython-310.pyc,, +transformers/models/gemma/__pycache__/modeling_flax_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/modeling_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/tokenization_gemma.cpython-310.pyc,, +transformers/models/gemma/__pycache__/tokenization_gemma_fast.cpython-310.pyc,, +transformers/models/gemma/configuration_gemma.py,sha256=zcwX5vkni6iKEHcuMsS_LIOhy-sukba7u9y8_DS0k3g,7141 +transformers/models/gemma/convert_gemma_weights_to_hf.py,sha256=UCoyJd4wVYKlikKMK0-9GRFAa-Cm3OtLt7oSJjXOuPA,7366 +transformers/models/gemma/modeling_flax_gemma.py,sha256=rDG3jua0r9HMiwbdirTDB2h2xEvuxtlTWoW59dXL1Dw,32332 +transformers/models/gemma/modeling_gemma.py,sha256=2OIgMCHmoBOyScSk9AzNyRNTYQ9M3csOVZ4oIDPXCjU,64133 +transformers/models/gemma/tokenization_gemma.py,sha256=CXHdN19ZMBvqfFeIEyF-p92iJO1umUakJ1sfPLOOaiY,13981 +transformers/models/gemma/tokenization_gemma_fast.py,sha256=bTIi46E_PXDmRwD8hQG0AI6HRlj-03Y7itWFA6tclQE,8279 +transformers/models/git/__init__.py,sha256=KG0HrIdVgj64GVVUk32IdidJRaC5BcjQZt62oVRL5Eo,1888 +transformers/models/git/__pycache__/__init__.cpython-310.pyc,, +transformers/models/git/__pycache__/configuration_git.cpython-310.pyc,, +transformers/models/git/__pycache__/convert_git_to_pytorch.cpython-310.pyc,, +transformers/models/git/__pycache__/modeling_git.cpython-310.pyc,, +transformers/models/git/__pycache__/processing_git.cpython-310.pyc,, +transformers/models/git/configuration_git.py,sha256=LKzTZXFIsdNyxaXEY6JKw2J0hQ-NNSOPYg04WkbaN3s,11310 +transformers/models/git/convert_git_to_pytorch.py,sha256=HzsGAVKq7fhWCgI89QsSEDUO1IaQn0LNPkprFq3-vYk,22390 +transformers/models/git/modeling_git.py,sha256=mNGhSVAdXmSVX_Hy95-romnN_RSBBExVqzzqH0-HG28,69114 +transformers/models/git/processing_git.py,sha256=z-nGl5S4cxDFam85wJWrONVASpM4auyjjUn_lhq4cZM,5381 +transformers/models/glpn/__init__.py,sha256=-5zqCuk1phx-Bjw3Mq-NJmPvusXfEYcNGIrFO27vr3s,2384 
+transformers/models/glpn/__pycache__/__init__.cpython-310.pyc,, +transformers/models/glpn/__pycache__/configuration_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/convert_glpn_to_pytorch.cpython-310.pyc,, +transformers/models/glpn/__pycache__/feature_extraction_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/image_processing_glpn.cpython-310.pyc,, +transformers/models/glpn/__pycache__/modeling_glpn.cpython-310.pyc,, +transformers/models/glpn/configuration_glpn.py,sha256=dAqIy5-a8cbTOyBN781JAiYA0u1rFwVfUz89T5WRjI0,6068 +transformers/models/glpn/convert_glpn_to_pytorch.py,sha256=dT5q2vCISTu1DjoTkLSyHmlcR75n_CGhXxxknL5KjJQ,8558 +transformers/models/glpn/feature_extraction_glpn.py,sha256=S263LFeHVRym_jKt8KkTOjjtA1_BqARnUgbSFExgPN4,1172 +transformers/models/glpn/image_processing_glpn.py,sha256=-vAlAJdllzBjNJdB_OJn9NOx5gkDaB_sUYZN23Y7xGY,11003 +transformers/models/glpn/modeling_glpn.py,sha256=Zp2otpnM7nTzdd-DAUhOXANYtXyqIyqLvnFfCai2Fmk,31502 +transformers/models/gpt2/__init__.py,sha256=d_QyBAIVXohGlkOMWC9r03kE9uS2IHwXwPCsxnMGGkg,4674 +transformers/models/gpt2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/configuration_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/convert_gpt2_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_flax_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/modeling_tf_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_fast.cpython-310.pyc,, +transformers/models/gpt2/__pycache__/tokenization_gpt2_tf.cpython-310.pyc,, +transformers/models/gpt2/configuration_gpt2.py,sha256=m-3GXLz7ryiNvroT6JVh7Ag7MfyYGs25jzNMFiPKZvY,12079 +transformers/models/gpt2/convert_gpt2_original_tf_checkpoint_to_pytorch.py,sha256=nRAxbikMz9v88rDqfrX8OwPvBKe7fiYC2fg-6BB8Mzk,2532 +transformers/models/gpt2/modeling_flax_gpt2.py,sha256=6vAeL1SwHlYUxTwHmfHXEYLuvTJoLRq5zl_GwUm5PiE,32014 +transformers/models/gpt2/modeling_gpt2.py,sha256=rJQj-4HoKSakDuXx8ha79J1-jlSCVm6kN3DT4KnDLs8,88050 +transformers/models/gpt2/modeling_tf_gpt2.py,sha256=HjAhqrJgp2YJ4IBfobuzND9jkVcBWBzAJuRXd9TYqj8,56691 +transformers/models/gpt2/tokenization_gpt2.py,sha256=R0pYfuNDBx2Rdp9zDGOGcWprMGEcDBzIFsUOXw1KBPI,13849 +transformers/models/gpt2/tokenization_gpt2_fast.py,sha256=lRMLo_ML0hP0K6IJtceoISZXiBaw2Z1h2MJCs693IvM,6535 +transformers/models/gpt2/tokenization_gpt2_tf.py,sha256=Ptg01f1bV0fAvI1JK6v-FE4lVKUPIiXrxxPrf8M7kgU,3833 +transformers/models/gpt_bigcode/__init__.py,sha256=waW0WeT6jgb8gWpaGmMZBJCYoqKzCbaQbyjHZkuEARE,2037 +transformers/models/gpt_bigcode/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_bigcode/__pycache__/configuration_gpt_bigcode.cpython-310.pyc,, +transformers/models/gpt_bigcode/__pycache__/modeling_gpt_bigcode.cpython-310.pyc,, +transformers/models/gpt_bigcode/configuration_gpt_bigcode.py,sha256=DucM152GIQPCLHztnilZZ3WR10WySLc1P6cIy8TnGvU,6382 +transformers/models/gpt_bigcode/modeling_gpt_bigcode.py,sha256=aywzb7EFlA0xzKY01aFp0KYtP60_8-F2TdmKa52GLXY,69693 +transformers/models/gpt_neo/__init__.py,sha256=tCBf4wXQijfaRh959WfU7_npuc1na00rwCZCgcxuTOo,2718 +transformers/models/gpt_neo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/configuration_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/convert_gpt_neo_mesh_tf_to_pytorch.cpython-310.pyc,, 
+transformers/models/gpt_neo/__pycache__/modeling_flax_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/__pycache__/modeling_gpt_neo.cpython-310.pyc,, +transformers/models/gpt_neo/configuration_gpt_neo.py,sha256=gAqXIqy6_YiogYm6rwRPE2t_dqIoEiwLm7oePX2__gg,11931 +transformers/models/gpt_neo/convert_gpt_neo_mesh_tf_to_pytorch.py,sha256=SSlCsIZmkN010Cu64F4lxwHcQRsqEGbb7a6PqCSWJY0,2589 +transformers/models/gpt_neo/modeling_flax_gpt_neo.py,sha256=xgwE5UixFan9wDb9ScOd8DcEH-o1Iu-AX1bNkMWQFEA,28074 +transformers/models/gpt_neo/modeling_gpt_neo.py,sha256=VRC5JDwwaumJDna7PM0pZgv-DeL1uMbuwB_1KiAyIgE,58257 +transformers/models/gpt_neox/__init__.py,sha256=NETOJyNfZJ1SXJ4jc1heeVs2TMqXjlbminmJQKSnLnA,2595 +transformers/models/gpt_neox/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/configuration_gpt_neox.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/modeling_gpt_neox.cpython-310.pyc,, +transformers/models/gpt_neox/__pycache__/tokenization_gpt_neox_fast.cpython-310.pyc,, +transformers/models/gpt_neox/configuration_gpt_neox.py,sha256=4-pmNwzHWZMJNYHsZgYJmWfXVYOQLfcsO77Up9BsWXY,8974 +transformers/models/gpt_neox/modeling_gpt_neox.py,sha256=_GHHApaLCnK7E9CR29GFz8g0kEvoAtoS2qC9vuN6N0I,64897 +transformers/models/gpt_neox/tokenization_gpt_neox_fast.py,sha256=muSciDUjACX1nufh4Nq5tX6QhM5QOz1YeEQdUiszB1Y,10211 +transformers/models/gpt_neox_japanese/__init__.py,sha256=7S5Q5Y8aQPbcoaPjIVo7s9ebHh0GLv3cA1TeAhzvFFA,2154 +transformers/models/gpt_neox_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/configuration_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/modeling_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/__pycache__/tokenization_gpt_neox_japanese.cpython-310.pyc,, +transformers/models/gpt_neox_japanese/configuration_gpt_neox_japanese.py,sha256=tnDNY86uFFFQ_SMrQcqYTGPxeYNKPlae2QsXfKgAQ-I,5668 +transformers/models/gpt_neox_japanese/modeling_gpt_neox_japanese.py,sha256=1jC2InCehrPxA2bKUOAn1Jlxt_mEdgARfSSXVlRFJ-w,32386 +transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py,sha256=7CvQVyzCJa5t-3McYp7z2qJRec3-ZthRoWguTp3EiVk,17082 +transformers/models/gpt_sw3/__init__.py,sha256=qJj7vF8ES37BwsKbJE1zV2rPUdmM3vx8mckIFuWrJSU,1361 +transformers/models/gpt_sw3/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gpt_sw3/__pycache__/convert_megatron_to_pytorch.cpython-310.pyc,, +transformers/models/gpt_sw3/__pycache__/tokenization_gpt_sw3.cpython-310.pyc,, +transformers/models/gpt_sw3/convert_megatron_to_pytorch.py,sha256=11EGXgi73zwRchm4aMlHE7tCom4_oGLQSWF1YMpBBQA,8156 +transformers/models/gpt_sw3/tokenization_gpt_sw3.py,sha256=cGST5TNnGR90L6CnCHIfnpobDARfCO4BinKaKQ6YaqY,13517 +transformers/models/gptj/__init__.py,sha256=wBErGYabUQpzDULOVQSE9vEvefKWJvJFoU9p0t54qDU,3280 +transformers/models/gptj/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gptj/__pycache__/configuration_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_flax_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_gptj.cpython-310.pyc,, +transformers/models/gptj/__pycache__/modeling_tf_gptj.cpython-310.pyc,, +transformers/models/gptj/configuration_gptj.py,sha256=zy0vdt6BICJlTG6olKuIHfzu2QMEWPPAk0Z_LyadJrU,8880 +transformers/models/gptj/modeling_flax_gptj.py,sha256=VaYTrxQosqkIqHcbKcDFinT_z3aofwdJLasWAqxjRlM,28525 +transformers/models/gptj/modeling_gptj.py,sha256=sWfh0DAi3PlxWkSZFZWEVFLdiQVdlAkwpJimXpbld3M,63142 
+transformers/models/gptj/modeling_tf_gptj.py,sha256=Rbgvnc3X-wtnw9BKun5G3UcW--i-H6BFOldygQ5GYvU,48066 +transformers/models/gptsan_japanese/__init__.py,sha256=gkfCyeWUjR_u2kxoe0nD-gLdcFoS4SwjhQBNufTY86w,2294 +transformers/models/gptsan_japanese/__pycache__/__init__.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/configuration_gptsan_japanese.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/convert_gptsan_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/modeling_gptsan_japanese.cpython-310.pyc,, +transformers/models/gptsan_japanese/__pycache__/tokenization_gptsan_japanese.cpython-310.pyc,, +transformers/models/gptsan_japanese/configuration_gptsan_japanese.py,sha256=TL57v1NF7w5TFfRRqYCpPM4kenFk_-bJ1C5z2OEqSTs,7230 +transformers/models/gptsan_japanese/convert_gptsan_tf_checkpoint_to_pytorch.py,sha256=syF4TCbLQByZhm5VqIFgXfzQ4zImmCua8UNjCYJP5t8,9793 +transformers/models/gptsan_japanese/modeling_gptsan_japanese.py,sha256=ttTVYnf2hox6oLiXpRV_AzjmFOLE8UVW1I1AZzkCTXY,66612 +transformers/models/gptsan_japanese/tokenization_gptsan_japanese.py,sha256=Aoncz2BgbOaYNJyqbj6ByCG3cAZoos-WK67m5QKHQeM,24310 +transformers/models/graphormer/__init__.py,sha256=SCL3NOPe62lQVk-qWrJD1enP6JNBWyPreg5EGaifjbE,1873 +transformers/models/graphormer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/graphormer/__pycache__/collating_graphormer.cpython-310.pyc,, +transformers/models/graphormer/__pycache__/configuration_graphormer.cpython-310.pyc,, +transformers/models/graphormer/__pycache__/modeling_graphormer.cpython-310.pyc,, +transformers/models/graphormer/algos_graphormer.pyx,sha256=b_Qlm1hKCHnAqx6oOLGC9LkivAV0K_AZRGgXT9MmBas,3635 +transformers/models/graphormer/collating_graphormer.py,sha256=1r_YqrFzC6uWCaPCsGMqNkvHNKs6SCV1bSw2qLyAYJA,6086 +transformers/models/graphormer/configuration_graphormer.py,sha256=ppv8lh6wroRM_q8iBIWISI4X5COJaZNwS8K2T-ygqbY,10481 +transformers/models/graphormer/modeling_graphormer.py,sha256=a1MmXQ2a71TcXEu109Wqa23B3QdNRvrzoUNXIaqZ8jw,37105 +transformers/models/grounding_dino/__init__.py,sha256=Erk4Xw6A3yKtLAFJ_XaAsp2JKUAYSZXA_NEhC-llHTo,2570 +transformers/models/grounding_dino/__pycache__/__init__.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/configuration_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/convert_grounding_dino_to_hf.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/image_processing_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/modeling_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/__pycache__/processing_grounding_dino.cpython-310.pyc,, +transformers/models/grounding_dino/configuration_grounding_dino.py,sha256=Nm24p_CCv7U57LjdzcuAQIPCfG-_7I65PaGIthB1YXU,15243 +transformers/models/grounding_dino/convert_grounding_dino_to_hf.py,sha256=U3T2-FjtYv9unK36_iA17Ifww7kWWYWpPbVC6u6-unQ,25445 +transformers/models/grounding_dino/image_processing_grounding_dino.py,sha256=Iphjv5yuvntKKtW-_VB9JQ-f0ZhCZu8NIW6QX885HcY,66200 +transformers/models/grounding_dino/modeling_grounding_dino.py,sha256=p2tAqOP3KdvKDtqHk9Peo4IcuEHwvA7ucaRZNuVF0vY,154095 +transformers/models/grounding_dino/processing_grounding_dino.py,sha256=kRjQfahwF3QAdW4_lgVmE1M2S-TX2PgLX-4XjKGL53c,9628 +transformers/models/groupvit/__init__.py,sha256=rO2THuhEVPYRh__0tgdPS9egtqSugEkoXU4lDMAg3q0,2875 +transformers/models/groupvit/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/groupvit/__pycache__/configuration_groupvit.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/convert_groupvit_nvlab_to_hf.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/modeling_groupvit.cpython-310.pyc,, +transformers/models/groupvit/__pycache__/modeling_tf_groupvit.cpython-310.pyc,, +transformers/models/groupvit/configuration_groupvit.py,sha256=VIjM_1UnJOGvxhSqltaiDxYUOcoU4TQp8bO3ATqORBc,20796 +transformers/models/groupvit/convert_groupvit_nvlab_to_hf.py,sha256=9gQxkcjVNCP5lvV54SbbSsOjkKCHORcoiwq2gcczYCM,9775 +transformers/models/groupvit/modeling_groupvit.py,sha256=hqh41N5MbxNyAy3g61xCHqYBbJnYkj-0tHXTmT6lmb8,67884 +transformers/models/groupvit/modeling_tf_groupvit.py,sha256=w0Lc4OFGeUixsV2f0CZ0JL1laey4tnkAS6Fx7Zx3Wio,89848 +transformers/models/herbert/__init__.py,sha256=Sp9gQIqlUhZHausuaL2MFYDqJW4vvsVGLbVryR-kNl0,1472 +transformers/models/herbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert.cpython-310.pyc,, +transformers/models/herbert/__pycache__/tokenization_herbert_fast.cpython-310.pyc,, +transformers/models/herbert/tokenization_herbert.py,sha256=5xsZeIOJU5UQ1-u75m8c_pOtS5IoFdaMOiL43-dYR5Q,25042 +transformers/models/herbert/tokenization_herbert_fast.py,sha256=QITcJycMNQuUIumL_an1mNz55fJADkW9S6jdBmO99KM,5926 +transformers/models/hubert/__init__.py,sha256=rfeBnkDY2iMz8xs_cZY4wSMSxoXQeVQov-C42xhA0eE,2536 +transformers/models/hubert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/hubert/__pycache__/configuration_hubert.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_distilhubert_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_hubert_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/convert_hubert_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/hubert/__pycache__/modeling_hubert.cpython-310.pyc,, +transformers/models/hubert/__pycache__/modeling_tf_hubert.cpython-310.pyc,, +transformers/models/hubert/configuration_hubert.py,sha256=DuxtNSM-RtwpGqGXMMtujQBv8015T51jruii7Hi4nyc,14774 +transformers/models/hubert/convert_distilhubert_original_s3prl_checkpoint_to_pytorch.py,sha256=ENEJNVBI7j5N6ajvUnNEAfSIM6VfEmpI8dF86R4EDog,8942 +transformers/models/hubert/convert_hubert_original_pytorch_checkpoint_to_pytorch.py,sha256=tVrpW4Mqkymh6pcLdYdTtkl0ykhSkHNvfTefbBIpR7w,10380 +transformers/models/hubert/convert_hubert_original_s3prl_checkpoint_to_pytorch.py,sha256=BtUOQ6Jf7kppeKreWA76AvQNdy_a63t2iuq0yHvEs4E,2895 +transformers/models/hubert/modeling_hubert.py,sha256=X6D6U51pn8DRbPt9EyoHm6sbFPhm_hYBj0H2dNm4fxA,60130 +transformers/models/hubert/modeling_tf_hubert.py,sha256=211G8mTNnXGEwRgYZSNGxHgGt0SseG26kzvwLw3BvZ4,70788 +transformers/models/ibert/__init__.py,sha256=uw-Mi7HIih0Or_1DeCK7Ooc20kBdmqokZ6GEDwOD9LU,2086 +transformers/models/ibert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/ibert/__pycache__/configuration_ibert.cpython-310.pyc,, +transformers/models/ibert/__pycache__/modeling_ibert.cpython-310.pyc,, +transformers/models/ibert/__pycache__/quant_modules.cpython-310.pyc,, +transformers/models/ibert/configuration_ibert.py,sha256=eZRFwXoP-EqeqKzLRAUUcTjEGdAM0QaZ8TWm6flautE,7144 +transformers/models/ibert/modeling_ibert.py,sha256=fweAtT2llKdCyFB0oFYNSYYkrrQeTpTVbpRuH11Vyco,56727 +transformers/models/ibert/quant_modules.py,sha256=ItU76CIx0XcZCPOR21dz99J9k5rK2fzffQz0jJCuNmM,30072 
+transformers/models/idefics/__init__.py,sha256=XnXH7RPak98A3W6H9eW1o8eiVgxgAMKoi6xAkKBOL8o,2360 +transformers/models/idefics/__pycache__/__init__.cpython-310.pyc,, +transformers/models/idefics/__pycache__/configuration_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/image_processing_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/modeling_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/perceiver.cpython-310.pyc,, +transformers/models/idefics/__pycache__/processing_idefics.cpython-310.pyc,, +transformers/models/idefics/__pycache__/vision.cpython-310.pyc,, +transformers/models/idefics/configuration_idefics.py,sha256=bhqaO3X_dIyVmYX02y276VdEDzvQvgQncPycpPWdgHo,15467 +transformers/models/idefics/image_processing_idefics.py,sha256=xcHYUAzAgIaXk92aU0YY83scvQdpQekN37UJll9utdg,7801 +transformers/models/idefics/modeling_idefics.py,sha256=jsC5pHWvo6mVySwPGU-zEdJkczYI2rfy9th1-GBkFlY,72847 +transformers/models/idefics/perceiver.py,sha256=RtKLRu3IIjUHCYcLAgZyirDbxK-ZlKKts_to0fv1x6o,9432 +transformers/models/idefics/processing_idefics.py,sha256=QxKO8rSMZCWOsHaDwzZMWQ374rh6IXRpQUNbcOFg7JI,17932 +transformers/models/idefics/vision.py,sha256=B27HyrQNrY9l9o--jMQmL9NdkJRVqYt2u36TXiyNQSs,22502 +transformers/models/idefics2/__init__.py,sha256=ZUUTVILcQ-2gepqN7ZSS2qjrHr1JjBF8p8JtQv_fVR8,2315 +transformers/models/idefics2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/configuration_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/convert_idefics2_weights_to_hf.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/image_processing_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/modeling_idefics2.cpython-310.pyc,, +transformers/models/idefics2/__pycache__/processing_idefics2.cpython-310.pyc,, +transformers/models/idefics2/configuration_idefics2.py,sha256=XwZgRmu127ZWwyc71DQPSlJCGHhV5HN2dnfC3hriReg,11810 +transformers/models/idefics2/convert_idefics2_weights_to_hf.py,sha256=3nd_V1qNTv7DehZZQLLAeKh0086xvjECNuWBvJmFbNM,6669 +transformers/models/idefics2/image_processing_idefics2.py,sha256=RAJpeMN3iNaL4YJtV_6ATVYlva5h_pVFR_bRgHL3MWY,27422 +transformers/models/idefics2/modeling_idefics2.py,sha256=Hg5PpteEtNALJNeSGxSjCDNEz48rVFwJ93-xnmop01s,93563 +transformers/models/idefics2/processing_idefics2.py,sha256=jfud4Imt_3UMyCNpOR7n-9Yrq3HlYuW6ocxhIyXA3wg,16229 +transformers/models/imagegpt/__init__.py,sha256=aPsv_YVn82O_HHaFDIsYqe8bR8hs3sk1RUlcCtaUWcc,2658 +transformers/models/imagegpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/configuration_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/convert_imagegpt_original_tf2_to_pytorch.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/feature_extraction_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/image_processing_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/__pycache__/modeling_imagegpt.cpython-310.pyc,, +transformers/models/imagegpt/configuration_imagegpt.py,sha256=MtR2BysoI2OoG9dPOeabhdnCz7l5M47eHXHzc5DEbJM,8820 +transformers/models/imagegpt/convert_imagegpt_original_tf2_to_pytorch.py,sha256=yneGtcrTR4Ui38NG8ogK7N_4dAyTiVBkmc8JQERb2bs,2691 +transformers/models/imagegpt/feature_extraction_imagegpt.py,sha256=iCpQ4tU3Vml44KgO43kYJvv-RcZVxe8tc794gxUktuU,1200 +transformers/models/imagegpt/image_processing_imagegpt.py,sha256=UH8YSyNGl4jI4rrPb0HrjbPnKp3PSlykBCY4vdGhjA0,14692 
+transformers/models/imagegpt/modeling_imagegpt.py,sha256=jmC1ZC5BzttNxnapndU9a607NSeUBCgvfjJ0Jl-aq2U,53680 +transformers/models/informer/__init__.py,sha256=VylZIY0U5EuIfEuvphPh-gCCgBtwRAByccv11nsTA5Q,1857 +transformers/models/informer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/informer/__pycache__/configuration_informer.cpython-310.pyc,, +transformers/models/informer/__pycache__/modeling_informer.cpython-310.pyc,, +transformers/models/informer/configuration_informer.py,sha256=G3nrYsT6Z8jdjJ3CL2OWjrY6bsIKDSs8fEiRMjXQaxU,12512 +transformers/models/informer/modeling_informer.py,sha256=o0pZ_I-KgRBgmm6lhQSSWIuoyqmMOvP7E1fDNPcPlSo,101597 +transformers/models/instructblip/__init__.py,sha256=GpbqWHExuUvlsDeouDhVv-f_etjU9Dwm006DwFiAMEg,2279 +transformers/models/instructblip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/configuration_instructblip.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/convert_instructblip_original_to_pytorch.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/modeling_instructblip.cpython-310.pyc,, +transformers/models/instructblip/__pycache__/processing_instructblip.cpython-310.pyc,, +transformers/models/instructblip/configuration_instructblip.py,sha256=SFPht36C6hPZjZKYuOoXnN4kynYO0K7S6WKlh3msUyo,17169 +transformers/models/instructblip/convert_instructblip_original_to_pytorch.py,sha256=iustpBsjHHzjQzbAhPJvhI7ZBSXCDoa9njtK9m_gm_I,13399 +transformers/models/instructblip/modeling_instructblip.py,sha256=Gg1QQNpxU9x5r8BBaalV5oZnIQrX3MQvR3-UeV09w2o,71246 +transformers/models/instructblip/processing_instructblip.py,sha256=zJT2QvAzlJAFlADmSSr36VWNB6xLpazrqFmp3og5AE8,7856 +transformers/models/jamba/__init__.py,sha256=aD1sOCM0Rjk2I3Zh_fEh3xeot9EVYE3X3nKi8rP-KyI,1661 +transformers/models/jamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/jamba/__pycache__/configuration_jamba.cpython-310.pyc,, +transformers/models/jamba/__pycache__/modeling_jamba.cpython-310.pyc,, +transformers/models/jamba/configuration_jamba.py,sha256=c45AlRvpZ4NGGd81o1m_PhwMiyO7DFefibXk8tIz9A8,11249 +transformers/models/jamba/modeling_jamba.py,sha256=9NZmskgbYrr9nlluDO121loHPt6rTYKFbaIxKpyHFTc,88547 +transformers/models/jukebox/__init__.py,sha256=kZx3ZvfTUb90bEGC0UVrqOfoJvIWSBrUOR701WATaHI,2084 +transformers/models/jukebox/__pycache__/__init__.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/configuration_jukebox.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/convert_jukebox.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/modeling_jukebox.cpython-310.pyc,, +transformers/models/jukebox/__pycache__/tokenization_jukebox.cpython-310.pyc,, +transformers/models/jukebox/configuration_jukebox.py,sha256=cN5kO0pVIOpa2Kwxfcwo8chuZlnybfyxOb2QnnUHM0M,26847 +transformers/models/jukebox/convert_jukebox.py,sha256=RBgOPbwIMv_42mUFJYxRv4IAGZn4cAzjTqjrMI7HtVg,11789 +transformers/models/jukebox/modeling_jukebox.py,sha256=ji9tQcWiTz7qVfmkKV7WzKA669GgNAStgBAUMwy_oqI,119566 +transformers/models/jukebox/tokenization_jukebox.py,sha256=7v083dQ8kcWvzJg4NbZUqm7y6BZVaR_tp3vqJFSOlA0,17349 +transformers/models/kosmos2/__init__.py,sha256=jUzMFMa0nRBdsr0AdK08cnugtfuAWiZTFgOow25AY5o,1967 +transformers/models/kosmos2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/configuration_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/convert_kosmos2_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, 
+transformers/models/kosmos2/__pycache__/modeling_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/__pycache__/processing_kosmos2.cpython-310.pyc,, +transformers/models/kosmos2/configuration_kosmos2.py,sha256=368fp2Q3pEL14rasKErL34mbpUbs1bIskR6FLH1two8,13320 +transformers/models/kosmos2/convert_kosmos2_original_pytorch_checkpoint_to_pytorch.py,sha256=3ejv6hUd6irzFnmSuFVI6Eu1NVWmtJf3_ql2h9P4AHk,2724 +transformers/models/kosmos2/modeling_kosmos2.py,sha256=orwMCflw9GwQamBJ8QgpLKcoDFB58fYnFhjJDLQMtwA,94993 +transformers/models/kosmos2/processing_kosmos2.py,sha256=wwLhLGgBBgpFeRWC3os8SXLI18od-NJagHFJMe9QROo,29760 +transformers/models/layoutlm/__init__.py,sha256=x-7_rGXFn-NroxQIFjQru0Rz5VfmQmINEhahNPm7R8w,3787 +transformers/models/layoutlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/configuration_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/modeling_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/modeling_tf_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm.cpython-310.pyc,, +transformers/models/layoutlm/__pycache__/tokenization_layoutlm_fast.cpython-310.pyc,, +transformers/models/layoutlm/configuration_layoutlm.py,sha256=LxYSGeoSHa9fqrGvMCsz3-8M617Ct8PFrWUNSyGUq4g,9181 +transformers/models/layoutlm/modeling_layoutlm.py,sha256=jFNDDKZg8nv7qxDKxLSeLNU-3QNSC_Yw8rzsLCFgCDE,60816 +transformers/models/layoutlm/modeling_tf_layoutlm.py,sha256=QW6Q_RzzrUCqLKAzGDcbaMO5faqzDw2CkjSYCEZ22lc,73197 +transformers/models/layoutlm/tokenization_layoutlm.py,sha256=YlVOSz-XoiLcIEN2VikUce957pvnYl1CIyImdqIU230,20943 +transformers/models/layoutlm/tokenization_layoutlm_fast.py,sha256=rqnk6TYEbN7tCP3ElDCVk6ukNL2tm4xKmxAlkZP27X4,7787 +transformers/models/layoutlmv2/__init__.py,sha256=Ue5kj1_LyJNklq6UPXvNuaAXj_gadMT8lXxwQwIPsvY,3439 +transformers/models/layoutlmv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/configuration_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/feature_extraction_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/image_processing_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/modeling_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/processing_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2.cpython-310.pyc,, +transformers/models/layoutlmv2/__pycache__/tokenization_layoutlmv2_fast.cpython-310.pyc,, +transformers/models/layoutlmv2/configuration_layoutlmv2.py,sha256=30FlO29KiaXlzvrwYUV5IniBgmx5rOH3x8dRN4D1XiI,10985 +transformers/models/layoutlmv2/feature_extraction_layoutlmv2.py,sha256=M9bDCpKBLI5paxor4ioa2JjEDhSH9Np-PTbgHh2V9KI,1195 +transformers/models/layoutlmv2/image_processing_layoutlmv2.py,sha256=yV8J93JD6AR2chy87LDh2zvl5N60MvDEreNEzaI211Y,13809 +transformers/models/layoutlmv2/modeling_layoutlmv2.py,sha256=MjLO_uxU-NHmSxoqKEGBoCa6Lk31cQCZ46YM26yW-lQ,60530 +transformers/models/layoutlmv2/processing_layoutlmv2.py,sha256=xyhBq9pYYmNYOfK2c13gA-f1cWzu1fp0kO6FC7J9DfI,9292 +transformers/models/layoutlmv2/tokenization_layoutlmv2.py,sha256=ZpOPGao8BUSVPrIjNW16HRM_GkgJCXgS-VrSuW9zjmg,72063 +transformers/models/layoutlmv2/tokenization_layoutlmv2_fast.py,sha256=jEDcU7MIeieUnorFQQ9S-DYPbyqpaooQt5KRexDzxB8,37284 +transformers/models/layoutlmv3/__init__.py,sha256=A4PpxK2Rhqx_ybVzlT5h9W6SyRSwndLqD5-eVKBz4ok,4512 +transformers/models/layoutlmv3/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/layoutlmv3/__pycache__/configuration_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/feature_extraction_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/image_processing_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/modeling_tf_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/processing_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3.cpython-310.pyc,, +transformers/models/layoutlmv3/__pycache__/tokenization_layoutlmv3_fast.cpython-310.pyc,, +transformers/models/layoutlmv3/configuration_layoutlmv3.py,sha256=FoAmr5ghMAnT6P3XS6MkQ5vNJf5Pr0mUtUZPB3HexSI,13307 +transformers/models/layoutlmv3/feature_extraction_layoutlmv3.py,sha256=jWsmsi2mym0meek1lHWqfqxlJgMJdY3cgfQ_4ASEbto,1195 +transformers/models/layoutlmv3/image_processing_layoutlmv3.py,sha256=3zmcx39HvcXzHJeI70U0Jo2e6fkpUUorArlXlDHX-ow,18813 +transformers/models/layoutlmv3/modeling_layoutlmv3.py,sha256=JgAXoTWAzjaTuqd2tT9pD_0VVO6dPbcdfakOjgE3dCM,59813 +transformers/models/layoutlmv3/modeling_tf_layoutlmv3.py,sha256=hLD25aX1LQWwXoKO3DqLtzjfBpSx9wzWyKJS6CxyMfI,76785 +transformers/models/layoutlmv3/processing_layoutlmv3.py,sha256=ShtvBmZjGHbprdB14v2QsIgVir-74gEnTGHzvL31vCI,9143 +transformers/models/layoutlmv3/tokenization_layoutlmv3.py,sha256=UrCNLze0Z_6WX5KJ-qYxaAfEzOZfEofjx2Ky5hCiC_8,72055 +transformers/models/layoutlmv3/tokenization_layoutlmv3_fast.py,sha256=36lBDAt8h1qmAI2ACWUODe2Umcn4RWxSzl2MBmPtlnQ,39532 +transformers/models/layoutxlm/__init__.py,sha256=AIvjzuqRPFXFuWXxnOlp9pBXaIT5Zzx7fwtg2KKVETA,2037 +transformers/models/layoutxlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/processing_layoutxlm.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm.cpython-310.pyc,, +transformers/models/layoutxlm/__pycache__/tokenization_layoutxlm_fast.cpython-310.pyc,, +transformers/models/layoutxlm/processing_layoutxlm.py,sha256=2xtffeErPXtu2tW_ya4YaHDoqWCljDPfoL2V1Jlo6JI,9242 +transformers/models/layoutxlm/tokenization_layoutxlm.py,sha256=FxqSo2IeMoeE9xwdVDWWClbFd4hvfwmSg6TrgH_kvDI,57299 +transformers/models/layoutxlm/tokenization_layoutxlm_fast.py,sha256=VPYbrW0yGx5dh9lMG1_6prxANqXcyfFHgK7MSuaSYGI,39769 +transformers/models/led/__init__.py,sha256=9CdjSo8a3H8LyFlzOxCmUUZG2icbvPJ_Q_hFcaKBf4E,3008 +transformers/models/led/__pycache__/__init__.cpython-310.pyc,, +transformers/models/led/__pycache__/configuration_led.cpython-310.pyc,, +transformers/models/led/__pycache__/modeling_led.cpython-310.pyc,, +transformers/models/led/__pycache__/modeling_tf_led.cpython-310.pyc,, +transformers/models/led/__pycache__/tokenization_led.cpython-310.pyc,, +transformers/models/led/__pycache__/tokenization_led_fast.cpython-310.pyc,, +transformers/models/led/configuration_led.py,sha256=5fYJMjSlrvlFor5N0cjDtoLRUNDt0U7orwavdu3NLaM,7515 +transformers/models/led/modeling_led.py,sha256=6ImPuU-K8flv0DX-jL5CuNc10WCOr78ie78cpbMUgUg,139153 +transformers/models/led/modeling_tf_led.py,sha256=drHWpT50oyMc1gLh2bNwE75K-IzP6-NYW5dj5QS5LAs,123072 +transformers/models/led/tokenization_led.py,sha256=H2B8JdOoxg6O0a_ul477ToPDDBmoUfiPGsg7zwFfe7U,19752 +transformers/models/led/tokenization_led_fast.py,sha256=oaeDl37DL_c_RzX1XbplbdQLT5F85m_cprAQ_8QQRHc,14542 +transformers/models/levit/__init__.py,sha256=bn2rphZqhhv59V7XPWBSS3nntAk8n8qi8o9uhqmi2do,2508 
+transformers/models/levit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/levit/__pycache__/configuration_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/convert_levit_timm_to_pytorch.cpython-310.pyc,, +transformers/models/levit/__pycache__/feature_extraction_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/image_processing_levit.cpython-310.pyc,, +transformers/models/levit/__pycache__/modeling_levit.cpython-310.pyc,, +transformers/models/levit/configuration_levit.py,sha256=xSTUIJE8OmQ0vGSTKVT6sGmvi5AfzUww45Au5ZdsDf8,5814 +transformers/models/levit/convert_levit_timm_to_pytorch.py,sha256=HKjk4WPa6DO_2CM0Qy9R3mAEOdbf71DtS-T4uqoQJ9I,6258 +transformers/models/levit/feature_extraction_levit.py,sha256=l2RHbrbg9MzRqKr_ErOo_AuiSv93Gj-Oq6w0v2p-Izw,1204 +transformers/models/levit/image_processing_levit.py,sha256=CD7HBX2SVeEV9eF6E3hvh6-Y051LSjvpkjy4Y8QUO3Q,17058 +transformers/models/levit/modeling_levit.py,sha256=NHsFjmJdDmT6x9_-6EeAOwsgXRl4PeADDPNjOkwpOCk,29416 +transformers/models/lilt/__init__.py,sha256=bIm8VAW84HA1oTl3ZITLrjMZ9VIyJ4s6_x9R9N767nM,1909 +transformers/models/lilt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/lilt/__pycache__/configuration_lilt.cpython-310.pyc,, +transformers/models/lilt/__pycache__/modeling_lilt.cpython-310.pyc,, +transformers/models/lilt/configuration_lilt.py,sha256=SNi4Rr4_hrCM4VUJZXbEtdPWG-qQg9L-dCeaaMqctDw,6791 +transformers/models/lilt/modeling_lilt.py,sha256=ZrwWsqOMK7PydbkrMIpH1-IK78U_VjkIKu-Y3eEjepE,52704 +transformers/models/llama/__init__.py,sha256=Jur2SZ5J29BTDTaoQfXv69e-ZpcX5NiKbzAP1DGV9-A,3349 +transformers/models/llama/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llama/__pycache__/configuration_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/convert_llama_weights_to_hf.cpython-310.pyc,, +transformers/models/llama/__pycache__/modeling_flax_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/modeling_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/tokenization_llama.cpython-310.pyc,, +transformers/models/llama/__pycache__/tokenization_llama_fast.cpython-310.pyc,, +transformers/models/llama/configuration_llama.py,sha256=_z3ZXQG4r1I8vFXjgLoG976t9jUxIleE39bRlY-PZKQ,9452 +transformers/models/llama/convert_llama_weights_to_hf.py,sha256=CC5jifkiq1F9LZZspnmW_G2TvGTaJ8o_KpjdmWq3uw8,14165 +transformers/models/llama/modeling_flax_llama.py,sha256=shWgZWAGG0NpsFldqkTNBpsylJHD4FBD9NGU_EmK3YE,30831 +transformers/models/llama/modeling_llama.py,sha256=tfs8mePhGNXub9Z4dK0pj7gyguwMqOjGaMLNY6D1tgk,73442 +transformers/models/llama/tokenization_llama.py,sha256=_V_1OSbNR-58lX4R3LT8iORA4wVPV1tH9hX-rBIHQ0s,22019 +transformers/models/llama/tokenization_llama_fast.py,sha256=ic94pHJ2-tx_YghLWM4iFrybBp-9lRlT7-TcTJPJy5w,13056 +transformers/models/llava/__init__.py,sha256=GJ1vhnHiwzzN27stoZkhMdatFwb0aAhIzxSi1KLckz0,1797 +transformers/models/llava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava/__pycache__/configuration_llava.cpython-310.pyc,, +transformers/models/llava/__pycache__/convert_llava_weights_to_hf.cpython-310.pyc,, +transformers/models/llava/__pycache__/modeling_llava.cpython-310.pyc,, +transformers/models/llava/__pycache__/processing_llava.cpython-310.pyc,, +transformers/models/llava/configuration_llava.py,sha256=Ai0mct3mlKEnri3jDDJU0-GjZFCIfnDQBhURpuCUdvM,6188 +transformers/models/llava/convert_llava_weights_to_hf.py,sha256=jqOHXrbRbkwXkpWF_elzKblom0oJgOKqA6r4C9ouCaA,5420 
+transformers/models/llava/modeling_llava.py,sha256=q7fT2z9Bw4iLExJhUV5rkghwpV_2JxQGu2oXjBYeACs,29751 +transformers/models/llava/processing_llava.py,sha256=uWsMuJKQCs9cQTzC97CeMccF1vAdEi_5VfJOnqu0X3Q,7176 +transformers/models/llava_next/__init__.py,sha256=U1uTqs5hULnuuZQB6x8OBWUgZ4MmYwQ-BtaY9ph57ow,2363 +transformers/models/llava_next/__pycache__/__init__.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/configuration_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/convert_llava_next_weights_to_hf.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/image_processing_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/modeling_llava_next.cpython-310.pyc,, +transformers/models/llava_next/__pycache__/processing_llava_next.cpython-310.pyc,, +transformers/models/llava_next/configuration_llava_next.py,sha256=PJ4aqBe6XVOE3wZRQIC61noncYPER9FTfrVbMgymeX0,6220 +transformers/models/llava_next/convert_llava_next_weights_to_hf.py,sha256=wwsI9xSFffJ5xRRUJtZVD-omnhKfDjLVXSJPYuJwFYU,15760 +transformers/models/llava_next/image_processing_llava_next.py,sha256=gVsXdxFmPyRJJjXTwMLOV_KkeAGCoII-fpFLJEXUyu4,28939 +transformers/models/llava_next/modeling_llava_next.py,sha256=X9yLO1lCmm4Ra-2tI1HIAjIFtOA2EwURRUv1He0S-OI,36505 +transformers/models/llava_next/processing_llava_next.py,sha256=S518k2ob-SGkKUuFoBQkaKO9OIdj2VVDKJUb7sg1bpQ,7193 +transformers/models/longformer/__init__.py,sha256=mbx6LG2-PW5i_Ntq3kFn1MhnegTVAs0_ZOKAGeMi5ps,4196 +transformers/models/longformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/longformer/__pycache__/configuration_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/convert_longformer_original_pytorch_lightning_to_pytorch.cpython-310.pyc,, +transformers/models/longformer/__pycache__/modeling_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/modeling_tf_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer.cpython-310.pyc,, +transformers/models/longformer/__pycache__/tokenization_longformer_fast.cpython-310.pyc,, +transformers/models/longformer/configuration_longformer.py,sha256=6z-sC3XxqwMgQU3xW1E-KGkdqvMc8QSWRSkWthHm3r4,8867 +transformers/models/longformer/convert_longformer_original_pytorch_lightning_to_pytorch.py,sha256=gKyYNmo8Of0j_h6x8JSHaYc6hTyzJFwWETi5KectvFM,3026 +transformers/models/longformer/modeling_longformer.py,sha256=EaavJ7dDbttkQp9sAzqMqvYKjhdQsVy6IBPChWDy5PU,113970 +transformers/models/longformer/modeling_tf_longformer.py,sha256=5nwjvFoxbJVBHjfCUyHQ59LRamKmTMTQR2Te7-gtgws,129450 +transformers/models/longformer/tokenization_longformer.py,sha256=t64oV7_8WJMD8b2uRr1SH9OaBPddZXBxAKsWbHpGCrk,16797 +transformers/models/longformer/tokenization_longformer_fast.py,sha256=NgQuhW2aHQRkRP59XONS8rfzqQ7rk2Pw3GTh611bX40,11671 +transformers/models/longt5/__init__.py,sha256=nN2BIwcwmdcMffrxzPKx9oeVWsHu9wt1BUJYIPWfm3Y,2546 +transformers/models/longt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/longt5/__pycache__/configuration_longt5.cpython-310.pyc,, +transformers/models/longt5/__pycache__/convert_longt5x_checkpoint_to_flax.cpython-310.pyc,, +transformers/models/longt5/__pycache__/modeling_flax_longt5.cpython-310.pyc,, +transformers/models/longt5/__pycache__/modeling_longt5.cpython-310.pyc,, +transformers/models/longt5/configuration_longt5.py,sha256=dh8qcgIEBuAzNxJ3OkxtdjEXd_DMBCMzHwZSGgX9zG8,8097 
+transformers/models/longt5/convert_longt5x_checkpoint_to_flax.py,sha256=5LQpQWNG_8Fc0tU62eYf66RmJzUcb-RynDdrvziZEqw,11089 +transformers/models/longt5/modeling_flax_longt5.py,sha256=TBgoH7wMBAGNMilDvmg1U-394Z7ImK55Tm4saS-0CVs,105672 +transformers/models/longt5/modeling_longt5.py,sha256=jiAvt1CJlQYFikiNJgtglTs5DthqT4emKox8OPedTLM,106035 +transformers/models/luke/__init__.py,sha256=xuqWDYOtcrf1vEC71vfltl8ICWfW7GyU9sP8RWD-iU4,2383 +transformers/models/luke/__pycache__/__init__.cpython-310.pyc,, +transformers/models/luke/__pycache__/configuration_luke.cpython-310.pyc,, +transformers/models/luke/__pycache__/convert_luke_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/luke/__pycache__/modeling_luke.cpython-310.pyc,, +transformers/models/luke/__pycache__/tokenization_luke.cpython-310.pyc,, +transformers/models/luke/configuration_luke.py,sha256=tRGFoUmMOp8GoCAcweTSWbgVaAPr5aSlRfJexo6G3TI,6690 +transformers/models/luke/convert_luke_original_pytorch_checkpoint_to_pytorch.py,sha256=pfnDfBvJDRyCLBLdcsalZaKV01aEz0W1og2Z364hTDs,7467 +transformers/models/luke/modeling_luke.py,sha256=Dpwj3EnVnaH5B4ZCinHGxOsFmKacRi0uPjrEWnDtP34,103858 +transformers/models/luke/tokenization_luke.py,sha256=8r_ZbX0F719Ob5MTWgQZvT9TetdRDN0aAdY4LUcGhQg,84406 +transformers/models/lxmert/__init__.py,sha256=3rn46z5WOBmOrbr6e7zoIWh4F8Bf3hFBASDY0vxlxbI,3396 +transformers/models/lxmert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/configuration_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/convert_lxmert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/modeling_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/modeling_tf_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert.cpython-310.pyc,, +transformers/models/lxmert/__pycache__/tokenization_lxmert_fast.cpython-310.pyc,, +transformers/models/lxmert/configuration_lxmert.py,sha256=ySHIfLgH1ZfM3wZ6yO8fOTm8naotfRarY5d-R1ZVWMs,9005 +transformers/models/lxmert/convert_lxmert_original_tf_checkpoint_to_pytorch.py,sha256=T3vqC76pis49OXeHODsBSBBDGDe6qnUBckwGOWySmpc,2109 +transformers/models/lxmert/modeling_lxmert.py,sha256=WE_v6Z1In05mTqEpRHxQzXh5lrNVvgXeYCfcgghOn1c,64958 +transformers/models/lxmert/modeling_tf_lxmert.py,sha256=F7SS0dxVRlR991pfUzwhoxpFABdRsB4tqp2CeTjhUW8,72721 +transformers/models/lxmert/tokenization_lxmert.py,sha256=0Se6v8_PKzz7B5twxG-as9qvq_VFNgd-PFU7WhJmDeM,20966 +transformers/models/lxmert/tokenization_lxmert_fast.py,sha256=-mHMI4WCLJoXt8nYvJRibyOYfJKByPW_a6nQV_L11PM,7720 +transformers/models/m2m_100/__init__.py,sha256=fT84ZTHmw2vMrme8MqfSoPZWSECY-SLXDG0AR8Z1qRc,1992 +transformers/models/m2m_100/__pycache__/__init__.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/configuration_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/convert_m2m100_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/modeling_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/__pycache__/tokenization_m2m_100.cpython-310.pyc,, +transformers/models/m2m_100/configuration_m2m_100.py,sha256=BoZVjTfxXk0SOALC-ijDnoDUwWGFtEHxLxLIll3kNLw,13461 +transformers/models/m2m_100/convert_m2m100_original_checkpoint_to_pytorch.py,sha256=xNG8NE20odOve8Z1zKPDHJr5Ev8jM30N-mJsJqfsXtM,3159 +transformers/models/m2m_100/modeling_m2m_100.py,sha256=5YQfx9TwuRJG1RPNb_nlFFFvj2J-MN9RNt7UGrIxlLI,75015 
+transformers/models/m2m_100/tokenization_m2m_100.py,sha256=2XlY9d4-Nv92SaVyM3VC1Linw0jTLwGcpsL7jyG7R9Y,16320 +transformers/models/mamba/__init__.py,sha256=xLSIqiYCZgZDg4J4rpsc-olAcskXsCL0Ckh6CA_Prvw,1798 +transformers/models/mamba/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mamba/__pycache__/configuration_mamba.cpython-310.pyc,, +transformers/models/mamba/__pycache__/convert_mamba_ssm_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mamba/__pycache__/modeling_mamba.cpython-310.pyc,, +transformers/models/mamba/configuration_mamba.py,sha256=kzPRNKGFg0mLB6d7L2LX9mjuj8vHA5nVY-eoC5W7Nx4,7076 +transformers/models/mamba/convert_mamba_ssm_checkpoint_to_pytorch.py,sha256=BK6M1tYEwLwoM7NE3fdm0BRpZH8LvCTGvdvsee9taZA,6454 +transformers/models/mamba/modeling_mamba.py,sha256=QwPCMLFiG9h47aCII6SeAZrGML8z-cxNEVikk0DcacU,32605 +transformers/models/marian/__init__.py,sha256=_aQPsVh7jA_BTVbCkRprc2NmnLlkhfEtfJW_1WIwUqI,3444 +transformers/models/marian/__pycache__/__init__.cpython-310.pyc,, +transformers/models/marian/__pycache__/configuration_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/convert_marian_tatoeba_to_pytorch.cpython-310.pyc,, +transformers/models/marian/__pycache__/convert_marian_to_pytorch.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_flax_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/modeling_tf_marian.cpython-310.pyc,, +transformers/models/marian/__pycache__/tokenization_marian.cpython-310.pyc,, +transformers/models/marian/configuration_marian.py,sha256=dYTlIdcilG1GNacIz1acKC6bRv4jlZYN9--gNR6wsO4,18328 +transformers/models/marian/convert_marian_tatoeba_to_pytorch.py,sha256=N_YEEFgsGy2W-4QxeGD3bIIGNl_oYv64GkTw0WDpiaU,36254 +transformers/models/marian/convert_marian_to_pytorch.py,sha256=lggn1nlv2EBgLarnYE_SKkUnDPKDgngL_xOtBJxQIgY,26775 +transformers/models/marian/modeling_flax_marian.py,sha256=vt7iI4WBYOAhz36UqJcXPIUu5q8U6xY-wwAphjOQsco,64262 +transformers/models/marian/modeling_marian.py,sha256=ci45qB1A1IYKTfLOmGjnZjLCtkYg-Vlr_VXEkYbCmmw,82080 +transformers/models/marian/modeling_tf_marian.py,sha256=IEwr-j8xPUbuYNBN6mKzYmLyK0FrmbMVGoXRo4C944w,72682 +transformers/models/marian/tokenization_marian.py,sha256=wo7Hy2uRzFP_hX84DhoZTvWUeXtWhqG5-BxmcFAsg_0,16812 +transformers/models/markuplm/__init__.py,sha256=RjQ4xza9uhSlHJ11ZIHA19o-cWoC88fJvts8zYDOznY,2806 +transformers/models/markuplm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/configuration_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/feature_extraction_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/modeling_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/processing_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/tokenization_markuplm.cpython-310.pyc,, +transformers/models/markuplm/__pycache__/tokenization_markuplm_fast.cpython-310.pyc,, +transformers/models/markuplm/configuration_markuplm.py,sha256=dGjiJGaLZ2k3eqLqp93LWrfCFjB57uR_a47UkmtYf5A,7412 +transformers/models/markuplm/feature_extraction_markuplm.py,sha256=3V8MR36mQskKYQeaGrWuqWo9w5JG67nhRvxzWu7fR9s,6404 +transformers/models/markuplm/modeling_markuplm.py,sha256=qxIDO2S2FxWvrMvraf_YT7owmxQykvjZEdTEm93DS1I,58222 +transformers/models/markuplm/processing_markuplm.py,sha256=dCxh-u2OQvsoAeK0GWGDwMgZuLIgF7tu5Q7uERx5NwY,6348 
+transformers/models/markuplm/tokenization_markuplm.py,sha256=qVpE_pPBwLioGbNHM2-uqoNiwFCoDACSCaExhZ0MvDc,68972 +transformers/models/markuplm/tokenization_markuplm_fast.py,sha256=v75Kirn2W7tCxDT-AULtW9G3Ew_53mg0HiHFhYYUFys,42939 +transformers/models/mask2former/__init__.py,sha256=_damTN4svyRG1tenZi3AEmsILg7QVyYbuWR_iXzrbXw,2357 +transformers/models/mask2former/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/configuration_mask2former.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/convert_mask2former_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/image_processing_mask2former.cpython-310.pyc,, +transformers/models/mask2former/__pycache__/modeling_mask2former.cpython-310.pyc,, +transformers/models/mask2former/configuration_mask2former.py,sha256=kIO8nw1sWYX8CR0slZaoIlz458ffD9BCAzule31RMcM,12545 +transformers/models/mask2former/convert_mask2former_original_pytorch_checkpoint_to_pytorch.py,sha256=v4a-VTdnEHxZLAykOn5AgqLXZ9yFZzhY4CUu4c3XHUE,45688 +transformers/models/mask2former/image_processing_mask2former.py,sha256=Vj7p448RldI_FjAUSpj0UrptYIkBco7zTE7aErwwhkM,56953 +transformers/models/mask2former/modeling_mask2former.py,sha256=fPaM7RF73nS62864624WGE64DyQmcHdBKJv2Zn9ybVM,121194 +transformers/models/maskformer/__init__.py,sha256=Sy9sX8-Vb9Gnn9gjU34M4pDh3jJZd7vmr5aorB9N5lw,2945 +transformers/models/maskformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/configuration_maskformer_swin.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/convert_maskformer_swin_to_pytorch.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/feature_extraction_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/image_processing_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer.cpython-310.pyc,, +transformers/models/maskformer/__pycache__/modeling_maskformer_swin.cpython-310.pyc,, +transformers/models/maskformer/configuration_maskformer.py,sha256=O5fbg-DYPkvs2HZ0M2OvU21o4RyyTxmX7vzA5SWwIwg,10463 +transformers/models/maskformer/configuration_maskformer_swin.py,sha256=kdf9AuIG8DYqJPIsvz_2rlIqSEZJ3UBI0IXlFdao3YM,7217 +transformers/models/maskformer/convert_maskformer_original_pytorch_checkpoint_to_pytorch.py,sha256=CEKaBhurc8x3mvE7YMqfULIoybxq0Guj0hGHJouG5s8,32237 +transformers/models/maskformer/convert_maskformer_resnet_to_pytorch.py,sha256=iUMC5om4caBO1eSeivN3sZYsbEtYZAeJZE7I1NIygR4,20732 +transformers/models/maskformer/convert_maskformer_swin_to_pytorch.py,sha256=-GWvua0iYDbJYZ7VUcywp0rf-jR7iKXz8az9N4r5k_0,20321 +transformers/models/maskformer/feature_extraction_maskformer.py,sha256=MMPQuQY2EnK4vixDve-I-PIFqCDWQNYYeVdAYvIY8HY,1214 +transformers/models/maskformer/image_processing_maskformer.py,sha256=CjaNU-cO2SU0DWDgn6GVBBiSPYuz1nBazRsHVDxND_Y,58796 +transformers/models/maskformer/modeling_maskformer.py,sha256=AmyHQu9CJRAWtSWoiOOV_dovrCVXpL_jvlOBjP-Hh5U,94251 +transformers/models/maskformer/modeling_maskformer_swin.py,sha256=2DyRWtHLA077-GWY0Z2mngv62I0RpGVHKr3NhIJm3c8,40758 +transformers/models/mbart/__init__.py,sha256=N1NqaZU1QPNt3r2VI3y4sv-XwdBkAtV-41REYSah7w4,4403 
+transformers/models/mbart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mbart/__pycache__/configuration_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/convert_mbart_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_flax_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/modeling_tf_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart.cpython-310.pyc,, +transformers/models/mbart/__pycache__/tokenization_mbart_fast.cpython-310.pyc,, +transformers/models/mbart/configuration_mbart.py,sha256=lpHgLTu647Hk7bxJfCxIwXiAY4YUnzBkYXHEU_-f7GM,18162 +transformers/models/mbart/convert_mbart_original_checkpoint_to_pytorch.py,sha256=xVW9Mj-jd7X_MImJCgS52Aok1CGPf-E6u8ptvG1hK8o,3035 +transformers/models/mbart/modeling_flax_mbart.py,sha256=uUgTTL5zTGbJZX45q4YoPKiSbizfXNsx8jr-T7P2C_c,75090 +transformers/models/mbart/modeling_mbart.py,sha256=MrVWKcOvoqX1Oy4gGbdeELtsWNUJU3PtWCsHwVmtnas,100931 +transformers/models/mbart/modeling_tf_mbart.py,sha256=JsKe79VRjtf9p1SgbH8dnbQGUd5fe5CnYgGNijT-Mys,74195 +transformers/models/mbart/tokenization_mbart.py,sha256=cyxJpDRR-_GxBmUqaxwXzWC5SOmgvlSSIsDdtF8N8xo,14106 +transformers/models/mbart/tokenization_mbart_fast.py,sha256=1ieIvKkfDtKZe_hHOaZNbSt6fzVPylKoYOtNI3T6rpw,10997 +transformers/models/mbart50/__init__.py,sha256=5ekQCS9OkL3_5UJXnu7Z5cVeCi76pVgAxHkC8qQ8XKk,1847 +transformers/models/mbart50/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50.cpython-310.pyc,, +transformers/models/mbart50/__pycache__/tokenization_mbart50_fast.cpython-310.pyc,, +transformers/models/mbart50/tokenization_mbart50.py,sha256=INTdGnO_YBeB7mWdpBgkz8PH-prQOKd1dP92qbBsKDE,16307 +transformers/models/mbart50/tokenization_mbart50_fast.py,sha256=4XQPT5nXMLElCwfHfy4uTolWe2VmD1HcXdVJH0jQ3oA,11594 +transformers/models/mega/__init__.py,sha256=sJJLSLHF1HMGGOkDRFol40JHptUCxSDiB0yUUbvDVL4,2140 +transformers/models/mega/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mega/__pycache__/configuration_mega.cpython-310.pyc,, +transformers/models/mega/__pycache__/convert_mega_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mega/__pycache__/modeling_mega.cpython-310.pyc,, +transformers/models/mega/configuration_mega.py,sha256=R3GBbjk7bx8U3WwmQCEKvonPwq4sjZz09npQTkRaWKQ,12681 +transformers/models/mega/convert_mega_original_pytorch_checkpoint_to_pytorch.py,sha256=FK9gAgMB5VEO2Fji39w100ywUJ8wA8utdmWRZFanb2c,13154 +transformers/models/mega/modeling_mega.py,sha256=Og3W7ctai3HEB4nn_XKmKOOxyfwysFl8qx9Vbxz3mMI,109507 +transformers/models/megatron_bert/__init__.py,sha256=TUAneYZq0bKIQqKDcED_EuJhgnzOnWNrNrye_x8KX90,2506 +transformers/models/megatron_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/configuration_megatron_bert.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/convert_megatron_bert_checkpoint.cpython-310.pyc,, +transformers/models/megatron_bert/__pycache__/modeling_megatron_bert.cpython-310.pyc,, +transformers/models/megatron_bert/configuration_megatron_bert.py,sha256=8g4wxCt4-bc5xnm0aoESAknTnH7XWOzDt2RqS088-mk,6572 +transformers/models/megatron_bert/convert_megatron_bert_checkpoint.py,sha256=VAMD1MFdVG8w9cQkRfmlZCEvaMgoo-lyFI9deunD5OA,13686 +transformers/models/megatron_bert/modeling_megatron_bert.py,sha256=LM8kNMgqBDEAqKpVpCFVomjUx8P-6sqBdi17tMHccsE,83326 
+transformers/models/megatron_gpt2/__init__.py,sha256=WycFl9cUevoXIBhB76qKtnNRIPMk2LoTDkmkfAfOy9M,630 +transformers/models/megatron_gpt2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/megatron_gpt2/__pycache__/checkpoint_reshaping_and_interoperability.cpython-310.pyc,, +transformers/models/megatron_gpt2/__pycache__/convert_megatron_gpt2_checkpoint.cpython-310.pyc,, +transformers/models/megatron_gpt2/checkpoint_reshaping_and_interoperability.py,sha256=NPoWPPSaT29iHoGRoyc1B_hdc67QNoytsVj_glQF430,36692 +transformers/models/megatron_gpt2/convert_megatron_gpt2_checkpoint.py,sha256=UPLXCjF4Fixnw_gy6kzxTK64ioxo_EIxwSVO6oKCqqQ,13661 +transformers/models/mgp_str/__init__.py,sha256=YMCtFGSXL18Kh4Pm3KTBEgtxlaDDYwb3WnMFsEsaJ-4,2164 +transformers/models/mgp_str/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/configuration_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/modeling_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/processing_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/__pycache__/tokenization_mgp_str.cpython-310.pyc,, +transformers/models/mgp_str/configuration_mgp_str.py,sha256=mtWn6cdfdjRwPuVHK5mPjUhDV3EDk7gwDdAEeD4x6dk,5881 +transformers/models/mgp_str/modeling_mgp_str.py,sha256=VG8u9M4zIvKZxb2nUtU0Uycfmcxl9dN2STTMQId813Q,21997 +transformers/models/mgp_str/processing_mgp_str.py,sha256=dh1MJ17yNZdoorG_Mi31Q7waqTnyRock-s4c2k_g0DQ,9298 +transformers/models/mgp_str/tokenization_mgp_str.py,sha256=CIz9yrKh2VPsckVtYJ0pynFgPhwYY9XuyJasmKD9mKo,3776 +transformers/models/mistral/__init__.py,sha256=b9KtZaVe1auCaeEzoRC_zvykp9KwyW8vqNpww-3jgls,2428 +transformers/models/mistral/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mistral/__pycache__/configuration_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/convert_mistral_weights_to_hf.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modeling_flax_mistral.cpython-310.pyc,, +transformers/models/mistral/__pycache__/modeling_mistral.cpython-310.pyc,, +transformers/models/mistral/configuration_mistral.py,sha256=KUIqpzgcLT_7MJphnDYjLYtECaxPH7A_vTRLczcZeYU,6986 +transformers/models/mistral/convert_mistral_weights_to_hf.py,sha256=bG8KXwc1rd3kSd5IothmZGiDiOfhERfh3VrS6_wOaoM,10725 +transformers/models/mistral/modeling_flax_mistral.py,sha256=1xBy97GmBslNjfZZ580ZAfqrRGviVILi0QGf1qbxDPE,31682 +transformers/models/mistral/modeling_mistral.py,sha256=qQ-grOKA7tSrydpeAK3a12Hg6xNeDraWCXX-iNt74Hg,63538 +transformers/models/mixtral/__init__.py,sha256=gUOb9IB2p_2uISpGaLaKXTWW0-nWVa4INgiTZmO8guE,1806 +transformers/models/mixtral/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/configuration_mixtral.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/convert_mixtral_weights_to_hf.cpython-310.pyc,, +transformers/models/mixtral/__pycache__/modeling_mixtral.cpython-310.pyc,, +transformers/models/mixtral/configuration_mixtral.py,sha256=fDcKtS7nez_luNZqh2TUaWttVIfaeCg01JMKmS8KAwI,8204 +transformers/models/mixtral/convert_mixtral_weights_to_hf.py,sha256=WExicalIwkZccqWyRjUU2LBvbL6cM6yiOG_Oby6t3Ok,9156 +transformers/models/mixtral/modeling_mixtral.py,sha256=kMSCKxNqxYQ-5u0io2hBbIoLjuTAIbLU7OMqlVVKoBM,73646 +transformers/models/mluke/__init__.py,sha256=Pj0GBjIU6vYdhEzO7M8O35c5Jj4ivIIGAiLABhN4K7U,1356 +transformers/models/mluke/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mluke/__pycache__/convert_mluke_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, 
+transformers/models/mluke/__pycache__/tokenization_mluke.cpython-310.pyc,, +transformers/models/mluke/convert_mluke_original_pytorch_checkpoint_to_pytorch.py,sha256=G6Z94-1_AiilSTU96PSjX_pdgFIx-b_bk8xlMKX5TuE,10185 +transformers/models/mluke/tokenization_mluke.py,sha256=vhnToeQkpEz4-UrFY7bNvQTtY768HqYJIymkMr1LxnE,80833 +transformers/models/mobilebert/__init__.py,sha256=Gpd8kL6D0UrD5ufVg0MjcknSeHhtlLnD3Bkrzqao4Ok,4604 +transformers/models/mobilebert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/configuration_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/convert_mobilebert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/modeling_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/modeling_tf_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert.cpython-310.pyc,, +transformers/models/mobilebert/__pycache__/tokenization_mobilebert_fast.cpython-310.pyc,, +transformers/models/mobilebert/configuration_mobilebert.py,sha256=8Qr1xG8QJdOOAU25gronQyBATzHUBWTjHqi4lQ28VMs,8319 +transformers/models/mobilebert/convert_mobilebert_original_tf_checkpoint_to_pytorch.py,sha256=MRW9sorswIo4RiWq7PVVmaZsYm4wJEc1-DhcLzssDRU,2200 +transformers/models/mobilebert/modeling_mobilebert.py,sha256=1hkXfYNB1gqnRPhN6_cyD3FC7JkYIpvXsGAFzQXQohE,70492 +transformers/models/mobilebert/modeling_tf_mobilebert.py,sha256=rSLG0FE5IsFJ_Oft8e7B34qgIY2LNs6rfO8zB-Nd97U,83824 +transformers/models/mobilebert/tokenization_mobilebert.py,sha256=yeRHMwhu-UZVVWhOS307ThYnDE_JvXXASObFN-hn1AM,20951 +transformers/models/mobilebert/tokenization_mobilebert_fast.py,sha256=Yp5FcPJNWWdqk41D6xDu44gN8OWd0jj104BdDbGaqdg,7798 +transformers/models/mobilenet_v1/__init__.py,sha256=rbZvH8u5nov7gMxVexJZTVa8yJSIwI4ZHilp8sTEw64,2735 +transformers/models/mobilenet_v1/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/configuration_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/convert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/feature_extraction_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/image_processing_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/__pycache__/modeling_mobilenet_v1.cpython-310.pyc,, +transformers/models/mobilenet_v1/configuration_mobilenet_v1.py,sha256=eDDU1UZCqr7jqsSLaZo4AEDs-Bp-k9OSFvhTUJszIaM,4976 +transformers/models/mobilenet_v1/convert_original_tf_checkpoint_to_pytorch.py,sha256=XjGgfnPQBWp-0pNakJ1CNU1YnoYfeXCZ9WSIrTf02n8,4932 +transformers/models/mobilenet_v1/feature_extraction_mobilenet_v1.py,sha256=goR0AC-IhWMrQlvzSK_0Zej42JYN-oswSGNQWnIOENU,1222 +transformers/models/mobilenet_v1/image_processing_mobilenet_v1.py,sha256=7cu5EhkSZEaw2acPGiFQ9Dthq775OjiDA1THH3O_Rec,15814 +transformers/models/mobilenet_v1/modeling_mobilenet_v1.py,sha256=5PS_5KEL5by6zSiwKiOuFvyd_OsjmRi0Aq6DKD945Dw,18673 +transformers/models/mobilenet_v2/__init__.py,sha256=p4OHu9O6JD4N2TcjOgLu7S2u151xEvGwvdHizbzevc0,2830 +transformers/models/mobilenet_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/configuration_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/convert_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/feature_extraction_mobilenet_v2.cpython-310.pyc,, 
+transformers/models/mobilenet_v2/__pycache__/image_processing_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/__pycache__/modeling_mobilenet_v2.cpython-310.pyc,, +transformers/models/mobilenet_v2/configuration_mobilenet_v2.py,sha256=cVT4iIhjLWpvLkdcL7PFsw4TPNnBR8gAVUAIgOYfp9o,6872 +transformers/models/mobilenet_v2/convert_original_tf_checkpoint_to_pytorch.py,sha256=acsdT3rMMqCPV9whw2xyiVK1UOs8tr8ySvYRFNRmVWM,6402 +transformers/models/mobilenet_v2/feature_extraction_mobilenet_v2.py,sha256=_IUVvyoMBsqymCoh-CVmoswZ4nOBpqFJlaoUfD8WQ3E,1222 +transformers/models/mobilenet_v2/image_processing_mobilenet_v2.py,sha256=MebPYCgZFQzhQO6-ImjmUte7VEyVdE-NoOP9-16mnds,18168 +transformers/models/mobilenet_v2/modeling_mobilenet_v2.py,sha256=VfeECqbXCid4JFWHLrCJBXESCrSyAP_ohoITFv7MSS8,34578 +transformers/models/mobilevit/__init__.py,sha256=AN8UeJz0pDko_ezgS5J4cYAZT3P6Hv2EZKlqZGnkgSI,3492 +transformers/models/mobilevit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/configuration_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/convert_mlcvnets_to_pytorch.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/feature_extraction_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/image_processing_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/modeling_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/__pycache__/modeling_tf_mobilevit.cpython-310.pyc,, +transformers/models/mobilevit/configuration_mobilevit.py,sha256=Wuobe30bTBHctDCJfEScHIH5NRZRwc4dKigCO1LfwSs,7634 +transformers/models/mobilevit/convert_mlcvnets_to_pytorch.py,sha256=Ng8zzr_CxIO9IFcf0ijXqR_EWJeAhhQ3HAkethSpCn4,12402 +transformers/models/mobilevit/feature_extraction_mobilevit.py,sha256=na2H01bKIhQsyCHayPaVase5HRGRmmO7zVDDuY76Uj0,1207 +transformers/models/mobilevit/image_processing_mobilevit.py,sha256=4R2jNDd2WCJLkHoY7Tcw2vTxNK-DHtZXo5EBcVz4_CE,21926 +transformers/models/mobilevit/modeling_mobilevit.py,sha256=TLDir_hBJQmzX0umT8gKY1oDWCjHpW79p2bwNQUDvrA,39916 +transformers/models/mobilevit/modeling_tf_mobilevit.py,sha256=G2JHw9KRbgsU-WTJZMnITp07v82UMA3brVxdaVUKI4E,54787 +transformers/models/mobilevitv2/__init__.py,sha256=kSj85QHMKZk8_MdSUYKIsFL6V8SCAJWQlzo1hlvlYw8,2111 +transformers/models/mobilevitv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/configuration_mobilevitv2.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/convert_mlcvnets_to_pytorch.cpython-310.pyc,, +transformers/models/mobilevitv2/__pycache__/modeling_mobilevitv2.cpython-310.pyc,, +transformers/models/mobilevitv2/configuration_mobilevitv2.py,sha256=JbYqOFvezBa32_Vj7XAGOkpykCr5lrEDr8yn5aIjkWs,7195 +transformers/models/mobilevitv2/convert_mlcvnets_to_pytorch.py,sha256=ZzEtog7BRgGK8W0zwC_peXQOOaBkuduPO3Tbq9_xtjo,12557 +transformers/models/mobilevitv2/modeling_mobilevitv2.py,sha256=Zh4ZyIiqnNI8gbiHlpqMk2pmjHenwTYBl2A4Ye3cp5c,38291 +transformers/models/mpnet/__init__.py,sha256=hyB4jNWDdoHWggavnqLZEF85f9a11vXSTKaLWTdPh-k,3875 +transformers/models/mpnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/configuration_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/modeling_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/modeling_tf_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet.cpython-310.pyc,, +transformers/models/mpnet/__pycache__/tokenization_mpnet_fast.cpython-310.pyc,, 
+transformers/models/mpnet/configuration_mpnet.py,sha256=Nte9C0DNs8c5e5MNNSzHSrDHtlTgXSC8vNxG-Av1MOI,5397 +transformers/models/mpnet/modeling_mpnet.py,sha256=l9HpxRanbLMN-d1CwcY1QuQmybrsGyTglFuczRc3z7A,42584 +transformers/models/mpnet/modeling_tf_mpnet.py,sha256=SqFduEwKKdXhBc7Xuq5vkGS5pncpXtG6-wkJd95gH30,55565 +transformers/models/mpnet/tokenization_mpnet.py,sha256=rcUWsm6ts7ICY0V3R4edO3SEtrStuERIkOz1-T8C8JI,22126 +transformers/models/mpnet/tokenization_mpnet_fast.py,sha256=DUx1wGlRfhxppFPHfvT68EFY9MBLdcyzhBebBdcT5lQ,9158 +transformers/models/mpt/__init__.py,sha256=ZH7_XPJ100kSo0osi0XxzbkyFHj6HnS9ghjxpsqVXac,1977 +transformers/models/mpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mpt/__pycache__/configuration_mpt.cpython-310.pyc,, +transformers/models/mpt/__pycache__/modeling_mpt.cpython-310.pyc,, +transformers/models/mpt/configuration_mpt.py,sha256=ya7QViklVzl49I67jUR1CsJjITpJWlf2YxERvDr0oCA,11328 +transformers/models/mpt/modeling_mpt.py,sha256=cA-7cCbE_8pM6O6mlnAmD31QUdIlJONXmOzFEIgZrc0,40784 +transformers/models/mra/__init__.py,sha256=CotdFTXkFtz90MDv55my886vc-0VBxs8h3mnGs-z7WQ,2254 +transformers/models/mra/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mra/__pycache__/configuration_mra.cpython-310.pyc,, +transformers/models/mra/__pycache__/convert_mra_pytorch_to_pytorch.cpython-310.pyc,, +transformers/models/mra/__pycache__/modeling_mra.cpython-310.pyc,, +transformers/models/mra/configuration_mra.py,sha256=OKxqjOaXJHU1_Ly2Dv0CvOUtk93HnKYim6U2v6-Qkrw,6606 +transformers/models/mra/convert_mra_pytorch_to_pytorch.py,sha256=LhaVlQ4q88gtewg-geRYZ748xQ3brLLhyDIo-OGWSdI,4247 +transformers/models/mra/modeling_mra.py,sha256=5EBeNs9HhkLV6bt28XZmmDB1DHetneADa7ipxVFZa8E,61949 +transformers/models/mt5/__init__.py,sha256=q5f0AWvlyU1eQjk0OXCpMZ4OM3qNDq35Pv6RuxrWQeI,3597 +transformers/models/mt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mt5/__pycache__/configuration_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_flax_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_mt5.cpython-310.pyc,, +transformers/models/mt5/__pycache__/modeling_tf_mt5.cpython-310.pyc,, +transformers/models/mt5/configuration_mt5.py,sha256=3G5sz5XV_HSRRV4pCDbPhbKlIdJgKxq7Yd6fcisPvXQ,7900 +transformers/models/mt5/modeling_flax_mt5.py,sha256=1p8D9st-unpG0rcRGDrUQG__3GIFa77Wst8cYgOGVng,4243 +transformers/models/mt5/modeling_mt5.py,sha256=CfKL55AKbeodV1K78F1gyJT4RdNfWmwY1DxNcj9WsbI,113084 +transformers/models/mt5/modeling_tf_mt5.py,sha256=9Stq04drvy7iyZaptOzmDAWsUzXsKoTFTNsvCjceq_E,3326 +transformers/models/musicgen/__init__.py,sha256=EY9dwTvFbwcUcdSclI-kp8xvRO24giI4UJMAmiOWIr0,2099 +transformers/models/musicgen/__pycache__/__init__.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/configuration_musicgen.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/convert_musicgen_transformers.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/modeling_musicgen.cpython-310.pyc,, +transformers/models/musicgen/__pycache__/processing_musicgen.cpython-310.pyc,, +transformers/models/musicgen/configuration_musicgen.py,sha256=HGXJukbgs6Kv_-Tic1aPV9PdCFuRB53x5pI_UdQigbI,11485 +transformers/models/musicgen/convert_musicgen_transformers.py,sha256=F-F2BnXZYxNcRjxFDs6OjL1Zy1VxKXVtbHY2dZKXuPY,9397 +transformers/models/musicgen/modeling_musicgen.py,sha256=fpRO52LgAwSIcTu7VOiDEpuWHNu3goz3uUY5xy_-7-A,144184 +transformers/models/musicgen/processing_musicgen.py,sha256=wJE7gvyKPFVyMj5O_pD1Tg1BCC3RizsRIyHo_eV4_os,5666 
+transformers/models/musicgen_melody/__init__.py,sha256=juLVRBOSmHDQx5sK1_EOJwdsEVlAMeLeGsNoMWBvuN8,2822 +transformers/models/musicgen_melody/__pycache__/__init__.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/configuration_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/convert_musicgen_melody_transformers.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/feature_extraction_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/modeling_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/__pycache__/processing_musicgen_melody.cpython-310.pyc,, +transformers/models/musicgen_melody/configuration_musicgen_melody.py,sha256=Brm08IlkCxkcvX1SwAApwVNUwoA0FKTOpa3QKd2l3rE,12596 +transformers/models/musicgen_melody/convert_musicgen_melody_transformers.py,sha256=xH9oSDc7IibPUzBRVy-Ej49ahmPirUKS65zJGDv8eso,11355 +transformers/models/musicgen_melody/feature_extraction_musicgen_melody.py,sha256=XC80TogbFCW4uoyqrQmYyzji_oQMaPZu8eXKYvo5zTU,15226 +transformers/models/musicgen_melody/modeling_musicgen_melody.py,sha256=fB8MJdxl5sCF7gx5pnniPr-dJw6aiDaDeKVo1akqGJg,139123 +transformers/models/musicgen_melody/processing_musicgen_melody.py,sha256=4DbgucxyP7S7l0ndOkLnQzYgT6oaSLF1_KERckJYBEs,8633 +transformers/models/mvp/__init__.py,sha256=w3eswhHeLn9gayC1Cl8kfkkMGtD036aJeZF2541NmqM,2536 +transformers/models/mvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/mvp/__pycache__/configuration_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/modeling_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp.cpython-310.pyc,, +transformers/models/mvp/__pycache__/tokenization_mvp_fast.cpython-310.pyc,, +transformers/models/mvp/configuration_mvp.py,sha256=ojj9wCM4voL2IzXUIGCfV1IazatycYCfboTx1OiFL7g,8409 +transformers/models/mvp/modeling_mvp.py,sha256=g03ncnRytUZNOGrb1pdX3vUW0f_G6USCuqWjALlWXcQ,92806 +transformers/models/mvp/tokenization_mvp.py,sha256=JlR6l5XT5U4eU_20FRb69tmEXvexJa1d88moJ3jxj3E,16192 +transformers/models/mvp/tokenization_mvp_fast.py,sha256=Pa8ZaGtDrfhrWLnB9FPsO2OGU131E1l5HEAS25Nv6bc,12268 +transformers/models/nat/__init__.py,sha256=YY8yjsIBbTC1eZRAnR4_p_gHQ3n4JyywB2G1JQuM4AQ,1776 +transformers/models/nat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nat/__pycache__/configuration_nat.cpython-310.pyc,, +transformers/models/nat/__pycache__/modeling_nat.cpython-310.pyc,, +transformers/models/nat/configuration_nat.py,sha256=WEKNfKWwVlH-SR92Jvr4J3JJocqRukbIwWBlKkzS2EE,7068 +transformers/models/nat/modeling_nat.py,sha256=GXRWTDUeICyQKG2PS6w6GV_wC-tsBdeElbJZ0ACndIQ,39963 +transformers/models/nezha/__init__.py,sha256=ae3hJzlO_gAa20enOImKo15phpgIXk2_Zt8tVLAY3MU,2233 +transformers/models/nezha/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nezha/__pycache__/configuration_nezha.cpython-310.pyc,, +transformers/models/nezha/__pycache__/modeling_nezha.cpython-310.pyc,, +transformers/models/nezha/configuration_nezha.py,sha256=sGon9On54UmX--XHnq_XJO6nn99Q5XKP4kbXJnMD234,4911 +transformers/models/nezha/modeling_nezha.py,sha256=lAnbauROwjhz8d_b9TvYB6f0ENV-gTBdQVHxVe-ShXs,74706 +transformers/models/nllb/__init__.py,sha256=tM7_FdmE7zOQm68GoRQiRt1jbYfPea9kC24QJSSMgIE,1868 +transformers/models/nllb/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb.cpython-310.pyc,, +transformers/models/nllb/__pycache__/tokenization_nllb_fast.cpython-310.pyc,, 
+transformers/models/nllb/tokenization_nllb.py,sha256=eDOOWSyY_i2El1iJLeyIyw_L_6R-uTsnwqqBRvDBGGk,21177 +transformers/models/nllb/tokenization_nllb_fast.py,sha256=PrDf5r9NPXtZvV2i1DSqPr9IsEvU4cbSmSVdV4xE20A,16417 +transformers/models/nllb_moe/__init__.py,sha256=ULdz8wrqlqfamWMIQpjmmkPJPPznr34f2JxkYkqquCQ,1978 +transformers/models/nllb_moe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/configuration_nllb_moe.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/convert_nllb_moe_sharded_original_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/nllb_moe/__pycache__/modeling_nllb_moe.cpython-310.pyc,, +transformers/models/nllb_moe/configuration_nllb_moe.py,sha256=wguMH_tP2_CxQ1L6lt79MhspBlQ8qtcY4UOEcVyqv4U,11268 +transformers/models/nllb_moe/convert_nllb_moe_sharded_original_checkpoint_to_pytorch.py,sha256=c9Zab9qVzNESk0U2exJNaoDwUQo_Q7ZpcZHViZjqTQQ,6477 +transformers/models/nllb_moe/modeling_nllb_moe.py,sha256=ZbkFDhVGw9jFn48JuNcNDPTSpCJnqyNQZ6vmKx1T5XQ,85158 +transformers/models/nougat/__init__.py,sha256=2cSw40yf-T81USela2GvWs-NSXWHkOa6zJ_3BO7QSCY,1914 +transformers/models/nougat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nougat/__pycache__/convert_nougat_to_hf.cpython-310.pyc,, +transformers/models/nougat/__pycache__/image_processing_nougat.cpython-310.pyc,, +transformers/models/nougat/__pycache__/processing_nougat.cpython-310.pyc,, +transformers/models/nougat/__pycache__/tokenization_nougat_fast.cpython-310.pyc,, +transformers/models/nougat/convert_nougat_to_hf.py,sha256=S6wb6SK-46EHmBvoNSu8n-C1RgbOwzL7XBtCSmTHLrM,10941 +transformers/models/nougat/image_processing_nougat.py,sha256=AfDySnr8HCJcNiMRLP8WM1Nl7d6ey7RFbLtd6bho2ts,24253 +transformers/models/nougat/processing_nougat.py,sha256=65OZ7-XvFeiEwFjEi69ZDY931w6NvHTHGo9EixCVxKU,6731 +transformers/models/nougat/tokenization_nougat_fast.py,sha256=6RGSauQ6VpkBv0PPvpyrJ_-Qhc3K1N8WlJ-WGgPr_s0,24702 +transformers/models/nystromformer/__init__.py,sha256=80Fr1KQ5iZtS-bmWIrqfo26_Yp43SbHRv_YSloD2J4I,2337 +transformers/models/nystromformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/configuration_nystromformer.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/convert_nystromformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/nystromformer/__pycache__/modeling_nystromformer.cpython-310.pyc,, +transformers/models/nystromformer/configuration_nystromformer.py,sha256=kdFsYx-kvXoKCrgT4gpuAMaNOxsHXc1oDHWCUuplkDw,6472 +transformers/models/nystromformer/convert_nystromformer_original_pytorch_checkpoint_to_pytorch.py,sha256=8K5IGFosME-LAljFLuTc09oce1IwxZDcxw1KPHsamqc,4197 +transformers/models/nystromformer/modeling_nystromformer.py,sha256=tcVoT_hFFbc6kb496I4aOvQvoTCrA8hmU8E5czgqa9A,48742 +transformers/models/olmo/__init__.py,sha256=FEnfRTkA2TAavu2m_O1LssOT6zevDYs0035_E5i5-t8,1732 +transformers/models/olmo/__pycache__/__init__.cpython-310.pyc,, +transformers/models/olmo/__pycache__/configuration_olmo.cpython-310.pyc,, +transformers/models/olmo/__pycache__/convert_olmo_weights_to_hf.cpython-310.pyc,, +transformers/models/olmo/__pycache__/modeling_olmo.cpython-310.pyc,, +transformers/models/olmo/configuration_olmo.py,sha256=RcmB0knGCKxFQFDH5Gh2ypu45Gb7yiovCPSLw-K-EH4,9005 +transformers/models/olmo/convert_olmo_weights_to_hf.py,sha256=SI91Kn_B_m0oel2kuJ2LUMGqfaNZL4Q4sT2ydqNYZlE,9413 +transformers/models/olmo/modeling_olmo.py,sha256=i6cpv1QYrXvoU_BkCczyE3TgUn_aQN0xFJR8MI4aYZM,62608 
+transformers/models/oneformer/__init__.py,sha256=mhWiuUMUOFF1ba9KLNdNJYPYScCLxlZ61WiyO995jjo,2402 +transformers/models/oneformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/configuration_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/convert_to_hf_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/image_processing_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/modeling_oneformer.cpython-310.pyc,, +transformers/models/oneformer/__pycache__/processing_oneformer.cpython-310.pyc,, +transformers/models/oneformer/configuration_oneformer.py,sha256=9JvCU8bi_KcdFMbWzvrhztW06x10xmTSwLqPzyCME8w,13639 +transformers/models/oneformer/convert_to_hf_oneformer.py,sha256=yBWS0SE1sGS9UqCzX2EdbhAiIWvBCumSBwutJ8VQFF4,50691 +transformers/models/oneformer/image_processing_oneformer.py,sha256=mwnXNsryPyA2Vc5IyxhPZGKV907ygpCEWAD1TCz_8c4,61446 +transformers/models/oneformer/modeling_oneformer.py,sha256=VhdI5egJWGD24nKHzh6BZOwCdt4W1_IyZljAUVYy4D0,143634 +transformers/models/oneformer/processing_oneformer.py,sha256=ahso8fGMLGb078QfY8T5o1bDj5OaptoMbIxiTIJGM7c,9377 +transformers/models/openai/__init__.py,sha256=5Y0BYw7AWmCFdxKdBMd4-wTi9wj6-8lX7Ii1WvFlfA8,3658 +transformers/models/openai/__pycache__/__init__.cpython-310.pyc,, +transformers/models/openai/__pycache__/configuration_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/convert_openai_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/openai/__pycache__/modeling_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/modeling_tf_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/tokenization_openai.cpython-310.pyc,, +transformers/models/openai/__pycache__/tokenization_openai_fast.cpython-310.pyc,, +transformers/models/openai/configuration_openai.py,sha256=K4R5f-M6amdHH9UkD4QKj_CRS1Liv-237nLhFFzX0ho,7180 +transformers/models/openai/convert_openai_original_tf_checkpoint_to_pytorch.py,sha256=nAomaHvwIi5gFuedK1WtT61GCu5tBxLE5zj6bY-fjGo,2666 +transformers/models/openai/modeling_openai.py,sha256=pGf2wwZOugId3hdeccKHrGhQjemKSgnd18meWURAY18,38348 +transformers/models/openai/modeling_tf_openai.py,sha256=ay7nG6vZa9WUpyIHfnywScXat8KzLxpYngFq5uE8vQw,41157 +transformers/models/openai/tokenization_openai.py,sha256=atE_RCEqT2cAW4F_w3bjgol1hrkRZW23DlclRpI-PMI,15161 +transformers/models/openai/tokenization_openai_fast.py,sha256=f-Q8i5p_-QqKcwn4FAz-7Mso5rrAJ5ea3l9qCA237q0,2522 +transformers/models/opt/__init__.py,sha256=MQ8MhQamtoySbkT8WbqZ48mMUxp5Ae_UGX2Sl3HKPEc,2977 +transformers/models/opt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/opt/__pycache__/configuration_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/convert_opt_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_flax_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_opt.cpython-310.pyc,, +transformers/models/opt/__pycache__/modeling_tf_opt.cpython-310.pyc,, +transformers/models/opt/configuration_opt.py,sha256=szA8kNmDaySrS0-dKc9xnkjsj93n3oEr06jGUnflxWE,6660 +transformers/models/opt/convert_opt_original_pytorch_checkpoint_to_pytorch.py,sha256=7dHR6Tk9BBuFMEmHOxbu0jDf-gOnYFPsPLLH6SsA1gI,3858 +transformers/models/opt/modeling_flax_opt.py,sha256=MHJpXRbl4u1JcgWkV58DmS6n0wEOTYpZBeOJQFzdBT0,31541 +transformers/models/opt/modeling_opt.py,sha256=v3orKDmKUqV9UzYrRdOWepHWbepaINPo4tIluNocWx4,67821 
+transformers/models/opt/modeling_tf_opt.py,sha256=SoVD0Dmrgak3O6SH2Qtlgn_2LFgfmHMM5hhAibKcVBI,49554 +transformers/models/owlv2/__init__.py,sha256=fvzKBoWfoB8-9hZKeId1Qvy3p_N9PLgsGoXzrg-fBzI,2606 +transformers/models/owlv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/configuration_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/convert_owlv2_to_hf.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/image_processing_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/modeling_owlv2.cpython-310.pyc,, +transformers/models/owlv2/__pycache__/processing_owlv2.cpython-310.pyc,, +transformers/models/owlv2/configuration_owlv2.py,sha256=6WJ3kNm8nuP3pr0zPF3vsJV-PUjRAUaG8PUL1DnaKfA,15569 +transformers/models/owlv2/convert_owlv2_to_hf.py,sha256=rF02k9XWTswf4P4ZZ76ekB3be6pRsFJLtbuWaJpyx3Y,22018 +transformers/models/owlv2/image_processing_owlv2.py,sha256=0KXB-hkcMZozinbj3XNCV5DrU0488Ljsan-FBoTX9I8,26857 +transformers/models/owlv2/modeling_owlv2.py,sha256=SICNuQQV1cFFwjg3Oe47ynrV4NOJKnruW3uQAz_EPGM,82637 +transformers/models/owlv2/processing_owlv2.py,sha256=WUAZC5nLIqVLseH1odt8F32mHZV2R2iaGe1eWq-9dMY,10046 +transformers/models/owlvit/__init__.py,sha256=zBsZnxDQ28eWv3rpN77KfHfIQPv4sIurjn-kNoykQyo,2915 +transformers/models/owlvit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/configuration_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/convert_owlvit_original_flax_to_hf.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/feature_extraction_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/image_processing_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/modeling_owlvit.cpython-310.pyc,, +transformers/models/owlvit/__pycache__/processing_owlvit.cpython-310.pyc,, +transformers/models/owlvit/configuration_owlvit.py,sha256=DiQQkU-sAB3v7wWvdaPqLONaHSFDcnRZOgecihzcurI,16760 +transformers/models/owlvit/convert_owlvit_original_flax_to_hf.py,sha256=tofzNZcVROwfYoV7pV6u50Am3TFm-XmuJEAGwNvRT9o,13988 +transformers/models/owlvit/feature_extraction_owlvit.py,sha256=yPO8FbUw3YabKbsV_ozKpIr6JixO9knVw1eMIHeiCtY,1186 +transformers/models/owlvit/image_processing_owlvit.py,sha256=vYcwjzcsheXUv-ZQARjVwuJGK6rJuAkQPy6GQPWE7uo,28604 +transformers/models/owlvit/modeling_owlvit.py,sha256=-79hyl8AnLg61C96IO-ziTZOSwif7sRdbdy_Z1nlKw8,76269 +transformers/models/owlvit/processing_owlvit.py,sha256=0nSZZV8HtYmywaCfUqMCWYadqAO3QtMi8S-Jt_y8ai0,11042 +transformers/models/patchtsmixer/__init__.py,sha256=z9KtbxxAyoNMB0DkWBDvpxmgfZMzx5B056p1nlLjhIE,2204 +transformers/models/patchtsmixer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/patchtsmixer/__pycache__/configuration_patchtsmixer.cpython-310.pyc,, +transformers/models/patchtsmixer/__pycache__/modeling_patchtsmixer.cpython-310.pyc,, +transformers/models/patchtsmixer/configuration_patchtsmixer.py,sha256=zIYc8MJd5-IyOtYtelhj4MyyRSSfpkQ-IsfkRKDdPb4,12636 +transformers/models/patchtsmixer/modeling_patchtsmixer.py,sha256=rvIs9cO-TCB3XfsiOqfnaqljnCfgbhOs9Ox9IutnyzY,87930 +transformers/models/patchtst/__init__.py,sha256=AyK9VUDx2iphFn8IMvgt49apReqE0VBTxrjDwE6fRhc,2071 +transformers/models/patchtst/__pycache__/__init__.cpython-310.pyc,, +transformers/models/patchtst/__pycache__/configuration_patchtst.cpython-310.pyc,, +transformers/models/patchtst/__pycache__/modeling_patchtst.cpython-310.pyc,, +transformers/models/patchtst/configuration_patchtst.py,sha256=f6-QffpEI96dwoJa3VaF4pe5yA4WsHe5TdUzWHnGQTA,12588 
+transformers/models/patchtst/modeling_patchtst.py,sha256=Ik1QEFo2AFRqRH8WetZqaTBZZcve6Hy-PzOgwrYC5cc,91775 +transformers/models/pegasus/__init__.py,sha256=SXHYeNzkJrHfERo9lhqyvu3S75BYDmqceiFfim50Y_g,4111 +transformers/models/pegasus/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/configuration_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/convert_pegasus_tf_to_pytorch.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_flax_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/modeling_tf_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus.cpython-310.pyc,, +transformers/models/pegasus/__pycache__/tokenization_pegasus_fast.cpython-310.pyc,, +transformers/models/pegasus/configuration_pegasus.py,sha256=A_bUgZAQkLLed1QVTetk4iv5HEYZ5o83cCbKwLECN8I,7571 +transformers/models/pegasus/convert_pegasus_tf_to_pytorch.py,sha256=9geJowNAukZc9FE2OEq0pXQi6ynw9k-2NFtlmISxpUg,5359 +transformers/models/pegasus/modeling_flax_pegasus.py,sha256=NbaPRG_BeTrZQbbZCxUOWxwdgSKSrHWkjTicOP3Yhvk,65974 +transformers/models/pegasus/modeling_pegasus.py,sha256=cLGjeXqI0QHoVj5WzpaKG42A8NSgmvqwKuiw5specCU,80560 +transformers/models/pegasus/modeling_tf_pegasus.py,sha256=8dfcnMG6muIhoLDDU-p3LCmnFX5itzOzSQipqm5mIeo,74202 +transformers/models/pegasus/tokenization_pegasus.py,sha256=zRyVOMqZunsKvEp0Hq4ZIdP8fwlMASOB_bTKk_TNaPg,13125 +transformers/models/pegasus/tokenization_pegasus_fast.py,sha256=eOwadGhA-cEAkwkDVL7AXO79GqF04XFP6IzsYA2jBuI,9942 +transformers/models/pegasus_x/__init__.py,sha256=M7Ef6UH-lQ53z-17c-XQi5nmmi-uVz8UKFHQe71LDVU,1828 +transformers/models/pegasus_x/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pegasus_x/__pycache__/configuration_pegasus_x.cpython-310.pyc,, +transformers/models/pegasus_x/__pycache__/modeling_pegasus_x.cpython-310.pyc,, +transformers/models/pegasus_x/configuration_pegasus_x.py,sha256=6-gA3-ZuyoXH05JLW0ByC6bZwRlHSGiMkyrHVQBszNw,8187 +transformers/models/pegasus_x/modeling_pegasus_x.py,sha256=Gsa3XrpmqiInfdYlLTxJAJn8ulySoGwjQUdZk2yZRdQ,75728 +transformers/models/perceiver/__init__.py,sha256=y-6ZMYh3FfGpj9A1gZafPXrfGKJoGKEenKlJT9ZZEw8,3293 +transformers/models/perceiver/__pycache__/__init__.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/configuration_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/convert_perceiver_haiku_to_pytorch.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/feature_extraction_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/image_processing_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/modeling_perceiver.cpython-310.pyc,, +transformers/models/perceiver/__pycache__/tokenization_perceiver.cpython-310.pyc,, +transformers/models/perceiver/configuration_perceiver.py,sha256=iRH1_I7sIbZ7cHN9VX8RCk7Rkr8L2-CKPRcdkVFejLo,12256 +transformers/models/perceiver/convert_perceiver_haiku_to_pytorch.py,sha256=f8p0sPVQv19tMDKkIM8IfTg60-SYX9MMABAzstxFt7k,21286 +transformers/models/perceiver/feature_extraction_perceiver.py,sha256=0lW_qh5ONtUwr0ARM9RB9hizA76wL6fmeofDrhbIsXI,1207 +transformers/models/perceiver/image_processing_perceiver.py,sha256=cAMSnIE8lGaciJZNu6BxdT4YccgYPwYPTZOjP5GQOVY,17940 +transformers/models/perceiver/modeling_perceiver.py,sha256=mfr-5rrSXQd5A-_QrbUscd88mg0zKAEJtg0R3mGqfTM,146577 
+transformers/models/perceiver/tokenization_perceiver.py,sha256=VOWp64riIrTTB7oqvLBq7N6_U515ZWzaaVpwSx7SncI,8020 +transformers/models/persimmon/__init__.py,sha256=gp5VkpnXik0R_PBRitY6UBMcBDMmL41N8o1LjPW_Hmo,1835 +transformers/models/persimmon/__pycache__/__init__.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/configuration_persimmon.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/convert_persimmon_weights_to_hf.cpython-310.pyc,, +transformers/models/persimmon/__pycache__/modeling_persimmon.cpython-310.pyc,, +transformers/models/persimmon/configuration_persimmon.py,sha256=2aaAXelQi6liHfBO-Y3GD7P01viYvrRMZffRzcsmfE4,7766 +transformers/models/persimmon/convert_persimmon_weights_to_hf.py,sha256=F3NFcbCWD-UxFwgp2h-Nv78_M0p0LELPq4re30ZNIjU,4644 +transformers/models/persimmon/modeling_persimmon.py,sha256=3WjBbCWVV-WpYE1ygrSm68hZzugnSaE1yLrez8s9QVk,47096 +transformers/models/phi/__init__.py,sha256=cSkf7i5ur4JTXt8gWalgbD-HFoJeFjMVTH3u5IOfICE,1971 +transformers/models/phi/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phi/__pycache__/configuration_phi.cpython-310.pyc,, +transformers/models/phi/__pycache__/convert_phi_weights_to_hf.cpython-310.pyc,, +transformers/models/phi/__pycache__/modeling_phi.cpython-310.pyc,, +transformers/models/phi/configuration_phi.py,sha256=DAf4-DIrEm9Kf9PJ1jmlFAr_qNx8PHqSJXf-TFnTXdI,9181 +transformers/models/phi/convert_phi_weights_to_hf.py,sha256=XrjgtZm6GZQx01rZ0q52g6e4ajyZhl8n02QNchAD6BQ,7685 +transformers/models/phi/modeling_phi.py,sha256=HOwh6THQTkU-uHtWyW9kSlM1LihW2e3zsjrkXoAm30s,68404 +transformers/models/phobert/__init__.py,sha256=JDAAoG6FOpN1o5kgFBbHkoko9NsiioFi-ZAeAgR79nY,955 +transformers/models/phobert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/phobert/__pycache__/tokenization_phobert.cpython-310.pyc,, +transformers/models/phobert/tokenization_phobert.py,sha256=vVIbAd64za3WGuIBg_oE1P08ZjyO_k_G6cozkOhsvnI,13093 +transformers/models/pix2struct/__init__.py,sha256=VSpzQStsFkcbIF3aftcNle95WQ7-cZzuWwDhjgzK-IU,2701 +transformers/models/pix2struct/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/configuration_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/convert_pix2struct_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/image_processing_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/modeling_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/__pycache__/processing_pix2struct.cpython-310.pyc,, +transformers/models/pix2struct/configuration_pix2struct.py,sha256=K9WDf0uYHsBnri4cOD35chHbumq4BFy5Yam9xa0Js1U,17392 +transformers/models/pix2struct/convert_pix2struct_original_pytorch_to_hf.py,sha256=m_S-9oxyN4PQafRbWQIP-G0NUDrTqxOmr8IwiHNCOuU,5886 +transformers/models/pix2struct/image_processing_pix2struct.py,sha256=snQZl3jqenJyk_wbmXK_hZJKO2Z5PyYEVFdVn1oeI6o,19727 +transformers/models/pix2struct/modeling_pix2struct.py,sha256=PRv5wLiWuc_vFOrYNwullQ0jgzY9OFaJWAb1iXzgQgc,82752 +transformers/models/pix2struct/processing_pix2struct.py,sha256=YFwg3KSy0SKXAkBucCTOwsOFSm7pFYj-M6bCViLYVqU,6960 +transformers/models/plbart/__init__.py,sha256=uNjyVJsOGh5eb2iNYSc7av9uNk-n3xB6rLv3BSRBKoY,2429 +transformers/models/plbart/__pycache__/__init__.cpython-310.pyc,, +transformers/models/plbart/__pycache__/configuration_plbart.cpython-310.pyc,, +transformers/models/plbart/__pycache__/convert_plbart_original_checkpoint_to_torch.cpython-310.pyc,, 
+transformers/models/plbart/__pycache__/modeling_plbart.cpython-310.pyc,, +transformers/models/plbart/__pycache__/tokenization_plbart.cpython-310.pyc,, +transformers/models/plbart/configuration_plbart.py,sha256=3QlFT2I53WTd037uiBMl8EfFvMrqjxuwoatvsFFWHmk,8601 +transformers/models/plbart/convert_plbart_original_checkpoint_to_torch.py,sha256=BOXNudNSr1xevmHnvNpa_4ya3Q89m6J4lndQhCWSLB8,3553 +transformers/models/plbart/modeling_plbart.py,sha256=3fksu2VaEcuJHTbBJbk8x_t5W53PD3szCoJOgABWrVI,84390 +transformers/models/plbart/tokenization_plbart.py,sha256=GE-X-wTX7ML8tS8_-9SjfPSUs4fkq5gRt_6hOyGqbnk,18746 +transformers/models/poolformer/__init__.py,sha256=fzMbnIpAxBApWl0QVCU965q9km5dySep9Hjhml26r68,2586 +transformers/models/poolformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/configuration_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/convert_poolformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/feature_extraction_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/image_processing_poolformer.cpython-310.pyc,, +transformers/models/poolformer/__pycache__/modeling_poolformer.cpython-310.pyc,, +transformers/models/poolformer/configuration_poolformer.py,sha256=cJ_No-RVvQ6cBkyhYFldiliiZQ_gP1Q3JqsACbN0C8U,5677 +transformers/models/poolformer/convert_poolformer_original_to_pytorch.py,sha256=Vvlp7ju7kr2sg1NdXKma6vYGABjs4sVhPKhgFKPJRpk,7947 +transformers/models/poolformer/feature_extraction_poolformer.py,sha256=KDL4tg7hxwzQKYmGc6jMZfzeD9UCTb00oNfbejIjzmk,1214 +transformers/models/poolformer/image_processing_poolformer.py,sha256=fObDfm06UHCQ3fl6JeLmKM-UREdAiV5RPlwO4aYCaCQ,18325 +transformers/models/poolformer/modeling_poolformer.py,sha256=En2zhLB5rHAuki31Ov0tRay52-tXyNr1svHM3BtwyQc,17840 +transformers/models/pop2piano/__init__.py,sha256=wxMmbwwAuqcGF8MimtfwAf4JPJ5D8x8up-q4yRlwU5E,3819 +transformers/models/pop2piano/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/configuration_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/convert_pop2piano_weights_to_hf.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/feature_extraction_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/modeling_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/processing_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/__pycache__/tokenization_pop2piano.cpython-310.pyc,, +transformers/models/pop2piano/configuration_pop2piano.py,sha256=QCVZ_BJ7XvAV0bD9-G4tV_NXJPwTgqMeAgL-ofESzTw,6030 +transformers/models/pop2piano/convert_pop2piano_weights_to_hf.py,sha256=eZuC9RFueLoOmsaGWMa-6hNQyLBLTg9WXlRQRuiQerA,8626 +transformers/models/pop2piano/feature_extraction_pop2piano.py,sha256=SBNQB6aol_Uan2p_z33IQue9y4exatqd80XyzHGBoqY,19839 +transformers/models/pop2piano/modeling_pop2piano.py,sha256=QZ1lv2nJbsjlbIHD7oRh8MpvgdpWP-ULF04RckDN2E4,65620 +transformers/models/pop2piano/processing_pop2piano.py,sha256=ytBqku-v0wCqeK4_JVd-0SNCI7jmYltMb5wDzagn6V4,5525 +transformers/models/pop2piano/tokenization_pop2piano.py,sha256=Y3grUs2_4YvgUDxDAhe4hBBJe0RyAZq_ofx11jw1M5A,32677 +transformers/models/prophetnet/__init__.py,sha256=1w4cY9QLl0elN9_oFDScwrb0F12-54b5ylPrxCiqpFw,2157 +transformers/models/prophetnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/configuration_prophetnet.cpython-310.pyc,, 
+transformers/models/prophetnet/__pycache__/convert_prophetnet_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/modeling_prophetnet.cpython-310.pyc,, +transformers/models/prophetnet/__pycache__/tokenization_prophetnet.cpython-310.pyc,, +transformers/models/prophetnet/configuration_prophetnet.py,sha256=IGfCeoZGihbqnOTXCFnA_jebhsWVsfOrIBUZ7To_LBg,8973 +transformers/models/prophetnet/convert_prophetnet_original_pytorch_checkpoint_to_pytorch.py,sha256=EzgNdUzWNQowTUpyfXO-_RBZEw0sa5sVA1b7jbqFUxU,7055 +transformers/models/prophetnet/modeling_prophetnet.py,sha256=L6Fh66H5gaaQAmoUyhMng4UwbF6OsSVZ8Nsu3WB4BeQ,115458 +transformers/models/prophetnet/tokenization_prophetnet.py,sha256=5vc6UgMSkJlybAN5nDfDCeqxkItxl-1RUFsWLfX0LPg,20874 +transformers/models/pvt/__init__.py,sha256=FxRer-Bn0NI00eTjXYOlUzVNJMH50lB78JEWPk1BNuw,2384 +transformers/models/pvt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pvt/__pycache__/configuration_pvt.cpython-310.pyc,, +transformers/models/pvt/__pycache__/convert_pvt_to_pytorch.cpython-310.pyc,, +transformers/models/pvt/__pycache__/image_processing_pvt.cpython-310.pyc,, +transformers/models/pvt/__pycache__/modeling_pvt.cpython-310.pyc,, +transformers/models/pvt/configuration_pvt.py,sha256=gf4nBOZrbywy8Wxsy85BwEVPObgPo9oHkXHDc4HXlz8,7015 +transformers/models/pvt/convert_pvt_to_pytorch.py,sha256=1DIHp33moj_2LrWws9x02AZ9qRrVMCQ3jifvV3SxmFc,9738 +transformers/models/pvt/image_processing_pvt.py,sha256=dRcMJCdWkBPZek4hG6gbJ2zyDGRBWbpEGm4caGJZAIc,14267 +transformers/models/pvt/modeling_pvt.py,sha256=uZAEEDvOJpHIx-VxChXuiU60c_DSE7Wgv4THk_1SYE4,28350 +transformers/models/pvt_v2/__init__.py,sha256=juUzRcgqzQAI5MHUbyhwB3lIeQeTk05FR9n3YQFWAQo,1832 +transformers/models/pvt_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/configuration_pvt_v2.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/convert_pvt_v2_to_pytorch.cpython-310.pyc,, +transformers/models/pvt_v2/__pycache__/modeling_pvt_v2.cpython-310.pyc,, +transformers/models/pvt_v2/configuration_pvt_v2.py,sha256=-vu-7Yg-hVJ6S5FSyqKTqX8M7w0cDO_S8k2UEKYQy6c,7963 +transformers/models/pvt_v2/convert_pvt_v2_to_pytorch.py,sha256=OqYTYB1bssEh4C-AwCFG0VDDcEWZa1Su5kUkrn_UcOo,12077 +transformers/models/pvt_v2/modeling_pvt_v2.py,sha256=iQL_48n_xGDak3-7A2TDLixYEa8t-HCdzLkuBhVnlrw,29417 +transformers/models/qdqbert/__init__.py,sha256=x3xI7kd5kpsjAvYJT8SrR5_uCeInhVA8repNZFRtXhU,2402 +transformers/models/qdqbert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qdqbert/__pycache__/configuration_qdqbert.cpython-310.pyc,, +transformers/models/qdqbert/__pycache__/modeling_qdqbert.cpython-310.pyc,, +transformers/models/qdqbert/configuration_qdqbert.py,sha256=ZEEbypDoWw0fwcOV-5H24BmaP0fzcBxra3l-HI7yC0Y,5787 +transformers/models/qdqbert/modeling_qdqbert.py,sha256=FNtfupgOXaUvmlH9OiTzImEVx86ZV78U3-JGIAMpH80,77285 +transformers/models/qwen2/__init__.py,sha256=9gokBZ-g_YdJeUBfioDa7ZRVQdTgZ_nNQA03axWYwEw,2354 +transformers/models/qwen2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/configuration_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/modeling_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2.cpython-310.pyc,, +transformers/models/qwen2/__pycache__/tokenization_qwen2_fast.cpython-310.pyc,, +transformers/models/qwen2/configuration_qwen2.py,sha256=caoAxItLWG4oMDOv4Rzp34w-m7xRYT4RzQYXsQPFWCs,6729 
+transformers/models/qwen2/modeling_qwen2.py,sha256=8cehF87SDuYy5MF9CZgoYzBNbjC7RPDM3r1LJ93iWa0,63883 +transformers/models/qwen2/tokenization_qwen2.py,sha256=y9hRJ6oYYRa_4UyoQUPU_BlsrnTPKoEByiCQ3zelSmE,13913 +transformers/models/qwen2/tokenization_qwen2_fast.py,sha256=dwuIyJcKjM58NS8TLAUNhiYpjdMxdTgj0iXJCQ2j7rE,5159 +transformers/models/qwen2_moe/__init__.py,sha256=zrwptL-PWmTeTM7Z8JcUxeUiY1dViTHY0LGvP4BhNpg,1829 +transformers/models/qwen2_moe/__pycache__/__init__.cpython-310.pyc,, +transformers/models/qwen2_moe/__pycache__/configuration_qwen2_moe.cpython-310.pyc,, +transformers/models/qwen2_moe/__pycache__/modeling_qwen2_moe.cpython-310.pyc,, +transformers/models/qwen2_moe/configuration_qwen2_moe.py,sha256=-DQ4llhbz9NXF77T5IfSZCMoO3KQexubDMTqITb1nos,8538 +transformers/models/qwen2_moe/modeling_qwen2_moe.py,sha256=tOra9gJt6OoN0X-GtMg-UCRmP8umduJ29kPFVHyF220,73498 +transformers/models/rag/__init__.py,sha256=omMwtpcTWBHYKZvt8NIxbACHhICmYWfeTgiC7O4U88g,2426 +transformers/models/rag/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rag/__pycache__/configuration_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/modeling_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/modeling_tf_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/retrieval_rag.cpython-310.pyc,, +transformers/models/rag/__pycache__/tokenization_rag.cpython-310.pyc,, +transformers/models/rag/configuration_rag.py,sha256=9B2B7I_Ep2pduixD8ZTJfBz1ZLPYhE3cioN8xDmrWZk,8339 +transformers/models/rag/modeling_rag.py,sha256=3Z76u5RQI5rfrEs3mECzu43gN9msavXjt35CUaFDNRo,85799 +transformers/models/rag/modeling_tf_rag.py,sha256=kEbSfcPwE94BqHh_h94XjoPd5OJcN5aQ8vNu23-rkUU,88806 +transformers/models/rag/retrieval_rag.py,sha256=DVxhTiqqcQzFtDruk_mx8oprFI7D5l9HGjuM17xvzPg,29923 +transformers/models/rag/tokenization_rag.py,sha256=O5gPSIP0dOyYEe5k4VjcCttsbAoAAZ6338z0IsWF690,4576 +transformers/models/realm/__init__.py,sha256=k3gccDAsk5YJYrjrd8hOZCc1q8KJR2EMoGhvEdF-OTU,2675 +transformers/models/realm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/realm/__pycache__/configuration_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/modeling_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/retrieval_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/tokenization_realm.cpython-310.pyc,, +transformers/models/realm/__pycache__/tokenization_realm_fast.cpython-310.pyc,, +transformers/models/realm/configuration_realm.py,sha256=k74z55xCo5QoRHXicrYAt2aPYxmYNvU_0JvyHa_o_cc,7653 +transformers/models/realm/modeling_realm.py,sha256=_HWjXt0j-I5WfgypfkqZhXeUAIrKVNtZ7w0vFQL-UuM,84062 +transformers/models/realm/retrieval_realm.py,sha256=86jQyu1U8QePlahXS8rGD_E6TlvEqQeqg21qSsAno-M,6370 +transformers/models/realm/tokenization_realm.py,sha256=XhYz17ej45dQtk35h1peHxcyVaS-hrE6Gu_FCJA4pJY,23127 +transformers/models/realm/tokenization_realm_fast.py,sha256=PuyK8CEVibBXCVGxrwKnz3siAvcr0JbUGD6DCLM1Yi0,10950 +transformers/models/recurrent_gemma/__init__.py,sha256=gUE-KRPGeD_b-aklMGn9oDbnSdZ8twDOQXUxL2zWkIo,1708 +transformers/models/recurrent_gemma/__pycache__/__init__.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/configuration_recurrent_gemma.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/convert_recurrent_gemma_to_hf.cpython-310.pyc,, +transformers/models/recurrent_gemma/__pycache__/modeling_recurrent_gemma.cpython-310.pyc,, +transformers/models/recurrent_gemma/configuration_recurrent_gemma.py,sha256=R70TEAu3_3PwRdVJD7kOrQ5DbmuMQ6Z36jF0yWCyqVU,7714 
+transformers/models/recurrent_gemma/convert_recurrent_gemma_to_hf.py,sha256=jZGkZ2FmNFWsZXz37gf86NjLRFbgLTK6C-ZO6-JChks,7965 +transformers/models/recurrent_gemma/modeling_recurrent_gemma.py,sha256=FO_judafGpCu8s-ChBW25WIrMKrR9ALbzCYkO3idH8s,43285 +transformers/models/reformer/__init__.py,sha256=MKhG4aefK429UY32oYQbVTLm1T2L_SIYS_TNnrWnTwA,3139 +transformers/models/reformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/reformer/__pycache__/configuration_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/convert_reformer_trax_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/reformer/__pycache__/modeling_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer.cpython-310.pyc,, +transformers/models/reformer/__pycache__/tokenization_reformer_fast.cpython-310.pyc,, +transformers/models/reformer/configuration_reformer.py,sha256=KnktX4WZXYzBWzuKUFEk0DD8sMEqTMuv476IDrRW5P4,13266 +transformers/models/reformer/convert_reformer_trax_checkpoint_to_pytorch.py,sha256=axn3FvdtVSdQT5V5u1-sfJ3sMV3YpEU6r5B10bTYZ8o,7818 +transformers/models/reformer/modeling_reformer.py,sha256=-6XZjj2memPGxK8ENCrEjSrKIUA-wKFpd7LiQYTdh74,115220 +transformers/models/reformer/tokenization_reformer.py,sha256=999VPmUyPWWb1wYfzo6GJ_-qKRw1ooSwZubUZ_gJMKA,6728 +transformers/models/reformer/tokenization_reformer_fast.py,sha256=oJutU_vcsfO8wONlB9zPIiVwOlmcsPm8hMKR80elYMc,4247 +transformers/models/regnet/__init__.py,sha256=KQR1LgyjMxE0d-7nACPCHiRXo0rSm93vfcy8puDXbuE,3168 +transformers/models/regnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/regnet/__pycache__/configuration_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/convert_regnet_seer_10b_to_pytorch.cpython-310.pyc,, +transformers/models/regnet/__pycache__/convert_regnet_to_pytorch.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_flax_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_regnet.cpython-310.pyc,, +transformers/models/regnet/__pycache__/modeling_tf_regnet.cpython-310.pyc,, +transformers/models/regnet/configuration_regnet.py,sha256=rksbsyBauYuJK0Kk0sB1qpFqn2igmdy_G0SCyGw57Go,4044 +transformers/models/regnet/convert_regnet_seer_10b_to_pytorch.py,sha256=zDPbUZRiO0lJl7hdUztm9JnUAbOI1Wv5wyHZdCKQ-d0,11770 +transformers/models/regnet/convert_regnet_to_pytorch.py,sha256=lvSaB1ny0EKvS4KfhTpbNjdrYI6xE1zmYctM_O_a_Ak,18719 +transformers/models/regnet/modeling_flax_regnet.py,sha256=2Ao7eODWcHufpZoNbGC4FbX6tZVE2bfWWrZSMbPGcMg,28410 +transformers/models/regnet/modeling_regnet.py,sha256=PBAvLx6VQ5OMFrjdXW5gu1fplONoUdItEe0SI_gt11E,17282 +transformers/models/regnet/modeling_tf_regnet.py,sha256=5Rrqu4B-mKgdRVzROCZ0iVycKJ9s6-yRAEbJ8aIrxao,24402 +transformers/models/rembert/__init__.py,sha256=XC3xr6aUNReL6SzFXr6TyAWPg9EXiBFl4o225gmkNQQ,4514 +transformers/models/rembert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rembert/__pycache__/configuration_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/convert_rembert_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/rembert/__pycache__/modeling_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/modeling_tf_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert.cpython-310.pyc,, +transformers/models/rembert/__pycache__/tokenization_rembert_fast.cpython-310.pyc,, +transformers/models/rembert/configuration_rembert.py,sha256=qGyEjUnMzOvTdhB69fUbbz4cOEgb_hmFsPV4-K7_Bcs,7339 
+transformers/models/rembert/convert_rembert_tf_checkpoint_to_pytorch.py,sha256=C-TS1MrtQHTxK3j5HUKwlcrQItW24T7_iPvtt8KGbAU,2208 +transformers/models/rembert/modeling_rembert.py,sha256=W0I5fogXdxKg0HjW8gxx8GqAUdXXq2DjTI53IYwBnb8,68242 +transformers/models/rembert/modeling_tf_rembert.py,sha256=yyNfB5wAlnoSOaTCw3mMlWiHiE4Wu1m19U4WOJDlauU,77785 +transformers/models/rembert/tokenization_rembert.py,sha256=36jnFfhXiaSSyGHHGQng6gcHPTBmApNRIFZ0acudQLk,10593 +transformers/models/rembert/tokenization_rembert_fast.py,sha256=QZxF21vv0eR_-SXTateBSIzUpE_ksyX6yNdvpUR_4Lk,9997 +transformers/models/resnet/__init__.py,sha256=n63hjzrOOmaIXaDS0F9thB531jarpWDBkXmgFaMBRbo,3216 +transformers/models/resnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/resnet/__pycache__/configuration_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/convert_resnet_to_pytorch.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_flax_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_resnet.cpython-310.pyc,, +transformers/models/resnet/__pycache__/modeling_tf_resnet.cpython-310.pyc,, +transformers/models/resnet/configuration_resnet.py,sha256=iYo3k5z_nHTI0auXFXHux1Z2KukX5UKFXAV1zdF4z8o,6117 +transformers/models/resnet/convert_resnet_to_pytorch.py,sha256=ShZl8Ob5ElrgRujQCoGXWdIY_99UICrWqiHdSzFdOHc,7287 +transformers/models/resnet/modeling_flax_resnet.py,sha256=uJMz2FgVXm6ffwjiorCHkuPbCRra8VdN1vYILRuIgxY,24607 +transformers/models/resnet/modeling_resnet.py,sha256=h66AeS7VpdGODycEN_aOpdF-j4xUGPUiinyE9Y2_CtA,19362 +transformers/models/resnet/modeling_tf_resnet.py,sha256=jWdPo8kjVlReTCpPExtovB_uDvmFgZhkXCn41fIcl9I,23752 +transformers/models/roberta/__init__.py,sha256=GvGX0z6XPZtwkfCh4K2xagGOK0tlW0DT91QVQhTcA4o,5091 +transformers/models/roberta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roberta/__pycache__/configuration_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/convert_roberta_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_flax_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/modeling_tf_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta.cpython-310.pyc,, +transformers/models/roberta/__pycache__/tokenization_roberta_fast.cpython-310.pyc,, +transformers/models/roberta/configuration_roberta.py,sha256=udsSFUQ1_ZIXg_-F0EZL33gsSziMuugAnO32_dDJE9Q,7359 +transformers/models/roberta/convert_roberta_original_pytorch_checkpoint_to_pytorch.py,sha256=MmHtq9AhcXXd-V8Fz0XWC8n-PL-S1MSdFhTCVM6Cksk,8002 +transformers/models/roberta/modeling_flax_roberta.py,sha256=Bz5VgKKwWnVVmRFyHD11Ug7IlvgwOLIMbGI0lBkMHt8,56976 +transformers/models/roberta/modeling_roberta.py,sha256=jtqJFONRl66bRXy_xY2O-oQ5BWECpsk6DVQxLPl00So,71188 +transformers/models/roberta/modeling_tf_roberta.py,sha256=QLF5cfwQbjcHvdzcnx87a8s5Vz7qOJXO3GTPXGtPsHg,79979 +transformers/models/roberta/tokenization_roberta.py,sha256=4Ft2MWhG4ESEO2yJ0a_8jaUU98IxIH2oLaorhotpC1w,16451 +transformers/models/roberta/tokenization_roberta_fast.py,sha256=I_QlLewfaCL3Apb-tpGleYJ4hUErNMY6uTSajTDBa18,11422 +transformers/models/roberta_prelayernorm/__init__.py,sha256=C9bA_ah_10OCt_LUT1bsOJTUjSt6boV2frOKBtHCes4,5391 +transformers/models/roberta_prelayernorm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/configuration_roberta_prelayernorm.cpython-310.pyc,, 
+transformers/models/roberta_prelayernorm/__pycache__/convert_roberta_prelayernorm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_flax_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/__pycache__/modeling_tf_roberta_prelayernorm.cpython-310.pyc,, +transformers/models/roberta_prelayernorm/configuration_roberta_prelayernorm.py,sha256=ciwjO--yU5b9Iy8NgIFvpOTyYfw7Ph0urUs0Mi6_yOI,7920 +transformers/models/roberta_prelayernorm/convert_roberta_prelayernorm_original_pytorch_checkpoint_to_pytorch.py,sha256=ti9rttSVMs3SemlZrVQFkDKKHBubrk29d4lQkpkI3Ro,2975 +transformers/models/roberta_prelayernorm/modeling_flax_roberta_prelayernorm.py,sha256=zMZKU2wl45qTh4ex3R9bf1PUVF12uC5vaVxIXQNqLNk,60537 +transformers/models/roberta_prelayernorm/modeling_roberta_prelayernorm.py,sha256=15wxCekW0VI80tCofOE_v68JhsqDd_g6Kh5NBk9XADI,73789 +transformers/models/roberta_prelayernorm/modeling_tf_roberta_prelayernorm.py,sha256=yUztEknHx5y5SCYeGD4AqrZg24VpCMh6C96Oq3xCISs,83157 +transformers/models/roc_bert/__init__.py,sha256=ItDlyJx76hWJLT_159wnQgdWC82bT-TG_FpFzjRqXaU,2875 +transformers/models/roc_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/configuration_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/modeling_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/__pycache__/tokenization_roc_bert.cpython-310.pyc,, +transformers/models/roc_bert/configuration_roc_bert.py,sha256=6M-ypuBQmDyE9Xb0wUipQ5GbE5aCh_fcAzaMQN_h-vU,8599 +transformers/models/roc_bert/modeling_roc_bert.py,sha256=rFs2KtEfNEDeyLBkwYJFRQznmz1jU-R39NxJSoFMc40,92994 +transformers/models/roc_bert/tokenization_roc_bert.py,sha256=PTfS5M-zkr_j65IkmdDcTdrcFiyP2J2VSIpCNoFB2vM,50197 +transformers/models/roformer/__init__.py,sha256=1EFy2Zdn9AdraO-fmIpYg1q_HLYq-7rT5qDL_8Gurnc,5333 +transformers/models/roformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/roformer/__pycache__/configuration_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/convert_roformer_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_flax_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/modeling_tf_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_roformer_fast.cpython-310.pyc,, +transformers/models/roformer/__pycache__/tokenization_utils.cpython-310.pyc,, +transformers/models/roformer/configuration_roformer.py,sha256=qjJAt1_uh5PrBZrVXouHR2zJWQuAWTgeUtxB7xL_UeI,6904 +transformers/models/roformer/convert_roformer_original_tf_checkpoint_to_pytorch.py,sha256=G57qbbWpRH06sm041u6D3BdNE7mCPSDvlaNLOZjWdvY,2240 +transformers/models/roformer/modeling_flax_roformer.py,sha256=ammOyHhVp0FbXflVtY-RpMH8-nX3lYhb0A_lG6JJg0A,39100 +transformers/models/roformer/modeling_roformer.py,sha256=DugUhmdn5vqueKfTxE6McTCens2KznBT8NRLzrrcpjo,69220 +transformers/models/roformer/modeling_tf_roformer.py,sha256=U1ZtEFFTT3vfIuuIM4MTZg4DPs-lJdbNV7nXlAk3kZ8,66018 +transformers/models/roformer/tokenization_roformer.py,sha256=k0JWRQlo_yDxGQ7AvvHVVbMA--Q8fCdGJW-5F38TsC0,21993 
+transformers/models/roformer/tokenization_roformer_fast.py,sha256=b11QDgBAetkJGh9VDCyxItOEHcKbXzplEKshs1WRMEQ,6678 +transformers/models/roformer/tokenization_utils.py,sha256=0ciH13qW2kCa5my1rPwfwAuSXX-jGzN0nzemvGvOBxw,2652 +transformers/models/rwkv/__init__.py,sha256=2uUo3Zi2By-3QKG7YkrEqllvFG4_SqJZ-NeplOxHCD4,1780 +transformers/models/rwkv/__pycache__/__init__.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/configuration_rwkv.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/convert_rwkv_checkpoint_to_hf.cpython-310.pyc,, +transformers/models/rwkv/__pycache__/modeling_rwkv.cpython-310.pyc,, +transformers/models/rwkv/configuration_rwkv.py,sha256=Qrlj9KTeC6X8ii7uCuQ5_X1EzBn5kPFzuVud08XsbF8,5273 +transformers/models/rwkv/convert_rwkv_checkpoint_to_hf.py,sha256=oXXZN2tt_yWCRAkqpE6-7kDPMy4PyKaYmpMZwsH-IUE,6994 +transformers/models/rwkv/modeling_rwkv.py,sha256=dXDCWFosWKhINGhVgfs0P6BYaVB-X-oArEXI7txHYS4,37800 +transformers/models/sam/__init__.py,sha256=1wiFtdU-_NON6yx4QfFBk4vrfwN4hHv7JEA3CSGq_wU,2980 +transformers/models/sam/__pycache__/__init__.cpython-310.pyc,, +transformers/models/sam/__pycache__/configuration_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/convert_sam_to_hf.cpython-310.pyc,, +transformers/models/sam/__pycache__/image_processing_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/modeling_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/modeling_tf_sam.cpython-310.pyc,, +transformers/models/sam/__pycache__/processing_sam.cpython-310.pyc,, +transformers/models/sam/configuration_sam.py,sha256=uUgPZxv3jV5YgW6ZNAGxCk2LomLnK6JqqPD1ZFX0PFk,13858 +transformers/models/sam/convert_sam_to_hf.py,sha256=bt3PXRVYpRlgu6Q7j5MoPredmVyY6t6xuOcfQlVCuSs,8542 +transformers/models/sam/image_processing_sam.py,sha256=8NImaUzsQDShcLgZG-NESbJY7_vgZ_3RooJPToRd37k,67313 +transformers/models/sam/modeling_sam.py,sha256=D0Ha4zH0g1O1nad5_WDsz4onPwA4onQKDIq0u07Obik,64760 +transformers/models/sam/modeling_tf_sam.py,sha256=8H4KOdytPwyeEnFIXiWNumo7Cb2EfYcmzH1NsFsCEgc,75549 +transformers/models/sam/processing_sam.py,sha256=qPln4ga6UimrOQ-nf7_ATDvn5L7q3xMEG7YQaXmHWjc,10930 +transformers/models/seamless_m4t/__init__.py,sha256=PRZMtfk0WN3i0ZSvQbv8wgqp4dOREyIvkgzx5obqn7I,3706 +transformers/models/seamless_m4t/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/configuration_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/convert_fairseq2_to_hf.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/feature_extraction_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/modeling_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/processing_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t.cpython-310.pyc,, +transformers/models/seamless_m4t/__pycache__/tokenization_seamless_m4t_fast.cpython-310.pyc,, +transformers/models/seamless_m4t/configuration_seamless_m4t.py,sha256=0MjMfjtCGwB8bbWtLDhE1r_8zmtR7PIoh4zmlnWitBs,23568 +transformers/models/seamless_m4t/convert_fairseq2_to_hf.py,sha256=F2AQrS9rfpktVBSXvFLmND9gMtASSEOMlYPQ6v8VDdU,15960 +transformers/models/seamless_m4t/feature_extraction_seamless_m4t.py,sha256=pSStJq6iPGHLWGDiIWN-ZuGBmYSbTkT2ISrFK7Bj7W8,13561 +transformers/models/seamless_m4t/modeling_seamless_m4t.py,sha256=SQjQgeGDC-_fe23_jJprmhlilqmaqDcHo79ql5Nbpaw,201413 +transformers/models/seamless_m4t/processing_seamless_m4t.py,sha256=OrPvDJkAAIuoWglyxt1Z4H993tm-AyX3OxDcu4Gmps0,5893 
+transformers/models/seamless_m4t/tokenization_seamless_m4t.py,sha256=4xSGOSx3XzpeY0nCAt0MxB0abOH8MQWNhIjp4SVcN1Y,25999 +transformers/models/seamless_m4t/tokenization_seamless_m4t_fast.py,sha256=9x1jPIO7Bb_WmFeYsZuTBT_hGucbsJshD_tJ5RBHE68,19884 +transformers/models/seamless_m4t_v2/__init__.py,sha256=eIGJqmaWPYi--eaUhctnu8W9EIihWP-uJsOORWLKVxg,2159 +transformers/models/seamless_m4t_v2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/configuration_seamless_m4t_v2.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/convert_fairseq2_to_hf.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/__pycache__/modeling_seamless_m4t_v2.cpython-310.pyc,, +transformers/models/seamless_m4t_v2/configuration_seamless_m4t_v2.py,sha256=JTbmVGjOBvUXbdYKiTMKRb2vhg6C4fn8DbgD4oXUvGY,24428 +transformers/models/seamless_m4t_v2/convert_fairseq2_to_hf.py,sha256=B3ChRBL4biKHRNsLhAKRsZ547XyxI1uwiywDUC6jKXo,15084 +transformers/models/seamless_m4t_v2/modeling_seamless_m4t_v2.py,sha256=xjvyUcchcgP-3UeLeCOfBZZzRJXZbYjbmrzZAyjF7zM,228147 +transformers/models/segformer/__init__.py,sha256=T1k_hhB2iCL8zOY3rcG9erX0JbBS--OgU27-G0ZxR2o,3676 +transformers/models/segformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/segformer/__pycache__/configuration_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/convert_segformer_original_to_pytorch.cpython-310.pyc,, +transformers/models/segformer/__pycache__/feature_extraction_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/image_processing_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/modeling_segformer.cpython-310.pyc,, +transformers/models/segformer/__pycache__/modeling_tf_segformer.cpython-310.pyc,, +transformers/models/segformer/configuration_segformer.py,sha256=FaD33v_h9BpMpYLpBhN_w58U7SYcCqCf0ygaLJeYj04,7467 +transformers/models/segformer/convert_segformer_original_to_pytorch.py,sha256=UXWvoxIi_vor0L5yPuqD7wUuy-vzSNtypQcrpLkTZFc,17092 +transformers/models/segformer/feature_extraction_segformer.py,sha256=yaRckmbmTyh1Oow3PnHLsjW4MURaWqddhTzG-PVcywk,1207 +transformers/models/segformer/image_processing_segformer.py,sha256=KO7UmIFZ-4MchZSg6PE3bp1ERgvez5EF_52CnxQZ-Co,23364 +transformers/models/segformer/modeling_segformer.py,sha256=X93UhhYjUNpFtYil3yceHHHbAL9me8Jo565jgzU7pwc,35414 +transformers/models/segformer/modeling_tf_segformer.py,sha256=S2Sv3dyD2s7SorgzNI8AWSNzD5cpulGrTNPzHT-cEB8,43722 +transformers/models/seggpt/__init__.py,sha256=wJaoAc_RPANBcGRc6ErzsvLzxW1zKGRi6YWCxHq77y0,2284 +transformers/models/seggpt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/configuration_seggpt.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/convert_seggpt_to_hf.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/image_processing_seggpt.cpython-310.pyc,, +transformers/models/seggpt/__pycache__/modeling_seggpt.cpython-310.pyc,, +transformers/models/seggpt/configuration_seggpt.py,sha256=HN1eDgAOfj7pWQYuNdtaRw1TaWh8X41MER3ZcbDIL7A,6563 +transformers/models/seggpt/convert_seggpt_to_hf.py,sha256=IsB0yzLF9kH5Lz4oBFLpMOeDLdC-SKOYDtFZhcpL6iA,9779 +transformers/models/seggpt/image_processing_seggpt.py,sha256=wdcV4Fl_lhPZCop2Rw5R_xoVpWN5Zv_2LQO0XY10zKc,31163 +transformers/models/seggpt/modeling_seggpt.py,sha256=ytf4BmWo_WrOmWYMT74tX06jvw4CAFYhfvszAy_PlSI,45300 +transformers/models/sew/__init__.py,sha256=VG7sYJFBweKB5Cb9lzyRYdjeG0olDM7cIQIUy4XQR8M,1778 +transformers/models/sew/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/sew/__pycache__/configuration_sew.cpython-310.pyc,, +transformers/models/sew/__pycache__/convert_sew_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/sew/__pycache__/modeling_sew.cpython-310.pyc,, +transformers/models/sew/configuration_sew.py,sha256=Uuuteey5yStUZYZnJVO7l7-w0SdZ3jBoxv33BPPr26M,14277 +transformers/models/sew/convert_sew_original_pytorch_checkpoint_to_pytorch.py,sha256=TzlAoTl1DQUm3bhNxDlpXoxe-u1ZcMMbhrQsefGbFog,12745 +transformers/models/sew/modeling_sew.py,sha256=Rp11q4KUUzGo_rkwo9KTLBjzyqYEtxToEmFuLxNIRpk,53344 +transformers/models/sew_d/__init__.py,sha256=5d5VSrW-sTwr3H0e2js1KsRL7SM4GPiRPY9Hl_gVjWk,1804 +transformers/models/sew_d/__pycache__/__init__.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/configuration_sew_d.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/convert_sew_d_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/sew_d/__pycache__/modeling_sew_d.cpython-310.pyc,, +transformers/models/sew_d/configuration_sew_d.py,sha256=AFcSDc3pw6Jwwwg-4WI5SXxoHSy_W7B9H993EiVjOgQ,16447 +transformers/models/sew_d/convert_sew_d_original_pytorch_checkpoint_to_pytorch.py,sha256=OeszH3N5vz1FbXoF-d-w6wDJ2A2MxvUMn9uDMpU7bro,13575 +transformers/models/sew_d/modeling_sew_d.py,sha256=IXvsgRJkhN8QFUx5r_LjJmc8j-TEIj1z7crS5S3BjNg,73708 +transformers/models/siglip/__init__.py,sha256=vuoROawTSIHtXkVVxhysxf-Cx7s3QCEMfvkUsJCxO7M,3124 +transformers/models/siglip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/siglip/__pycache__/configuration_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/convert_siglip_to_hf.cpython-310.pyc,, +transformers/models/siglip/__pycache__/image_processing_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/modeling_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/processing_siglip.cpython-310.pyc,, +transformers/models/siglip/__pycache__/tokenization_siglip.cpython-310.pyc,, +transformers/models/siglip/configuration_siglip.py,sha256=rrbWUFosgjjTsvOWjeMNueqIh5f3oEgg87zwxnwSYIw,13625 +transformers/models/siglip/convert_siglip_to_hf.py,sha256=Rg5BhRWVeIKxc9Dz0ZUCjhG3hasNtrORlIcOYoV7xS0,20830 +transformers/models/siglip/image_processing_siglip.py,sha256=wwUHCSEJENYaBPKyeSO3uEPixk4RQCVMFUhzrC2Q5BM,11775 +transformers/models/siglip/modeling_siglip.py,sha256=m_GDDo0NOE_O6_TgOnkoGJs0iQZ0au2McD54_ezKTUs,56299 +transformers/models/siglip/processing_siglip.py,sha256=x5A9CKyzNzOF0udXvMVQ4hMFBCbAdH-WnLAXqop75zk,7302 +transformers/models/siglip/tokenization_siglip.py,sha256=wflQIXvIGvoXsrTKeGGuZUo7kesIVnxFS5VBZvuiRfI,15953 +transformers/models/speech_encoder_decoder/__init__.py,sha256=987NzBteEbQy0IYY43B_JKolw2BbyX6Ox9s__xH0daQ,2037 +transformers/models/speech_encoder_decoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/configuration_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/convert_mbart_wav2vec2_seq2seq_original_to_pytorch.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/convert_speech_to_text_wav2vec2_seq2seq_original_to_pytorch.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/modeling_flax_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/__pycache__/modeling_speech_encoder_decoder.cpython-310.pyc,, +transformers/models/speech_encoder_decoder/configuration_speech_encoder_decoder.py,sha256=7hzCE73LcHbiq3b4pTsMdSwjtl4izOtoZE-ldVs8Bx4,4575 
+transformers/models/speech_encoder_decoder/convert_mbart_wav2vec2_seq2seq_original_to_pytorch.py,sha256=EtCwDPHsete4dhXGu8OwkbRx7-47vbHRKUrb8j-6M2c,14754 +transformers/models/speech_encoder_decoder/convert_speech_to_text_wav2vec2_seq2seq_original_to_pytorch.py,sha256=04swyKsxEHHieCLUFPKzubV4W0ES1mZtbkgv-UDt7po,11971 +transformers/models/speech_encoder_decoder/modeling_flax_speech_encoder_decoder.py,sha256=i8GFLLxYQSh2uj6IAZNkGglUOt5C3VbSNvevYsoqSOs,44643 +transformers/models/speech_encoder_decoder/modeling_speech_encoder_decoder.py,sha256=U064X5_0R8t-uuU6z1S3025DqGhgRF7wz3Rg4cg7Kx4,32266 +transformers/models/speech_to_text/__init__.py,sha256=y2bX48UezdcJd_0EyTBq6xLWHL0vup-noE235__AYw8,3491 +transformers/models/speech_to_text/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/configuration_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/convert_s2t_fairseq_to_tfms.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/feature_extraction_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/modeling_tf_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/processing_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/__pycache__/tokenization_speech_to_text.cpython-310.pyc,, +transformers/models/speech_to_text/configuration_speech_to_text.py,sha256=qlnx21XZwtdAt2HLOml5VKB2lme0nUbOvU6pvIxQH_s,9882 +transformers/models/speech_to_text/convert_s2t_fairseq_to_tfms.py,sha256=v-5aSPwuCKCtqwU8gREj9wA2nm14Z97tg6wQ3S47gos,4478 +transformers/models/speech_to_text/feature_extraction_speech_to_text.py,sha256=bW4mXxoo1FKXFhfvstyPbWm8fMRMN1G7KXwkGN-vdxw,13176 +transformers/models/speech_to_text/modeling_speech_to_text.py,sha256=EKAWOKIJEStDhWZRMhW6ay1USDff-rHLz02Zq7CZjig,64505 +transformers/models/speech_to_text/modeling_tf_speech_to_text.py,sha256=wsWLyBQcxrsVHXTQs0wZTJiHkTCdjyx1w5xs0C6Qw60,74423 +transformers/models/speech_to_text/processing_speech_to_text.py,sha256=dtDsYvPg-jn-O5iiVDPH5154wOEDglsODuF4dPn7XYc,4818 +transformers/models/speech_to_text/tokenization_speech_to_text.py,sha256=u9cXinU0FieTCD4LcmB6sU2lK5DYjkN8RyGU3seKOt4,11400 +transformers/models/speech_to_text_2/__init__.py,sha256=zkmS9-WZTXByVUJqkt094wHCOT4zyVLO4Rn3B0JBCSo,2166 +transformers/models/speech_to_text_2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/configuration_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/modeling_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/processing_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/__pycache__/tokenization_speech_to_text_2.cpython-310.pyc,, +transformers/models/speech_to_text_2/configuration_speech_to_text_2.py,sha256=UQJCW5VUlSN_ZLxK5OM4GpGZLcyUkNPTA9DcEMgafGk,6108 +transformers/models/speech_to_text_2/modeling_speech_to_text_2.py,sha256=ZBFbuStKESxeJBfnkI418I_L6D5R1h-gB7srUWTk8OI,44113 +transformers/models/speech_to_text_2/processing_speech_to_text_2.py,sha256=J3Uv4HX7Y5zndYa3ZIROcEuLEfrw2piJC53AZmSkGnY,4790 +transformers/models/speech_to_text_2/tokenization_speech_to_text_2.py,sha256=YwsmogjE2We9H6o5hDPvDIBH_BRq4xUu6uTmd7AGNEI,8403 +transformers/models/speecht5/__init__.py,sha256=rI6eMJ1n9U8Mtn17i83U2qOhvcOQJudmFYU9roGYUno,2971 +transformers/models/speecht5/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/speecht5/__pycache__/configuration_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/convert_hifigan.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/convert_speecht5_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/feature_extraction_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/modeling_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/number_normalizer.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/processing_speecht5.cpython-310.pyc,, +transformers/models/speecht5/__pycache__/tokenization_speecht5.cpython-310.pyc,, +transformers/models/speecht5/configuration_speecht5.py,sha256=l2YOkdso_mS0eJaELrbOsZmtndbAQejRep43DuRgsFw,23646 +transformers/models/speecht5/convert_hifigan.py,sha256=CL9GSX_bimjm_hU2rE55MaNvTUjTtWD6qCtqNMaXy7I,4241 +transformers/models/speecht5/convert_speecht5_original_pytorch_checkpoint_to_pytorch.py,sha256=AyAjaeibe3002YZRT2maq1Yi8-iP1j7Ahs5qxYMjiJ0,17194 +transformers/models/speecht5/feature_extraction_speecht5.py,sha256=lcKx3NaIXx0PGITRKP0kA8SZK75kd1Sn8PNHLBn-ST0,17809 +transformers/models/speecht5/modeling_speecht5.py,sha256=LGB_28sFN-mTFUti-G7ejvH2Hxnxgn6ic31YtDnyXEw,153388 +transformers/models/speecht5/number_normalizer.py,sha256=cxnEUdHSISW5eAo15cLuVkZa65zMFuMFaJ8zAOQCsAA,7019 +transformers/models/speecht5/processing_speecht5.py,sha256=smqFdqKJQp9Vm1FDfmj7EvJeAZKSPB6u2AZMfsjsQa0,7562 +transformers/models/speecht5/tokenization_speecht5.py,sha256=dTeIcz0oFUY8Rg7vCkCWuJQotKIcvNZWXj9DYPDtO9Q,8913 +transformers/models/splinter/__init__.py,sha256=vo990AmnOkGy7xWuzB4qaAfJNrtFFLOImR4mlSl_jJ8,2532 +transformers/models/splinter/__pycache__/__init__.cpython-310.pyc,, +transformers/models/splinter/__pycache__/configuration_splinter.cpython-310.pyc,, +transformers/models/splinter/__pycache__/modeling_splinter.cpython-310.pyc,, +transformers/models/splinter/__pycache__/tokenization_splinter.cpython-310.pyc,, +transformers/models/splinter/__pycache__/tokenization_splinter_fast.cpython-310.pyc,, +transformers/models/splinter/configuration_splinter.py,sha256=IcoRrF-fIuVO4fJc4Q0ufyyOOhJuUF8RgUrOvsyA-Fc,5695 +transformers/models/splinter/modeling_splinter.py,sha256=pM9_J0yHh_-HoN2HgWOE2ZcIOvrQMCrE5_4VuepLH30,53249 +transformers/models/splinter/tokenization_splinter.py,sha256=6y_XSErTLNRpI2faZTsiA5AJ6K-kOJVuhtUKv0flhvY,20920 +transformers/models/splinter/tokenization_splinter_fast.py,sha256=t-gbV9OTlANeZQ_XLiV5GYpp9qZW9i7VllaLKf47ztI,8565 +transformers/models/squeezebert/__init__.py,sha256=G8bhLM5DmRO6oIXmZT-W71i8hZK9589XpyLuwIs6W3M,2996 +transformers/models/squeezebert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/configuration_squeezebert.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/modeling_squeezebert.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/tokenization_squeezebert.cpython-310.pyc,, +transformers/models/squeezebert/__pycache__/tokenization_squeezebert_fast.cpython-310.pyc,, +transformers/models/squeezebert/configuration_squeezebert.py,sha256=fLc9Szprw4a7p5Gi6b1TItqq9xccjprrolanKRXDgJI,7347 +transformers/models/squeezebert/modeling_squeezebert.py,sha256=FqEipgCvcfke3--lpcyJauaCZk2quBYCUqjfUO5Rkhw,45027 +transformers/models/squeezebert/tokenization_squeezebert.py,sha256=jTVxnrL-DWDzP7VopDyagQlSN161QuIto05TyUk9-z0,20893 +transformers/models/squeezebert/tokenization_squeezebert_fast.py,sha256=J22q1PJ-qa7ymcvvpmiX2ft2OxUDHi2Gdiny4rOiOZM,7819 
+transformers/models/stablelm/__init__.py,sha256=DfGQ8YT2zSeiNRGOhIhypn-IFNOkXmqIt4BHzq8KnSU,1824 +transformers/models/stablelm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/stablelm/__pycache__/configuration_stablelm.cpython-310.pyc,, +transformers/models/stablelm/__pycache__/modeling_stablelm.cpython-310.pyc,, +transformers/models/stablelm/configuration_stablelm.py,sha256=4gI6bRhKYMaI00wDpHnCiKpLzh6NpzMZB0gc4yRMyHo,9450 +transformers/models/stablelm/modeling_stablelm.py,sha256=rrD8f5ToAfVkwH_J3ZcQlaMm1DbI3CnQ-nvAmpYNYTw,65166 +transformers/models/starcoder2/__init__.py,sha256=qUoxxHVVueu5KFeV8LWAoMmtBfwnYVjA-pdoCnho7tQ,1851 +transformers/models/starcoder2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/starcoder2/__pycache__/configuration_starcoder2.cpython-310.pyc,, +transformers/models/starcoder2/__pycache__/modeling_starcoder2.cpython-310.pyc,, +transformers/models/starcoder2/configuration_starcoder2.py,sha256=3H_EPAOpblcoJKB68G3SgDdSdM-jugjWP3Uwhf5Lqj4,6938 +transformers/models/starcoder2/modeling_starcoder2.py,sha256=MMrkh6uflnMxaMW7TJlSrU_iCsSnbsXP21jHcl1dSO0,63992 +transformers/models/superpoint/__init__.py,sha256=v0DSf2EqaAYJyCh2DMbwCXzVnPMF8SzuOUVqP4GOwV8,2334 +transformers/models/superpoint/__pycache__/__init__.cpython-310.pyc,, +transformers/models/superpoint/__pycache__/configuration_superpoint.cpython-310.pyc,, +transformers/models/superpoint/__pycache__/convert_superpoint_to_pytorch.cpython-310.pyc,, +transformers/models/superpoint/__pycache__/image_processing_superpoint.cpython-310.pyc,, +transformers/models/superpoint/__pycache__/modeling_superpoint.cpython-310.pyc,, +transformers/models/superpoint/configuration_superpoint.py,sha256=ry1MX8YgMekp7XkPlpFFgKv3c7IXgdW_0RZeRTnDPNg,4205 +transformers/models/superpoint/convert_superpoint_to_pytorch.py,sha256=tO1P6yqW46LY1hnWIJPOs4KjW0uZWkiVWW-GTOXbJGg,7243 +transformers/models/superpoint/image_processing_superpoint.py,sha256=Om_ry5alSPtghMVDfFXI2CwDYRNm4siwZGYPmqdNFlE,12510 +transformers/models/superpoint/modeling_superpoint.py,sha256=e93BoVcM7xmyvIoGWB02wSW6WSE6ps0lLaByU0qqwDY,21535 +transformers/models/swiftformer/__init__.py,sha256=y3EVx2oOV5GldnIhqN1uK316Lf68wv3IsTE4HGd2DSc,1990 +transformers/models/swiftformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/swiftformer/__pycache__/configuration_swiftformer.cpython-310.pyc,, +transformers/models/swiftformer/__pycache__/convert_swiftformer_original_to_hf.cpython-310.pyc,, +transformers/models/swiftformer/__pycache__/modeling_swiftformer.cpython-310.pyc,, +transformers/models/swiftformer/configuration_swiftformer.py,sha256=l5ZtRSZsELMkjsDQzY8q_fa07Hoa6p6p_fHveqABM6c,5303 +transformers/models/swiftformer/convert_swiftformer_original_to_hf.py,sha256=HsppMeVG__p-Z4sCLcGLnDhXP-AFe6ewWiifyEFL-xA,6239 +transformers/models/swiftformer/modeling_swiftformer.py,sha256=6nOd2r567sT-mC7SdBnGh75tTOZfAMkT5w43rUgwZh4,23089 +transformers/models/swin/__init__.py,sha256=lsSSO-igADN2rI7RV55GBIB-GG8mRQNnsT9A6J8IFtk,2703 +transformers/models/swin/__pycache__/__init__.cpython-310.pyc,, +transformers/models/swin/__pycache__/configuration_swin.cpython-310.pyc,, +transformers/models/swin/__pycache__/convert_swin_simmim_to_pytorch.cpython-310.pyc,, +transformers/models/swin/__pycache__/convert_swin_timm_to_pytorch.cpython-310.pyc,, +transformers/models/swin/__pycache__/modeling_swin.cpython-310.pyc,, +transformers/models/swin/__pycache__/modeling_tf_swin.cpython-310.pyc,, 
+transformers/models/swin/configuration_swin.py,sha256=Ijjcs9LVMF3lZ8VlT4qRJYwrVHQfzsOd5xhDIAXMRls,8001 +transformers/models/swin/convert_swin_simmim_to_pytorch.py,sha256=Zb67GMulOozvN1L66EmQ9gKtLVUmyaWYgq_zPPdbGKs,6627 +transformers/models/swin/convert_swin_timm_to_pytorch.py,sha256=WKAiiEOxnv4_yjbLVsU9M50iwE_x0QEvbXrMZK1W_7Q,5805 +transformers/models/swin/modeling_swin.py,sha256=C0Gyrppbrkz-Zambvbkwkkz4Vs89tOXDazz00aF1cbM,60090 +transformers/models/swin/modeling_tf_swin.py,sha256=CvFCCZ6fnqqcWk1r8wTS6UuNRpFjZKsjRT8AyVapd5k,70774 +transformers/models/swin2sr/__init__.py,sha256=Nx5kG4ltMIhcqaGLYh7VYoju_qViNNYZGdGE0p-rz_4,2277 +transformers/models/swin2sr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/configuration_swin2sr.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/convert_swin2sr_original_to_pytorch.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/image_processing_swin2sr.cpython-310.pyc,, +transformers/models/swin2sr/__pycache__/modeling_swin2sr.cpython-310.pyc,, +transformers/models/swin2sr/configuration_swin2sr.py,sha256=d8FEyDhHTyNREZZ9hj0JUSDtxt0110KkGmjdWPITC0k,6911 +transformers/models/swin2sr/convert_swin2sr_original_to_pytorch.py,sha256=eZ1q75t9Na8iF_KkMXK9hHb0O0KyX9Bv1JhO3r94ZLA,11355 +transformers/models/swin2sr/image_processing_swin2sr.py,sha256=9GDG_McVWO6VSAZd64WZkSij78wIlxAq2LYVmyyfeeU,9544 +transformers/models/swin2sr/modeling_swin2sr.py,sha256=ceR_m_Noodnro2p817Xzqn_RLGdGLBDUvXt-GiMG_1o,50720 +transformers/models/swinv2/__init__.py,sha256=wYBHIbUFdjRY2cLLBWgHOOvE1ZNk6UD6Hj2qYYR2i5Q,1921 +transformers/models/swinv2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/swinv2/__pycache__/configuration_swinv2.cpython-310.pyc,, +transformers/models/swinv2/__pycache__/convert_swinv2_timm_to_pytorch.cpython-310.pyc,, +transformers/models/swinv2/__pycache__/modeling_swinv2.cpython-310.pyc,, +transformers/models/swinv2/configuration_swinv2.py,sha256=C4XGeME3RYIsVagj4f73sgXIhBr7kmkL7unq97rNycA,7617 +transformers/models/swinv2/convert_swinv2_timm_to_pytorch.py,sha256=OMyAAcVPs9DTojiHQCvLo7uTtaChsd1ANTY4IkS7iUY,7687 +transformers/models/swinv2/modeling_swinv2.py,sha256=MGvMlW02iYjDLJx0S8Jjy6eJsCsN9bx5vNaQ9gwmxqc,63826 +transformers/models/switch_transformers/__init__.py,sha256=71GlCMK0XfSUSoxmTxWjj-vmLJImHjlJjtUWkptdalA,2484 +transformers/models/switch_transformers/__pycache__/__init__.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/configuration_switch_transformers.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/convert_big_switch.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/convert_switch_transformers_original_flax_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/switch_transformers/__pycache__/modeling_switch_transformers.cpython-310.pyc,, +transformers/models/switch_transformers/configuration_switch_transformers.py,sha256=n3leQIeLxJhl2gfXlRF5MNWiTKI4d736uBCQ1UUJVf0,9116 +transformers/models/switch_transformers/convert_big_switch.py,sha256=wjMGjHXAqVool6fZQhdG_Av2Ujx9EDoZrtHC8RdDLk4,7659 +transformers/models/switch_transformers/convert_switch_transformers_original_flax_checkpoint_to_pytorch.py,sha256=AAJNkPcr_THjPN_8RUnOdBYbbYc6GOqXdgdjhx9FZyw,7593 +transformers/models/switch_transformers/modeling_switch_transformers.py,sha256=pGKIFiCGvhyTNZ-WlgJDuP7IFbWH0i47AcznpnyXhpI,87663 +transformers/models/t5/__init__.py,sha256=-WUyKPr21y-Gi15sZ8aW3vmykCW8tu5qZ6yKmOcOHso,4492 +transformers/models/t5/__pycache__/__init__.cpython-310.pyc,, 
+transformers/models/t5/__pycache__/configuration_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/convert_t5_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/t5/__pycache__/convert_t5x_checkpoint_to_flax.cpython-310.pyc,, +transformers/models/t5/__pycache__/convert_t5x_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/t5/__pycache__/modeling_flax_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/modeling_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/modeling_tf_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/tokenization_t5.cpython-310.pyc,, +transformers/models/t5/__pycache__/tokenization_t5_fast.cpython-310.pyc,, +transformers/models/t5/configuration_t5.py,sha256=qUYQqgNSrXX1ggcobIQKUYaTNPV-HNE0CrXN93vWiAQ,7366 +transformers/models/t5/convert_t5_original_tf_checkpoint_to_pytorch.py,sha256=83tKCwYRSRW7zXtm9cmszqtPhpw44cH8Cj0SWUSBgN0,2120 +transformers/models/t5/convert_t5x_checkpoint_to_flax.py,sha256=CET5s9wlNOt-VxT9eu-NOMdNS22kX6mhEZQ-ox2mLK0,10538 +transformers/models/t5/convert_t5x_checkpoint_to_pytorch.py,sha256=GTF0FYHDDDBl2tcYgHcirqHOI2KOE2YkDG4ekzjh_Ao,10483 +transformers/models/t5/modeling_flax_t5.py,sha256=QhELmI-3YNpbMz75xqrUxTLCrPgYowKh0pJVaiJvDCo,74166 +transformers/models/t5/modeling_t5.py,sha256=hA3NJN2Goy2qXXY7Rw0fOiyYZszhDSsgXfZ7RzPNwyY,108657 +transformers/models/t5/modeling_tf_t5.py,sha256=I8tf-3Fqmuvz2oY0_fgJ9eW5EMzgsx1cMd69zuJrE3Q,77178 +transformers/models/t5/tokenization_t5.py,sha256=i5JinhKDrDAAZAu4u5rDQkHTrT9H6r9mG2v-QmwWZ1s,20017 +transformers/models/t5/tokenization_t5_fast.py,sha256=3pG5tJIw9Kfz4B18U4NVjiNjnHNCB20sERqLv-EwgGk,10112 +transformers/models/table_transformer/__init__.py,sha256=WHdzgCB7BwXZeZveOSQ2fBQKNsrsRmpdP1f5C2MfYn4,2065 +transformers/models/table_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/table_transformer/__pycache__/configuration_table_transformer.cpython-310.pyc,, +transformers/models/table_transformer/__pycache__/convert_table_transformer_to_hf.cpython-310.pyc,, +transformers/models/table_transformer/__pycache__/convert_table_transformer_to_hf_no_timm.cpython-310.pyc,, +transformers/models/table_transformer/__pycache__/modeling_table_transformer.cpython-310.pyc,, +transformers/models/table_transformer/configuration_table_transformer.py,sha256=CqNTrOoDKW1sq64DPASZC2iZ_S_ez6sjzSTIY0Wr9As,13345 +transformers/models/table_transformer/convert_table_transformer_to_hf.py,sha256=ItWZNI8n3yj-0fP-kbly0kq8yrb7Bc5Nz2HeInHnPdA,15095 +transformers/models/table_transformer/convert_table_transformer_to_hf_no_timm.py,sha256=IJWfYRPya5zeVUqynktWlkiD7seeQdyU4kagQFXV4pU,21186 +transformers/models/table_transformer/modeling_table_transformer.py,sha256=0nyr-s-i2vHvIXyuTWm-eL_BsBA2ft4Zorht0bom3_0,95298 +transformers/models/tapas/__init__.py,sha256=uGhdu01xgzBDD5edwGpuFl94A2WmFd6FA_U2YWJZReA,2952 +transformers/models/tapas/__pycache__/__init__.cpython-310.pyc,, +transformers/models/tapas/__pycache__/configuration_tapas.cpython-310.pyc,, +transformers/models/tapas/__pycache__/convert_tapas_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/tapas/__pycache__/modeling_tapas.cpython-310.pyc,, +transformers/models/tapas/__pycache__/modeling_tf_tapas.cpython-310.pyc,, +transformers/models/tapas/__pycache__/tokenization_tapas.cpython-310.pyc,, +transformers/models/tapas/configuration_tapas.py,sha256=OBJ7wt_KLCVdnONe5t3aDU8TawqPwBXL9ck3Zq6JN4k,12361 
+transformers/models/tapas/convert_tapas_original_tf_checkpoint_to_pytorch.py,sha256=OeIyLEtDJr1z2BEKH0bJNJOR5ZrxRyGM8RpMSC3TgHQ,5049 +transformers/models/tapas/modeling_tapas.py,sha256=1MBm057_AUQUDHRHaFFaWjC4YzCvTeU8rAprTd4fE_w,110372 +transformers/models/tapas/modeling_tf_tapas.py,sha256=VOGS49RrTRyVf4cC9rSqESnxwlgK8VD53U-CjqeMIQU,112090 +transformers/models/tapas/tokenization_tapas.py,sha256=H3oMFMjQbcG0miRUN1Mb2GbLB8GgcTtjs3xPALbCIdU,117025 +transformers/models/time_series_transformer/__init__.py,sha256=dtXXYFY750gxXLggZYQWy2iaq88scX8TYl021UEZHVs,2069 +transformers/models/time_series_transformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/time_series_transformer/__pycache__/configuration_time_series_transformer.cpython-310.pyc,, +transformers/models/time_series_transformer/__pycache__/modeling_time_series_transformer.cpython-310.pyc,, +transformers/models/time_series_transformer/configuration_time_series_transformer.py,sha256=4R_JEPblfWt4O4uKeeb_QMa70E2clQE_31mU4uS-ZCk,11773 +transformers/models/time_series_transformer/modeling_time_series_transformer.py,sha256=V76hRofPT1LsDYQ5KY3lUP3KR26vG4YDDBj1H_hl5qg,88669 +transformers/models/timesformer/__init__.py,sha256=eugQ_QcHxuxaGByRRLWyZZ_0ic66Mcz5qdwW_Qt-Nyg,1862 +transformers/models/timesformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/timesformer/__pycache__/configuration_timesformer.cpython-310.pyc,, +transformers/models/timesformer/__pycache__/convert_timesformer_to_pytorch.cpython-310.pyc,, +transformers/models/timesformer/__pycache__/modeling_timesformer.cpython-310.pyc,, +transformers/models/timesformer/configuration_timesformer.py,sha256=mc2SDsVJVLD0DyIZoSUHWVbFWZjyrzhJAkWqvF2yyAY,5638 +transformers/models/timesformer/convert_timesformer_to_pytorch.py,sha256=TjOfPbEC4oVb5tlOgU2m9g36OBizDEEjm0bbcZz6Mq8,10176 +transformers/models/timesformer/modeling_timesformer.py,sha256=pWWTP1SPSvaGPjwl91oIXZk_FImXJ54d5wFabm2ZNxA,35253 +transformers/models/timm_backbone/__init__.py,sha256=rn9y1wXicP1g6IiI_tSWu7fnt5q_x6hfu3g9yQvovEU,1624 +transformers/models/timm_backbone/__pycache__/__init__.cpython-310.pyc,, +transformers/models/timm_backbone/__pycache__/configuration_timm_backbone.cpython-310.pyc,, +transformers/models/timm_backbone/__pycache__/modeling_timm_backbone.cpython-310.pyc,, +transformers/models/timm_backbone/configuration_timm_backbone.py,sha256=PR-F13KbCSBdKgA8ASNh-gok8TLUFY1_7ke32AaasmA,3153 +transformers/models/timm_backbone/modeling_timm_backbone.py,sha256=AXDH5tWEWZYY7mTOWCwsiEvoImk-NdXBLw-EUEMqH4M,6614 +transformers/models/trocr/__init__.py,sha256=jevvndvNkGFaA2smYGtlhOnpGG5U6gIhmuwONgXNyeM,1818 +transformers/models/trocr/__pycache__/__init__.cpython-310.pyc,, +transformers/models/trocr/__pycache__/configuration_trocr.cpython-310.pyc,, +transformers/models/trocr/__pycache__/convert_trocr_unilm_to_pytorch.cpython-310.pyc,, +transformers/models/trocr/__pycache__/modeling_trocr.cpython-310.pyc,, +transformers/models/trocr/__pycache__/processing_trocr.cpython-310.pyc,, +transformers/models/trocr/configuration_trocr.py,sha256=O8vsr9zsshJLWLlyOXnhsukb93CbYmrYgCcD9U4ZP5c,6620 +transformers/models/trocr/convert_trocr_unilm_to_pytorch.py,sha256=7I6jyQ1hl9k_fweOgeMgKypDSSf4zL-7tjIoY09sprk,10166 +transformers/models/trocr/modeling_trocr.py,sha256=dpkbnfRc_KNxUk4WfxoYtcxmsJ5BsmvGs-jsnuQCJ1E,45377 +transformers/models/trocr/processing_trocr.py,sha256=-iyJv7DCOlG-iKtKhtKmgbQKyU4eGydKGJDeLmBFML4,5745 +transformers/models/tvlt/__init__.py,sha256=3hHJeODpJMJ9_06AAz0fAV7QCRljLoJcfXc69YypO9M,2687 
+transformers/models/tvlt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/tvlt/__pycache__/configuration_tvlt.cpython-310.pyc,, +transformers/models/tvlt/__pycache__/feature_extraction_tvlt.cpython-310.pyc,, +transformers/models/tvlt/__pycache__/image_processing_tvlt.cpython-310.pyc,, +transformers/models/tvlt/__pycache__/modeling_tvlt.cpython-310.pyc,, +transformers/models/tvlt/__pycache__/processing_tvlt.cpython-310.pyc,, +transformers/models/tvlt/configuration_tvlt.py,sha256=DNVoahCkmhinPBR6ELUSZ3ydsYmd4J-gqoEsUCOFdfA,8718 +transformers/models/tvlt/feature_extraction_tvlt.py,sha256=peyeHHDn8S6X6bQIf3rWs4fWwPYSjabGC0f106x35W4,10555 +transformers/models/tvlt/image_processing_tvlt.py,sha256=D7MBYY1GG8_FRtnxy6UQ_dmeCJIVhZrf7GtzvOX1A80,20085 +transformers/models/tvlt/modeling_tvlt.py,sha256=7uX3H2gkh0nrZhiGkad-qBUODceNNSTq35R8uXctBrM,57371 +transformers/models/tvlt/processing_tvlt.py,sha256=JaLjfV68tRz-Ts55YzccFCltQO4yZDTNW6DAreychSQ,3506 +transformers/models/tvp/__init__.py,sha256=nMCJ05vKe35hpbNHygmLeBkYUXDH2ZZLB5U5Ij0DG6A,2366 +transformers/models/tvp/__pycache__/__init__.cpython-310.pyc,, +transformers/models/tvp/__pycache__/configuration_tvp.cpython-310.pyc,, +transformers/models/tvp/__pycache__/image_processing_tvp.cpython-310.pyc,, +transformers/models/tvp/__pycache__/modeling_tvp.cpython-310.pyc,, +transformers/models/tvp/__pycache__/processing_tvp.cpython-310.pyc,, +transformers/models/tvp/configuration_tvp.py,sha256=-ubk9NYjQwXVQTAebBSlYB1fpiKe1GYYa0xLgIeJm2E,10107 +transformers/models/tvp/image_processing_tvp.py,sha256=SiQUmjVpDimWZz_U-4U4rGX6iOw8Qh_WD5PZ5LAu70w,23178 +transformers/models/tvp/modeling_tvp.py,sha256=MxvrQ4iy4V1AApzRUsC5Cer0CudLYg55bcDgiK-17mQ,38791 +transformers/models/tvp/processing_tvp.py,sha256=6fJAgekPIOw95GpQ7b1_y76KGbC03upX9uH8XlbGdKE,6981 +transformers/models/udop/__init__.py,sha256=78SSiXPuOw6Y1OrVRWawWtLCcV3-vKqZLqKi7rWoQ4M,2864 +transformers/models/udop/__pycache__/__init__.cpython-310.pyc,, +transformers/models/udop/__pycache__/configuration_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/convert_udop_to_hf.cpython-310.pyc,, +transformers/models/udop/__pycache__/modeling_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/processing_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/tokenization_udop.cpython-310.pyc,, +transformers/models/udop/__pycache__/tokenization_udop_fast.cpython-310.pyc,, +transformers/models/udop/configuration_udop.py,sha256=qydT2Xc0oZ38m1MKJMVJwZqXLJUlPRFGwZZ8rgzyxVs,7747 +transformers/models/udop/convert_udop_to_hf.py,sha256=oPyHBW-tWHhWidgG9JGOl3e0s8vpF-xM1uZ8ecV-IEI,14414 +transformers/models/udop/modeling_udop.py,sha256=OWuvU145FRqbL-LAbLOkOFBHmYfomqUIpvGlFLAFIqg,94428 +transformers/models/udop/processing_udop.py,sha256=4r21EuC0M2gF5GAl9EuSiQ5l80sv7TjiEP_v6J5saqc,10119 +transformers/models/udop/tokenization_udop.py,sha256=hz6Ujnim0Ck6wBSjKAPNElw5gkW9EpprCHW5QRPE1Qw,71020 +transformers/models/udop/tokenization_udop_fast.py,sha256=kNTtZNUjVgGKgWeulE8Lq4hMAHtCWbRFPPU3hO65UpM,49159 +transformers/models/umt5/__init__.py,sha256=wcKbkdS_suuZCQs52Oz0lBegIa0QDSPZW2Q-XBpM3ns,1908 +transformers/models/umt5/__pycache__/__init__.cpython-310.pyc,, +transformers/models/umt5/__pycache__/configuration_umt5.cpython-310.pyc,, +transformers/models/umt5/__pycache__/convert_umt5_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/umt5/__pycache__/modeling_umt5.cpython-310.pyc,, +transformers/models/umt5/configuration_umt5.py,sha256=qXrQJ7Nwj1G6q8zo6UpqZ9HoVgbGDuOvdFUU63Qf7BA,7636 
+transformers/models/umt5/convert_umt5_checkpoint_to_pytorch.py,sha256=mKcFjDTUYzC4S2faD9UMTQTIl5nwGbOp4QkcFxEEdv8,12070 +transformers/models/umt5/modeling_umt5.py,sha256=UZ7AFCi3sYo1ilhRVPADZ7R_RqxhW1R3xhg0akfbiII,86424 +transformers/models/unispeech/__init__.py,sha256=n4jtlc-pPF37uUx7mgB1GDnL2lQ-eKDI8xOLVVp840E,2018 +transformers/models/unispeech/__pycache__/__init__.cpython-310.pyc,, +transformers/models/unispeech/__pycache__/configuration_unispeech.cpython-310.pyc,, +transformers/models/unispeech/__pycache__/convert_unispeech_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/unispeech/__pycache__/modeling_unispeech.cpython-310.pyc,, +transformers/models/unispeech/configuration_unispeech.py,sha256=1tUJoio0_kMqDZFGf9-P30P_IzWk8DJcW4VY4Z1jJnA,17556 +transformers/models/unispeech/convert_unispeech_original_pytorch_checkpoint_to_pytorch.py,sha256=bwfIAusfhFih5WJEIIokApShfuYhJoirPltvRz2-T7Y,11340 +transformers/models/unispeech/modeling_unispeech.py,sha256=9ACsIJ7h3ys4izC483Pyg8t9Oe8BFALEgeYsdXYivIk,72582 +transformers/models/unispeech_sat/__init__.py,sha256=gAf8t9qZaufCDyIyJICzCQTvrmV825BDZUKQoa08DhE,2267 +transformers/models/unispeech_sat/__pycache__/__init__.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/configuration_unispeech_sat.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/convert_unispeech_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/convert_unispeech_sat_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/unispeech_sat/__pycache__/modeling_unispeech_sat.cpython-310.pyc,, +transformers/models/unispeech_sat/configuration_unispeech_sat.py,sha256=eFDK8KQX0hkBC1EIhbiU1H2pGw3KlgRpXUWnRQD-Dvo,18902 +transformers/models/unispeech_sat/convert_unispeech_original_s3prl_checkpoint_to_pytorch.py,sha256=CnSYjNr7S7Mqa7Feosf9Dx7eQTYScVHG-QprNkY8uLk,4870 +transformers/models/unispeech_sat/convert_unispeech_sat_original_pytorch_checkpoint_to_pytorch.py,sha256=NK_vA71Eq2q9P1x3ol-2Jlqjkv-Mi3NlXO9Ra7QUQsQ,9289 +transformers/models/unispeech_sat/modeling_unispeech_sat.py,sha256=NKUhfHTttkSEfnmCoxPeHRstxh6mTkdIWZy1kf1Z_IQ,86732 +transformers/models/univnet/__init__.py,sha256=aeEydP4QFet-MOxxwOZMKE-jGUG1spoCfXwMmESP27Y,1842 +transformers/models/univnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/univnet/__pycache__/configuration_univnet.cpython-310.pyc,, +transformers/models/univnet/__pycache__/convert_univnet.cpython-310.pyc,, +transformers/models/univnet/__pycache__/feature_extraction_univnet.cpython-310.pyc,, +transformers/models/univnet/__pycache__/modeling_univnet.cpython-310.pyc,, +transformers/models/univnet/configuration_univnet.py,sha256=eCemBN3MVYdqCoS4svOqO_R5hh8PhY4zVf_Ncqzcw94,6828 +transformers/models/univnet/convert_univnet.py,sha256=R2gqXfz8Oq2rwIUU01V7T_oSoDGG2A4Gety-R80Yn24,6364 +transformers/models/univnet/feature_extraction_univnet.py,sha256=snAVdQ5ClFX_Sw7upgvWyzJq4bUNRelRQaxcWxgHIgA,22821 +transformers/models/univnet/modeling_univnet.py,sha256=DaGTSHqwEXfp-nuUcCxbtaKVVS9s5jQFgAQ3uY40_n4,26874 +transformers/models/upernet/__init__.py,sha256=z2avy6tP_WpANiGPA5RCxT_9yPp0PfEDlfUjL9rQsXM,1535 +transformers/models/upernet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/upernet/__pycache__/configuration_upernet.cpython-310.pyc,, +transformers/models/upernet/__pycache__/convert_convnext_upernet_to_pytorch.cpython-310.pyc,, +transformers/models/upernet/__pycache__/convert_swin_upernet_to_pytorch.cpython-310.pyc,, 
+transformers/models/upernet/__pycache__/modeling_upernet.cpython-310.pyc,, +transformers/models/upernet/configuration_upernet.py,sha256=SoforpobnR_iSTAHHWAOON_zUZ8F5674SqjDMVyy2Ts,6719 +transformers/models/upernet/convert_convnext_upernet_to_pytorch.py,sha256=l_CJoXwANEE9rm5mwpHwbusIoJLmN8jNGjxsj6WhZrk,10271 +transformers/models/upernet/convert_swin_upernet_to_pytorch.py,sha256=lHV8SE_bZnxOo-zEJ21S2nY449uPVc3bpcl2JGKNEjA,14026 +transformers/models/upernet/modeling_upernet.py,sha256=_D-8NctwkTf3hfuAhawE3DEw_WjeOF8C31MkBLhJXWQ,17136 +transformers/models/videomae/__init__.py,sha256=Yrv0_yOkvyL6slti-bw1oFR8t8VO8-6b40yF0Lf2uV4,2519 +transformers/models/videomae/__pycache__/__init__.cpython-310.pyc,, +transformers/models/videomae/__pycache__/configuration_videomae.cpython-310.pyc,, +transformers/models/videomae/__pycache__/convert_videomae_to_pytorch.cpython-310.pyc,, +transformers/models/videomae/__pycache__/feature_extraction_videomae.cpython-310.pyc,, +transformers/models/videomae/__pycache__/image_processing_videomae.cpython-310.pyc,, +transformers/models/videomae/__pycache__/modeling_videomae.cpython-310.pyc,, +transformers/models/videomae/configuration_videomae.py,sha256=shuybmQw9SgQTm4r8TokU0kunxMqwhYgAnXHFbFhxbQ,6670 +transformers/models/videomae/convert_videomae_to_pytorch.py,sha256=rq2nT2ZJekra1G38kM2DH_qOvcZBDQFNgbCvH3mKZjY,13989 +transformers/models/videomae/feature_extraction_videomae.py,sha256=Hg5wmFhkbncqR3nfvtevV6msaUEqvLBf4mtO4aICYTI,1200 +transformers/models/videomae/image_processing_videomae.py,sha256=yMZGcXFd8YmK1uwf9tqOFtvild9yOAf8rJeXVxX3oNo,17000 +transformers/models/videomae/modeling_videomae.py,sha256=6xS4RxjkkQD-lXg4ZUwo4N_zVT_oa24pmEozn3mIE18,47382 +transformers/models/vilt/__init__.py,sha256=-fruuGWD0urXmb7STgXnrF3QY8J6Z6lfJuTneeL_BsM,2788 +transformers/models/vilt/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vilt/__pycache__/configuration_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/convert_vilt_original_to_pytorch.cpython-310.pyc,, +transformers/models/vilt/__pycache__/feature_extraction_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/image_processing_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/modeling_vilt.cpython-310.pyc,, +transformers/models/vilt/__pycache__/processing_vilt.cpython-310.pyc,, +transformers/models/vilt/configuration_vilt.py,sha256=HxGlUnrFYqyePPCmDqdSKBaJTm-FlLeU1h5SR612N9E,6885 +transformers/models/vilt/convert_vilt_original_to_pytorch.py,sha256=IUSgkjLMZRUBuozW7OzL6TtD_jkO7ZfH51H6x6Qgjdk,12882 +transformers/models/vilt/feature_extraction_vilt.py,sha256=dC0Glwc_rDX7zqp8BxRtzaLogQGI4I4CjQCgxU7UORw,1172 +transformers/models/vilt/image_processing_vilt.py,sha256=9U68LczTq1t6iLBT46MCGeQ5PHJxcb9zjmgJHFtN8qg,23619 +transformers/models/vilt/modeling_vilt.py,sha256=LJte9B6mu9yuXmlm_oxaYLBJMG3yrP4MwueehfxiYD8,64971 +transformers/models/vilt/processing_vilt.py,sha256=0iOal8dCaE7JCQlZjbJ1-sHGxpDPZgUkMowEbxFRF2Q,6079 +transformers/models/vipllava/__init__.py,sha256=6lR_RtZD-Jzj6ZMOjo3JYuFRaBjVKmXquzPOB38z33k,1740 +transformers/models/vipllava/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vipllava/__pycache__/configuration_vipllava.cpython-310.pyc,, +transformers/models/vipllava/__pycache__/convert_vipllava_weights_to_hf.cpython-310.pyc,, +transformers/models/vipllava/__pycache__/modeling_vipllava.cpython-310.pyc,, +transformers/models/vipllava/configuration_vipllava.py,sha256=UV6zEz55NxeJvaq4SgIdNbTqGymDgJkISbDQAxgOyXc,5813 
+transformers/models/vipllava/convert_vipllava_weights_to_hf.py,sha256=u64-lOXDE0JMGhkGYJEtyrOh3gpeJtxSDC_dC08mc2c,4794 +transformers/models/vipllava/modeling_vipllava.py,sha256=YK7xeHpgR0m7muwu1a-VISpySVtJ0ERmv0s6Fc9TnBQ,29871 +transformers/models/vision_encoder_decoder/__init__.py,sha256=IRQsS-4Bz-cm6B97rSoeC62Z1l1wns0XVDZwBn1KBIU,2627 +transformers/models/vision_encoder_decoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/configuration_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_flax_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_tf_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/__pycache__/modeling_vision_encoder_decoder.cpython-310.pyc,, +transformers/models/vision_encoder_decoder/configuration_vision_encoder_decoder.py,sha256=6x7tdTBOrsvKOMy12NCtbPatY2qaqOJaVIGGxy3uPDw,8273 +transformers/models/vision_encoder_decoder/modeling_flax_vision_encoder_decoder.py,sha256=q2Tzd_KS4rB81YZk3zzb3KjtghP6vaPY4Snz_Kh52qQ,41535 +transformers/models/vision_encoder_decoder/modeling_tf_vision_encoder_decoder.py,sha256=-7ASqN2Qu4Ehcwr0WF0MTnrb28Fj3fCGFzGinhuQXak,36239 +transformers/models/vision_encoder_decoder/modeling_vision_encoder_decoder.py,sha256=TN-V-wtM_3E9psma7_p-GWcJL8nB4wHmhXDB_cMbKAY,34606 +transformers/models/vision_text_dual_encoder/__init__.py,sha256=kULrtY2Ie2eigdn63xnoEqRUlmKm31D9mUCJs4F62Lo,2730 +transformers/models/vision_text_dual_encoder/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/configuration_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_flax_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_tf_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/modeling_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/__pycache__/processing_vision_text_dual_encoder.cpython-310.pyc,, +transformers/models/vision_text_dual_encoder/configuration_vision_text_dual_encoder.py,sha256=E7pT_zGc0uq9uzfKSBE6QiYjgSAotq0zYuC1bnzE5F0,4895 +transformers/models/vision_text_dual_encoder/modeling_flax_vision_text_dual_encoder.py,sha256=JP4ppqdIEvRfbpCtf0b3bJQcURI8YVvyTHe8wDRCRJg,26314 +transformers/models/vision_text_dual_encoder/modeling_tf_vision_text_dual_encoder.py,sha256=stdg94SN9NhHPelgqWBOJt-X7c4fBohXcBhDIl_TE68,28641 +transformers/models/vision_text_dual_encoder/modeling_vision_text_dual_encoder.py,sha256=kqB-zueOo28U1qXKRoR1njEyX6xRm45r0faBUKYH4wQ,24939 +transformers/models/vision_text_dual_encoder/processing_vision_text_dual_encoder.py,sha256=Wxw-ShdBxDkWK76hcJjHrvySp-uW0yrTvoqWouovhy8,6929 +transformers/models/visual_bert/__init__.py,sha256=OSQEpz1R0NjH9WvGkfsXKq_9LJTGfrHscqYd2xl9S_4,2235 +transformers/models/visual_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/visual_bert/__pycache__/configuration_visual_bert.cpython-310.pyc,, +transformers/models/visual_bert/__pycache__/convert_visual_bert_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/visual_bert/__pycache__/modeling_visual_bert.cpython-310.pyc,, +transformers/models/visual_bert/configuration_visual_bert.py,sha256=jpeHAEPrt9V8wm7SwFld7ekTGkvGOZoRaYANBLEu0nA,6838 
+transformers/models/visual_bert/convert_visual_bert_original_pytorch_checkpoint_to_pytorch.py,sha256=BpXgEZ-5LdGIa0NK6BDZd_5VhKCqeWuu2oOQyUqcSRQ,5158 +transformers/models/visual_bert/modeling_visual_bert.py,sha256=KASEThu9KrqrNcC0m9klG8Xqy3reEHMamy_dXEIQjxo,69279 +transformers/models/vit/__init__.py,sha256=Kw3Pan4rUcu6RQsA7u-DpxMlmbzdmrA7GA3ha3nYO5k,3598 +transformers/models/vit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vit/__pycache__/configuration_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/convert_dino_to_pytorch.cpython-310.pyc,, +transformers/models/vit/__pycache__/convert_vit_timm_to_pytorch.cpython-310.pyc,, +transformers/models/vit/__pycache__/feature_extraction_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/image_processing_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/modeling_flax_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/modeling_tf_vit.cpython-310.pyc,, +transformers/models/vit/__pycache__/modeling_vit.cpython-310.pyc,, +transformers/models/vit/configuration_vit.py,sha256=EU6gd1A2VfAlpr3LEVl_iIGCgKNfuH99psLBaxDCNOw,5708 +transformers/models/vit/convert_dino_to_pytorch.py,sha256=CIkbWDBEgW5jmSWWoPZOosLLqCFiUz8oYgnj48JdtSM,8854 +transformers/models/vit/convert_vit_timm_to_pytorch.py,sha256=LY_UklTkw47xwnCcY8AzVFH-6g5B8t3GTuQ0PbyZyn0,10890 +transformers/models/vit/feature_extraction_vit.py,sha256=R-W_HNOybSpKxKGKfo4iDB4zGTRHeW1cq-29iwnbVl4,1165 +transformers/models/vit/image_processing_vit.py,sha256=ZTVKB_q7T0qGYcQG6VnMVxzog4VUwTKiC10-nFrUoyY,14185 +transformers/models/vit/modeling_flax_vit.py,sha256=KsTqlse5b5euRgYXhrXoNqCNvo0LEPBGuU_b0uNO0yo,25340 +transformers/models/vit/modeling_tf_vit.py,sha256=Ycwa5F6KssyHFtmpVhbOxq2XY1q36PX8wzOVGrOlgqA,37328 +transformers/models/vit/modeling_vit.py,sha256=QC7XTMBhtUyv1Csn2SrCOxB_kI7BsaNiFm65in4G75s,35594 +transformers/models/vit_hybrid/__init__.py,sha256=kJffDq49Rz34fkQnLISzCp18xqXkVFOIWciOsZMjc2I,2316 +transformers/models/vit_hybrid/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vit_hybrid/__pycache__/configuration_vit_hybrid.cpython-310.pyc,, +transformers/models/vit_hybrid/__pycache__/convert_vit_hybrid_timm_to_pytorch.cpython-310.pyc,, +transformers/models/vit_hybrid/__pycache__/image_processing_vit_hybrid.cpython-310.pyc,, +transformers/models/vit_hybrid/__pycache__/modeling_vit_hybrid.cpython-310.pyc,, +transformers/models/vit_hybrid/configuration_vit_hybrid.py,sha256=zqfiKBj316i2Bapp2cCnIJM28XStCBVXuM6fhnVJwSs,8330 +transformers/models/vit_hybrid/convert_vit_hybrid_timm_to_pytorch.py,sha256=MymDN5E1N5g1g5k0mK0M-F2VeYy_Me-hRWdVNTRFocA,13413 +transformers/models/vit_hybrid/image_processing_vit_hybrid.py,sha256=aeHej-2dOTuFlDFTObIZH8hG1HXoefpyQPgH3owjo9A,16390 +transformers/models/vit_hybrid/modeling_vit_hybrid.py,sha256=Q4TUzxCKvFQCh2YmS7_WA6s-cwj53X4NtZ99imU3e90,31865 +transformers/models/vit_mae/__init__.py,sha256=-w9MTkUgGkYCX6q37upqBk7x-8g247YxYGVVAEJkIzk,2428 +transformers/models/vit_mae/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vit_mae/__pycache__/configuration_vit_mae.cpython-310.pyc,, +transformers/models/vit_mae/__pycache__/convert_vit_mae_to_pytorch.cpython-310.pyc,, +transformers/models/vit_mae/__pycache__/modeling_tf_vit_mae.cpython-310.pyc,, +transformers/models/vit_mae/__pycache__/modeling_vit_mae.cpython-310.pyc,, +transformers/models/vit_mae/configuration_vit_mae.py,sha256=1fEivwPlBPZdnAt-CMbSHCkocm1NFjkn4-BJ2ar2aaY,6443 
+transformers/models/vit_mae/convert_vit_mae_to_pytorch.py,sha256=Nj4Y5LS8H7xbyWNeLE9Vn0NFyXSQQYEcj1QQMzN1Hdg,7516 +transformers/models/vit_mae/modeling_tf_vit_mae.py,sha256=QBtXTmOdrC21lPOajqx6WCWKl2JVeDpeUM31oUFMYJ8,52979 +transformers/models/vit_mae/modeling_vit_mae.py,sha256=jidR9wteggmQ3Km2l8mO9W6bKkPlx_cWHNqw1vZdau8,42771 +transformers/models/vit_msn/__init__.py,sha256=4VVe0aSuBzHjTg4X2nuVet-9DgD5_dWlFkbLAr4bilc,1783 +transformers/models/vit_msn/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vit_msn/__pycache__/configuration_vit_msn.cpython-310.pyc,, +transformers/models/vit_msn/__pycache__/convert_msn_to_pytorch.cpython-310.pyc,, +transformers/models/vit_msn/__pycache__/modeling_vit_msn.cpython-310.pyc,, +transformers/models/vit_msn/configuration_vit_msn.py,sha256=pJwLjgbPa6005C1Dsu6okI9xtjZ8c30Ktbx5Rz2D1r8,4936 +transformers/models/vit_msn/convert_msn_to_pytorch.py,sha256=1xBjqvbviFkGxhi_xq2956R7qZpFEBdKPNOQYb-SoIA,9841 +transformers/models/vit_msn/modeling_vit_msn.py,sha256=uQBGkcoCD2fEYkkUKRQuYBKJOowraBF4En3nvx503PQ,29649 +transformers/models/vitdet/__init__.py,sha256=Vaafapb4IUbKPzQUqPjhX6nvt14CTKlV51QneeQpTmc,1764 +transformers/models/vitdet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vitdet/__pycache__/configuration_vitdet.cpython-310.pyc,, +transformers/models/vitdet/__pycache__/modeling_vitdet.cpython-310.pyc,, +transformers/models/vitdet/configuration_vitdet.py,sha256=lg4mW4J0xANSdpwR-RFxnnwtea04yn1n_cojrlUXbDg,7612 +transformers/models/vitdet/modeling_vitdet.py,sha256=1pfKZly7hJQwqzteIg-0GoIKL_V_6YvbTBnimwrdn4o,34925 +transformers/models/vitmatte/__init__.py,sha256=tl-h8_VOAHRT7VtJJJ-SFSl5lkHxfVEdDaCtm4ksJIg,2239 +transformers/models/vitmatte/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/configuration_vitmatte.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/convert_vitmatte_to_hf.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/image_processing_vitmatte.cpython-310.pyc,, +transformers/models/vitmatte/__pycache__/modeling_vitmatte.cpython-310.pyc,, +transformers/models/vitmatte/configuration_vitmatte.py,sha256=znJb9kFDk2x-5qTzSNJXP-snhRz8aefBcjO3mJFYgxw,6443 +transformers/models/vitmatte/convert_vitmatte_to_hf.py,sha256=1xctm78nmCLelPMqGJepxSyq5saKgA4by5CTzyxRPvc,6404 +transformers/models/vitmatte/image_processing_vitmatte.py,sha256=xeHDZXC_dJIBwbCt93GZlIJSMtgCaKZTyni3TiITGl8,13844 +transformers/models/vitmatte/modeling_vitmatte.py,sha256=xD57245zAanRx2O_N3dVHj6GFY8Ju03rcZp7fbZyH0M,12824 +transformers/models/vits/__init__.py,sha256=JoVFhlJ0-hhxN3ND-JsESyEcsihDbT6j0WPmIH9DjCA,1887 +transformers/models/vits/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vits/__pycache__/configuration_vits.cpython-310.pyc,, +transformers/models/vits/__pycache__/convert_original_checkpoint.cpython-310.pyc,, +transformers/models/vits/__pycache__/modeling_vits.cpython-310.pyc,, +transformers/models/vits/__pycache__/tokenization_vits.cpython-310.pyc,, +transformers/models/vits/configuration_vits.py,sha256=CveTctkJe70Jj99XrxpEYZHtWHHRQDWKHOryGwWgkiA,13955 +transformers/models/vits/convert_original_checkpoint.py,sha256=N6rRzBaJlMxRwT7u33kUyJKy-4fFTWTD6nu_RTTOGt0,18610 +transformers/models/vits/modeling_vits.py,sha256=cAOA8IBL_kwvhlSl7hJuSAtQ8UrPXZFr-N9R_j4xmN0,66229 +transformers/models/vits/tokenization_vits.py,sha256=jpRsDM97USMwMqZOy_Uf5DriSRkRtc5wG4CcBGcjq_A,8951 +transformers/models/vivit/__init__.py,sha256=Ajx0pvLrGGMBJruIaFHvqJiQyAM9BI9qLRi-5kyRT10,2441 
+transformers/models/vivit/__pycache__/__init__.cpython-310.pyc,, +transformers/models/vivit/__pycache__/configuration_vivit.cpython-310.pyc,, +transformers/models/vivit/__pycache__/convert_vivit_flax_to_pytorch.cpython-310.pyc,, +transformers/models/vivit/__pycache__/image_processing_vivit.cpython-310.pyc,, +transformers/models/vivit/__pycache__/modeling_vivit.cpython-310.pyc,, +transformers/models/vivit/configuration_vivit.py,sha256=RLmQO66YtE6Y7dTp0zNzoYHeW2r0ocYbEmXSIkd-U9E,5212 +transformers/models/vivit/convert_vivit_flax_to_pytorch.py,sha256=yIwLQOx8eT-8AuYf_3KTfLwabCBdC1z_Z0WZDr4a7mM,9111 +transformers/models/vivit/image_processing_vivit.py,sha256=T9A7bohqmXrFK3rab9KMhtXnIQpUoFHod5GMmZPQLGw,19552 +transformers/models/vivit/modeling_vivit.py,sha256=bOUA9ovkebm7dNsv0QRkKRfNHoNTGm0R7SyGoZGTYDY,29977 +transformers/models/wav2vec2/__init__.py,sha256=eN9LbGY56T2Kz38zw3ChsiOkOHprtc4CgQjT8DSrUds,4139 +transformers/models/wav2vec2/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/configuration_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/convert_wav2vec2_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/convert_wav2vec2_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/feature_extraction_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_flax_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_tf_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/modeling_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/processing_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/__pycache__/tokenization_wav2vec2.cpython-310.pyc,, +transformers/models/wav2vec2/configuration_wav2vec2.py,sha256=197c8uGexJC97Lzt9jFU5xzFdXCnDImJwSr81-LROdg,20149 +transformers/models/wav2vec2/convert_wav2vec2_original_pytorch_checkpoint_to_pytorch.py,sha256=hhc_QSStY43_pj4bIQf0TUWfiJo1KGkPuMTl16dP-ng,14293 +transformers/models/wav2vec2/convert_wav2vec2_original_s3prl_checkpoint_to_pytorch.py,sha256=CMjcWPEsvvPpX-OlMUJQxHNDErbJbDVqVSCoqo-9hDk,4838 +transformers/models/wav2vec2/feature_extraction_wav2vec2.py,sha256=D-yqFIpwjn_7LYJUmdnelRsn4qsoUrkZGX4Qsp5Y9CY,11511 +transformers/models/wav2vec2/modeling_flax_wav2vec2.py,sha256=iLm6d5m0LYQs0qKqg3Tdx7I6vgCB5QCmFY6MYrKu0RA,57331 +transformers/models/wav2vec2/modeling_tf_wav2vec2.py,sha256=D1izr4FsmoI_9NIDaQZ6M6AzPK7XvbMHG8zE00EUfRI,78708 +transformers/models/wav2vec2/modeling_wav2vec2.py,sha256=UmQRi14Gah0qPvR7pSLKc0rxNfD4hvJ4BxgVpkARKLQ,106697 +transformers/models/wav2vec2/processing_wav2vec2.py,sha256=82JBzFgQxV5ZQgRYmMj3gqf3pxL8Q8nfdwnhsuUUZjU,7137 +transformers/models/wav2vec2/tokenization_wav2vec2.py,sha256=ck1JrMUbJ06enzXRHaP-qFbAOWO6AdYUc1Bj116arFw,38384 +transformers/models/wav2vec2_bert/__init__.py,sha256=yBuhwgvNayh1tKpyXnLCSmw877fgVbtI16Xag8BK6Wo,2300 +transformers/models/wav2vec2_bert/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/configuration_wav2vec2_bert.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/convert_wav2vec2_seamless_checkpoint.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/modeling_wav2vec2_bert.cpython-310.pyc,, +transformers/models/wav2vec2_bert/__pycache__/processing_wav2vec2_bert.cpython-310.pyc,, +transformers/models/wav2vec2_bert/configuration_wav2vec2_bert.py,sha256=O38bgI-_nHVAjD7HZdazAit6da0SEdjEwE1Ow8TjaI4,18182 
+transformers/models/wav2vec2_bert/convert_wav2vec2_seamless_checkpoint.py,sha256=MFwGdbwNt4jDlGDG6cc9T5PhKEd-PjFMUOci533PLG8,7420 +transformers/models/wav2vec2_bert/modeling_wav2vec2_bert.py,sha256=EJswXn0y88fJkdGBVn3NszOUGxF9nG_L-PMHhO4kepU,74577 +transformers/models/wav2vec2_bert/processing_wav2vec2_bert.py,sha256=DWMQCIdzOHFXFQA8ReGS-HLHfQYhUTpuj7jLMHZ8th0,7449 +transformers/models/wav2vec2_conformer/__init__.py,sha256=w6Z-Rd5ONNTFI-ioN5VvNPhW842-_rKASoHN6lGeJx4,2375 +transformers/models/wav2vec2_conformer/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_conformer/__pycache__/configuration_wav2vec2_conformer.cpython-310.pyc,, +transformers/models/wav2vec2_conformer/__pycache__/convert_wav2vec2_conformer_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wav2vec2_conformer/__pycache__/modeling_wav2vec2_conformer.cpython-310.pyc,, +transformers/models/wav2vec2_conformer/configuration_wav2vec2_conformer.py,sha256=m9LH8bwBZl9DRD2bkVgrCsZsUPqAuGbqhzfvkSjrPMk,20961 +transformers/models/wav2vec2_conformer/convert_wav2vec2_conformer_original_pytorch_checkpoint_to_pytorch.py,sha256=D8rojgR8DRaqVTZwYXd2qykIKlKf7EnMM6h3PzYPS0M,13382 +transformers/models/wav2vec2_conformer/modeling_wav2vec2_conformer.py,sha256=V2x9vTprzKy6HUMucDeF6aNrXJQT1tQuS09Qd3yXZkg,95597 +transformers/models/wav2vec2_phoneme/__init__.py,sha256=E2xRyViyzCISV8XE7YQ1gx5Wlx9_ACoPDB6ZZEm9bWo,993 +transformers/models/wav2vec2_phoneme/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_phoneme/__pycache__/tokenization_wav2vec2_phoneme.cpython-310.pyc,, +transformers/models/wav2vec2_phoneme/tokenization_wav2vec2_phoneme.py,sha256=Ef4t56FJIAezlTc8x5hwqWwhjCXrfI7qL_-lC6ShIbI,23162 +transformers/models/wav2vec2_with_lm/__init__.py,sha256=d_lvk8QAia4BIKN7d_Uy3HdRqrDp_ZJHTDZ-nkHKwPA,981 +transformers/models/wav2vec2_with_lm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wav2vec2_with_lm/__pycache__/processing_wav2vec2_with_lm.cpython-310.pyc,, +transformers/models/wav2vec2_with_lm/processing_wav2vec2_with_lm.py,sha256=rB38_Sef9FlkFFd_AqJwbEraRdcp5wi1fNV1e7he7F8,29522 +transformers/models/wavlm/__init__.py,sha256=puMYnJLkFpkYKq7oH_ziapvzFYZMOyTHDqpN8IxzJPw,1959 +transformers/models/wavlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/configuration_wavlm.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/convert_wavlm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/convert_wavlm_original_s3prl_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/wavlm/__pycache__/modeling_wavlm.cpython-310.pyc,, +transformers/models/wavlm/configuration_wavlm.py,sha256=UqozZdZFuFUjrJlyZ9XyiMXnDZoG7QHMVH6Ct2O68-I,18634 +transformers/models/wavlm/convert_wavlm_original_pytorch_checkpoint_to_pytorch.py,sha256=tYQiS5CUNYoMWyxKnmkmDG6VW0lwapFxTrDSz4Pprm0,8580 +transformers/models/wavlm/convert_wavlm_original_s3prl_checkpoint_to_pytorch.py,sha256=Yo4K3ZxH5KXS3gCD7KTakUviJABV-gJGJHXFeV5Sc9I,4814 +transformers/models/wavlm/modeling_wavlm.py,sha256=CmZSbC2ajBx7QwhnqCoxb0atEcnXhFvUpBsje4qiG8E,78592 +transformers/models/whisper/__init__.py,sha256=Y9nksRYJ-dCwFFdnagINwcqEMrdRG7AtPKWRB4uXlmM,4346 +transformers/models/whisper/__pycache__/__init__.cpython-310.pyc,, +transformers/models/whisper/__pycache__/configuration_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/convert_openai_to_hf.cpython-310.pyc,, +transformers/models/whisper/__pycache__/english_normalizer.cpython-310.pyc,, 
+transformers/models/whisper/__pycache__/feature_extraction_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/generation_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/modeling_flax_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/modeling_tf_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/modeling_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/processing_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/tokenization_whisper.cpython-310.pyc,, +transformers/models/whisper/__pycache__/tokenization_whisper_fast.cpython-310.pyc,, +transformers/models/whisper/configuration_whisper.py,sha256=rIVNZdLcuKu9n6coSkJsRsu4ZRgiCvK6TVTAHeiPsik,17010 +transformers/models/whisper/convert_openai_to_hf.py,sha256=yKg38L_4ogsB_rM8Yti91pWeiuS-Qxq-AIqvzedK_bU,14965 +transformers/models/whisper/english_normalizer.py,sha256=MTJ16OhstprR2X8owfEJmONqkoSHHyzztENejmEhSBM,22822 +transformers/models/whisper/feature_extraction_whisper.py,sha256=KVHCcp-M_s1ibQJkauM6vxCAQmj_Gtw9w33LOq6MkP4,14347 +transformers/models/whisper/generation_whisper.py,sha256=ufmzrp1iDp2smu4Kx6tRlLYnuQgpG19nnUeGhg7FwSk,84051 +transformers/models/whisper/modeling_flax_whisper.py,sha256=s4sI__pmItZAAJxzmgU8f1jy3Dk4fAn9uGyy6TAaJnM,73587 +transformers/models/whisper/modeling_tf_whisper.py,sha256=vaRenYGUVvzdLZNYNHT73T_IPIpy524zeHCMkPaej5w,84868 +transformers/models/whisper/modeling_whisper.py,sha256=U1FJTN82j784XLlBcca2Uj0o4DcNYF5c8Qv4J_pH1v8,105522 +transformers/models/whisper/processing_whisper.py,sha256=pO6wtcywcJq-lkA2rNrdINEvj7_6fjWvAUv7HWn70gE,3891 +transformers/models/whisper/tokenization_whisper.py,sha256=IIiOLgu7JOKl0zxROi4sJsyN6ZU3MN0l3jsNXM2pdD4,54506 +transformers/models/whisper/tokenization_whisper_fast.py,sha256=2rlVcnHDxEwm6F2arkR8x_CAZUIc2PpdtAXO9qwvLvE,28841 +transformers/models/x_clip/__init__.py,sha256=zWhh0KIKf1OaB3EezBv6YkgaxTESvEesITGqhiZYgHs,2053 +transformers/models/x_clip/__pycache__/__init__.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/configuration_x_clip.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/convert_x_clip_original_pytorch_to_hf.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/modeling_x_clip.cpython-310.pyc,, +transformers/models/x_clip/__pycache__/processing_x_clip.cpython-310.pyc,, +transformers/models/x_clip/configuration_x_clip.py,sha256=6tzgPQVDl1473h_ffpqbhuqdFonlrnlw8RNyls-EA4o,20407 +transformers/models/x_clip/convert_x_clip_original_pytorch_to_hf.py,sha256=WzXe8IKqSz4Bi78EIvRA6C3QiLL4c-SpARggHjIWtt4,18066 +transformers/models/x_clip/modeling_x_clip.py,sha256=PvV64zMhStEsh3DYna4Es4ZGMOs5aqVATp94rAe_X-g,70185 +transformers/models/x_clip/processing_x_clip.py,sha256=vuwuN_pNagPMfdvGJrSbhQVTslOHBMGFgYV2xD9BHsw,6897 +transformers/models/xglm/__init__.py,sha256=gSzCOADmOA0n4CxfKEhESj32_WqQ6ae6e0QjYyaJ-gs,3871 +transformers/models/xglm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xglm/__pycache__/configuration_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/convert_xglm_original_ckpt_to_trfms.cpython-310.pyc,, +transformers/models/xglm/__pycache__/modeling_flax_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/modeling_tf_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/modeling_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/tokenization_xglm.cpython-310.pyc,, +transformers/models/xglm/__pycache__/tokenization_xglm_fast.cpython-310.pyc,, 
+transformers/models/xglm/configuration_xglm.py,sha256=2hhVobBTfmh_msU3wC7ZgJvdpOSAlyo83CcwR9Fld3U,5943 +transformers/models/xglm/convert_xglm_original_ckpt_to_trfms.py,sha256=9fjXP40nMFbiI9H0VV66Buqk9JQrPhAFERCOBYHl_7g,2325 +transformers/models/xglm/modeling_flax_xglm.py,sha256=5-ubc4mqp9vhZFUUcyy8FzwwbS_xHpIA6pWIC9keOcg,33117 +transformers/models/xglm/modeling_tf_xglm.py,sha256=CaD9Z4Vg3L-hV5st7Tf0yrSeggoEHs5aH4I2v7LpIyU,45376 +transformers/models/xglm/modeling_xglm.py,sha256=3AB9uJ08yuLM-lxwuYSo5ccaZK9wAwDaUV6pMPZRdpw,38697 +transformers/models/xglm/tokenization_xglm.py,sha256=YsFY375ftb3luEKiJI7UqrmRin2rByJhad6DOppocRI,12482 +transformers/models/xglm/tokenization_xglm_fast.py,sha256=7Cicl8Ilnoq_RbwK136hhQgBiL6-tf0h3vjxifUwAzE,7588 +transformers/models/xlm/__init__.py,sha256=tYpOIDQrMDWgJJ-OTPmX2NZngDrxqo47NRfA1dyNQgY,3292 +transformers/models/xlm/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlm/__pycache__/configuration_xlm.cpython-310.pyc,, +transformers/models/xlm/__pycache__/convert_xlm_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xlm/__pycache__/modeling_tf_xlm.cpython-310.pyc,, +transformers/models/xlm/__pycache__/modeling_xlm.cpython-310.pyc,, +transformers/models/xlm/__pycache__/tokenization_xlm.cpython-310.pyc,, +transformers/models/xlm/configuration_xlm.py,sha256=1JiLuR2V2BNnh_wbuT9rNGP5r96PlyUa_zhGaLpHCqw,11105 +transformers/models/xlm/convert_xlm_original_pytorch_checkpoint_to_pytorch.py,sha256=R2wBMzp-IIiBhTOHrgYacy3bX79BN1dh_DdHcO7fE1Y,2934 +transformers/models/xlm/modeling_tf_xlm.py,sha256=OA5OnPerp84VrehstTjbHBTsEgnbqpHs_hutVY1YkCY,56510 +transformers/models/xlm/modeling_xlm.py,sha256=KuCcnqkr_qocGTJGL7Ylq1O2pnP0uDcNwn_wvAutqaA,54772 +transformers/models/xlm/tokenization_xlm.py,sha256=Q1Wtl0sHLqYwC6T5AK4qFt7PqMWzZxkS417X9V8Muvk,24448 +transformers/models/xlm_prophetnet/__init__.py,sha256=_YI-mEgntKjkMoW1RztiRlYdwvonIVpmO2ZQjm6Gezc,2615 +transformers/models/xlm_prophetnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlm_prophetnet/__pycache__/configuration_xlm_prophetnet.cpython-310.pyc,, +transformers/models/xlm_prophetnet/__pycache__/modeling_xlm_prophetnet.cpython-310.pyc,, +transformers/models/xlm_prophetnet/__pycache__/tokenization_xlm_prophetnet.cpython-310.pyc,, +transformers/models/xlm_prophetnet/configuration_xlm_prophetnet.py,sha256=qJmRUY1xj-yXQWQKKpt6unyn6CvY5PMC07yQo5-v_vo,9022 +transformers/models/xlm_prophetnet/modeling_xlm_prophetnet.py,sha256=BlLGRuSxuEc0rJn8_x7NAnRwcMW0T17H9OdaoF92VM0,119413 +transformers/models/xlm_prophetnet/tokenization_xlm_prophetnet.py,sha256=Pr5C0xf5mHrUV3ICfzBzOSmUOieBWxSUdzviGJeVxBE,13270 +transformers/models/xlm_roberta/__init__.py,sha256=Uhk9z5Xv2w8KrHfe0Hzc5ndpgmn5k6_dcZw6OCWye1A,5825 +transformers/models/xlm_roberta/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/configuration_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_flax_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_tf_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/modeling_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta.cpython-310.pyc,, +transformers/models/xlm_roberta/__pycache__/tokenization_xlm_roberta_fast.cpython-310.pyc,, +transformers/models/xlm_roberta/configuration_xlm_roberta.py,sha256=Xm1j7VzQay7AQknB0V-ddFyNQI8_5xPKVFio0vB5hRc,7617 
+transformers/models/xlm_roberta/modeling_flax_xlm_roberta.py,sha256=ryQSpeUg7--uLV--jn71z6j6cN6_H4X0T9ZwokhcUC4,58553 +transformers/models/xlm_roberta/modeling_tf_xlm_roberta.py,sha256=onbEA6X_HSc7T05WkwXYTZj85bEL0GOD2zW1zuXn4E4,81930 +transformers/models/xlm_roberta/modeling_xlm_roberta.py,sha256=nDtRHKoLSvo2Ig1up_-Nu29FLYJqT-e4YvFSxO_58LE,72881 +transformers/models/xlm_roberta/tokenization_xlm_roberta.py,sha256=zs0jtuN2rWdeTq_LNge0rM3b6L5eXxik3UajWKQcoH8,12707 +transformers/models/xlm_roberta/tokenization_xlm_roberta_fast.py,sha256=LB8BJZiCSS5YDwCcxR7uB_5PLf_ROPWV2sTj_xoicfo,7922 +transformers/models/xlm_roberta_xl/__init__.py,sha256=Q3eFSJ5cKAt-2cJLXKdWW28TLujRqjebIBzlqSvK0U4,2405 +transformers/models/xlm_roberta_xl/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlm_roberta_xl/__pycache__/configuration_xlm_roberta_xl.cpython-310.pyc,, +transformers/models/xlm_roberta_xl/__pycache__/convert_xlm_roberta_xl_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xlm_roberta_xl/__pycache__/modeling_xlm_roberta_xl.cpython-310.pyc,, +transformers/models/xlm_roberta_xl/configuration_xlm_roberta_xl.py,sha256=JeVuxCicCGlzPIBIDyAntpHnZ2SDMubwaJKnwWvlKtw,7369 +transformers/models/xlm_roberta_xl/convert_xlm_roberta_xl_original_pytorch_checkpoint_to_pytorch.py,sha256=zVa6azx9rd33D3JkH2uqJ6W20TosJyWi9eLm3LNtc5U,8228 +transformers/models/xlm_roberta_xl/modeling_xlm_roberta_xl.py,sha256=Gy4ZVXeVFlXrg7AMYTBLf8SzqSnH0KZSGZdI9TH2JhA,68996 +transformers/models/xlnet/__init__.py,sha256=-jvIW4RkN8qTjJPEEmIvK6pO8c9NB0Q4JlzY7CWHWUI,4288 +transformers/models/xlnet/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/configuration_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/convert_xlnet_original_tf_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/modeling_tf_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/modeling_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/tokenization_xlnet.cpython-310.pyc,, +transformers/models/xlnet/__pycache__/tokenization_xlnet_fast.cpython-310.pyc,, +transformers/models/xlnet/configuration_xlnet.py,sha256=UkS71BwRCEE9DaX8iaNku3oOFYYuS4O3orBVmoA-ZYE,11023 +transformers/models/xlnet/convert_xlnet_original_tf_checkpoint_to_pytorch.py,sha256=iodIP1W2FNMjel9V31jR7RcHqs8zGX8TK3YdQ65lEbk,3688 +transformers/models/xlnet/modeling_tf_xlnet.py,sha256=CYAX9HUghU-ALClD9tb54x1q04DdcJLnyAJ-KMw-Fsc,77705 +transformers/models/xlnet/modeling_xlnet.py,sha256=Ioir1-wykC8dZZjU65JZVUTZxt7Ka-DpOkUFWKHVi5I,92938 +transformers/models/xlnet/tokenization_xlnet.py,sha256=L1LK5KSQu-_SYbMiHY3YVjRe8mN4RwpzbLce8ZGfTMI,15703 +transformers/models/xlnet/tokenization_xlnet_fast.py,sha256=B9tuPW-QKQeHEZKZcdiUm0cvWQHjpdQsikmR5nQbYro,9366 +transformers/models/xmod/__init__.py,sha256=uoKu7ACrFCEwDUwL06kwYCcUbHt9P3bLIcHLtMtjw-I,2325 +transformers/models/xmod/__pycache__/__init__.cpython-310.pyc,, +transformers/models/xmod/__pycache__/configuration_xmod.cpython-310.pyc,, +transformers/models/xmod/__pycache__/convert_xmod_original_pytorch_checkpoint_to_pytorch.cpython-310.pyc,, +transformers/models/xmod/__pycache__/modeling_xmod.cpython-310.pyc,, +transformers/models/xmod/configuration_xmod.py,sha256=kRuyyFhh55KorV0Yjxn7OIQnzr05lFFbUHJxMp8Vv6Q,9206 +transformers/models/xmod/convert_xmod_original_pytorch_checkpoint_to_pytorch.py,sha256=yFSAtXjxbAy6uXBg2XinRbk3VSEBOsWj1ugBhVNrGjQ,9859 +transformers/models/xmod/modeling_xmod.py,sha256=SH9MjgN2bUIF7y530NXoQt02YvSpT04n6u_UknRIUu4,76287 
+transformers/models/yolos/__init__.py,sha256=DwUvf4HvS249i-g_ykayoDwxJnO7yH4pUJ7UhDE36iY,2400 +transformers/models/yolos/__pycache__/__init__.cpython-310.pyc,, +transformers/models/yolos/__pycache__/configuration_yolos.cpython-310.pyc,, +transformers/models/yolos/__pycache__/convert_yolos_to_pytorch.cpython-310.pyc,, +transformers/models/yolos/__pycache__/feature_extraction_yolos.cpython-310.pyc,, +transformers/models/yolos/__pycache__/image_processing_yolos.cpython-310.pyc,, +transformers/models/yolos/__pycache__/modeling_yolos.cpython-310.pyc,, +transformers/models/yolos/configuration_yolos.py,sha256=xaESSqrvFRXeIwA77KZxUD7qjhfStVwooJ6q6ry9p8A,7669 +transformers/models/yolos/convert_yolos_to_pytorch.py,sha256=g9sI7E-yfoyuXLc2OlN5bFxkc6ZTM243T1Wi8eUwnT0,11259 +transformers/models/yolos/feature_extraction_yolos.py,sha256=0ebN1Be4y86C2yyN2rMQ9AbguEDjcQ7fkabropUpwcs,1481 +transformers/models/yolos/image_processing_yolos.py,sha256=dDa1FAxtaZ2-R1AOTNAOxXfoTwnHWyu7-Pk-NeaCR70,63183 +transformers/models/yolos/modeling_yolos.py,sha256=4p4u0SKMQ2ax6WX0r1YGrsBLLq-QtnX933I34F7JPEg,58511 +transformers/models/yoso/__init__.py,sha256=oV8Bo29EwsQRWVZy2nIaea2ArpOnhkENfp0nFfSKcB4,2074 +transformers/models/yoso/__pycache__/__init__.cpython-310.pyc,, +transformers/models/yoso/__pycache__/configuration_yoso.cpython-310.pyc,, +transformers/models/yoso/__pycache__/convert_yoso_pytorch_to_pytorch.cpython-310.pyc,, +transformers/models/yoso/__pycache__/modeling_yoso.cpython-310.pyc,, +transformers/models/yoso/configuration_yoso.py,sha256=-HIYc9aPMfMxkOwSL8LgT08TOdIhL_Oy9IvTCuVayz8,6785 +transformers/models/yoso/convert_yoso_pytorch_to_pytorch.py,sha256=VjPOSLINfkiaHx8M3dTNMdC8hXh3M1yyhIQ9t4Vzqk0,4115 +transformers/models/yoso/modeling_yoso.py,sha256=byuWK88BLAkNntMf8X1a4aka7tgCrK56pixQeMAIKVs,54720 +transformers/onnx/__init__.py,sha256=wALLY4TPOK2iPrFcfZf_WiEmTRAU6dAWHElxGdexr58,1548 +transformers/onnx/__main__.py,sha256=JZ9ZmeRsnDitwTMWb-dFT8W9AEmMoMKLQ3SvbyCkY0w,9497 +transformers/onnx/__pycache__/__init__.cpython-310.pyc,, +transformers/onnx/__pycache__/__main__.cpython-310.pyc,, +transformers/onnx/__pycache__/config.cpython-310.pyc,, +transformers/onnx/__pycache__/convert.cpython-310.pyc,, +transformers/onnx/__pycache__/features.cpython-310.pyc,, +transformers/onnx/__pycache__/utils.cpython-310.pyc,, +transformers/onnx/config.py,sha256=zPDgC_HSLmMeqPkcLv_Y8EfbfLLEDLqPrvrfQCRyhl8,32556 +transformers/onnx/convert.py,sha256=ZSh9jQE6B6cCxhlSbKLHxNmj48HkXXdl-HF7iGtZy5k,19369 +transformers/onnx/features.py,sha256=GSuwZj760THxAkDmJYROt43La0GaY-bA19j2bE-XYVI,28264 +transformers/onnx/utils.py,sha256=39Uw_GkFBsTb6ZvMIHRTnI289aQDhc6hwfEapaBGE-o,3625 +transformers/optimization.py,sha256=SxPwa3FVqwNB2BSxVThnZhB5Hiu3bW3h-897ea1S-Jw,36329 +transformers/optimization_tf.py,sha256=HCVXeXok1IdVtFxO_SodBQ2TAvfkF_YkhdU7hXuy9Dg,16855 +transformers/pipelines/__init__.py,sha256=eeKQJ-QECHkGFCys8BCneUpkZ80Q0rBqqgF-YQnpHb0,51248 +transformers/pipelines/__pycache__/__init__.cpython-310.pyc,, +transformers/pipelines/__pycache__/audio_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/audio_utils.cpython-310.pyc,, +transformers/pipelines/__pycache__/automatic_speech_recognition.cpython-310.pyc,, +transformers/pipelines/__pycache__/base.cpython-310.pyc,, +transformers/pipelines/__pycache__/conversational.cpython-310.pyc,, +transformers/pipelines/__pycache__/depth_estimation.cpython-310.pyc,, +transformers/pipelines/__pycache__/document_question_answering.cpython-310.pyc,, 
+transformers/pipelines/__pycache__/feature_extraction.cpython-310.pyc,, +transformers/pipelines/__pycache__/fill_mask.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_feature_extraction.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_segmentation.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_to_image.cpython-310.pyc,, +transformers/pipelines/__pycache__/image_to_text.cpython-310.pyc,, +transformers/pipelines/__pycache__/mask_generation.cpython-310.pyc,, +transformers/pipelines/__pycache__/object_detection.cpython-310.pyc,, +transformers/pipelines/__pycache__/pt_utils.cpython-310.pyc,, +transformers/pipelines/__pycache__/question_answering.cpython-310.pyc,, +transformers/pipelines/__pycache__/table_question_answering.cpython-310.pyc,, +transformers/pipelines/__pycache__/text2text_generation.cpython-310.pyc,, +transformers/pipelines/__pycache__/text_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/text_generation.cpython-310.pyc,, +transformers/pipelines/__pycache__/text_to_audio.cpython-310.pyc,, +transformers/pipelines/__pycache__/token_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/video_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/visual_question_answering.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_audio_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_image_classification.cpython-310.pyc,, +transformers/pipelines/__pycache__/zero_shot_object_detection.cpython-310.pyc,, +transformers/pipelines/audio_classification.py,sha256=bWia-wQ7hNfj0RsR7BuG7Yq_B1-Vwka7E-xVVAZB820,8821 +transformers/pipelines/audio_utils.py,sha256=x5JXEWedeMlYcz32JS5HLWBTpy0FPXJvCns_WnXYOnA,9137 +transformers/pipelines/automatic_speech_recognition.py,sha256=pXpTGtfRCdoLAeRRa-IwOnr9lmLviBdmkFoSa08lhHo,37956 +transformers/pipelines/base.py,sha256=ybBFbzYKKzaDvrMZOJTAuRlZRiw97n8TbgH741YcCBA,56728 +transformers/pipelines/conversational.py,sha256=WCbEcYS1Rejwaa_IwGLEIw5FrsRr01FtCQUuV9yPgiI,14730 +transformers/pipelines/depth_estimation.py,sha256=cghYx32OHn4xlqFSlzQ8ryA8fyDC7dt6c-X3ll8xEkA,4477 +transformers/pipelines/document_question_answering.py,sha256=_2lGgDvlwapWVR11a8L4RUIAI23wfjfhF-d2qXX_Xc8,23553 +transformers/pipelines/feature_extraction.py,sha256=Ar_hPljY1Fa_xAsRYX4cCCss1vf-iC5uuKYHp3rejd0,3374 +transformers/pipelines/fill_mask.py,sha256=jnZMK5aZyxlttXtzUISh3ZgvbcI7dIj-nB3Fk37N7Qw,11634 +transformers/pipelines/image_classification.py,sha256=VZgMpoN0Q0wVvdRRSVkzn_B_B6BonvgUA3-ptjVl6w0,8591 +transformers/pipelines/image_feature_extraction.py,sha256=KGFNi5skdOd9bc9GXDBBiqzTPpW986keROZFIMw2-ms,4636 +transformers/pipelines/image_segmentation.py,sha256=ABQM2DBouXYAqQyvofMvybwcVLRdM-YqrHsM6yKJf_s,9124 +transformers/pipelines/image_to_image.py,sha256=phQzbKf01swnGcSfWcm3dQ4ZMrxIW99s8_HTQj533ts,4938 +transformers/pipelines/image_to_text.py,sha256=w46iSfXuDXhkv-hKJE_P2TBKWwEvysdEf6cfXuHlZQs,7996 +transformers/pipelines/mask_generation.py,sha256=kJtIjpCHPouBeLD88JpSV1lROXLctgY7Bqy3XFJ_Jj0,13108 +transformers/pipelines/object_detection.py,sha256=TFPHpG6u1cdxvvM_XEv7eXo79KV8_aobOuRsh47IBpM,7931 +transformers/pipelines/pt_utils.py,sha256=tVI0GBVS7wbsbDEWeFLKju7fVG7I5-xVscndq-ykRko,12768 +transformers/pipelines/question_answering.py,sha256=BMmqntQHVdDukTmluGTKYnZnfbcy8EKYZE31nmaE06U,29886 
+transformers/pipelines/table_question_answering.py,sha256=cq-xxL2izvKZIDHlCuFkKsiqmXEe37KyO4YeYZyCqQA,19830 +transformers/pipelines/text2text_generation.py,sha256=XfaCd5zKtAWfmZEIi8FfWFGWDqkbBoFliJw73kw6Q2c,17230 +transformers/pipelines/text_classification.py,sha256=PLnCk29dHn8J_wYpQPMUTDRaBKUUBw-4JnEKHLS--fM,10471 +transformers/pipelines/text_generation.py,sha256=FDRrlJP3bV1Q96B6RoDHQ1kfXx660Q5G7_Z9hqbX7xI,18230 +transformers/pipelines/text_to_audio.py,sha256=XoYuTtUWYXTJGwjlcQO74Tp2fWsXBEA6u3eRyGevWVo,8345 +transformers/pipelines/token_classification.py,sha256=nw-DEE_Pw8gZHjYi3xAONcLcAIQikgwrJRpchq6PxtU,26713 +transformers/pipelines/video_classification.py,sha256=m8jtUP4_0F1HtNEyjcabAVKa5WrWTzagAVP5JM1DH1s,5398 +transformers/pipelines/visual_question_answering.py,sha256=Ukk93_x3hqhtiL9g0c7kPtaPziLMkODQrn-_NXu9p_4,6817 +transformers/pipelines/zero_shot_audio_classification.py,sha256=2aNgax0toCNMP6r2JuFNl6ytOgU3glFJ_v5EuhFf7vg,6711 +transformers/pipelines/zero_shot_classification.py,sha256=WLgjtF0fOEaCiQb9QUu9vcNfJLP9M5nRnJGTgXgRKKU,12347 +transformers/pipelines/zero_shot_image_classification.py,sha256=gTo4C1fMa_Ljhing7OMUf7pxX7NH8Wert-tO-2CRybY,6844 +transformers/pipelines/zero_shot_object_detection.py,sha256=MBs9217WUE3Fc_Jdc-gOtO2i38B3-2yVxAsnlXaVyks,9472 +transformers/processing_utils.py,sha256=-gERmK7YE0yI0gA_Xw1A0-E5ZWM2-V2kTKTmZ1P3OtI,22729 +transformers/pytorch_utils.py,sha256=MQrkW99x_iymGVpZbqO30RGRCEvGiU-IM-TuDdAvDwE,11856 +transformers/quantizers/__init__.py,sha256=hCprQnoI20-O1FSMSRgD-P9_NKEzN7kEfY66_BrQxz0,699 +transformers/quantizers/__pycache__/__init__.cpython-310.pyc,, +transformers/quantizers/__pycache__/auto.cpython-310.pyc,, +transformers/quantizers/__pycache__/base.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_aqlm.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_awq.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_bnb_4bit.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_bnb_8bit.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_gptq.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizer_quanto.cpython-310.pyc,, +transformers/quantizers/__pycache__/quantizers_utils.cpython-310.pyc,, +transformers/quantizers/auto.py,sha256=rhAGKewmFRPnrGovnytUwUoOEhxoSuBBn3HKDz1wJ7Y,6851 +transformers/quantizers/base.py,sha256=auFO3aXVfp3ztN3Hg8fh_YhGyQvy5IbHxXioBCBS3TY,9145 +transformers/quantizers/quantizer_aqlm.py,sha256=cBO72I147eZzE_9W041rcY1GB5FCcxd3_ynKPGFbZWc,3681 +transformers/quantizers/quantizer_awq.py,sha256=-F7nKTAk3RbrOZMoXMUDM5PUiCoq8YRffEwEQrDEb_s,5137 +transformers/quantizers/quantizer_bnb_4bit.py,sha256=vnDjztTP4CpDXjer_rE1EnXrlQmizXYDfIBM7SKAFBM,14574 +transformers/quantizers/quantizer_bnb_8bit.py,sha256=X2nEGlEj6RYLvkmBDCN8srb8jMMFCAIdBMRVgvTcUO0,13000 +transformers/quantizers/quantizer_gptq.py,sha256=ZWNQY2WF6mzUV-SwYg1PZIM0kZ3JJyYGe3gF2mZcZ58,3878 +transformers/quantizers/quantizer_quanto.py,sha256=-qIlgLelWfk3ExYnWBul2z2L7B9Nw7ijxgDmC8g3Ygw,7847 +transformers/quantizers/quantizers_utils.py,sha256=6bgmf8mLxow6gXonTFX7PLfqFsf6plUj7DOeXnXhwMM,1066 +transformers/safetensors_conversion.py,sha256=1998DSxTsl99crqBPfNXRq7Xb6ABRc5Ts4R5oxUM9p0,4570 +transformers/sagemaker/__init__.py,sha256=fKtKAHamz_CLL9jPGCa2E-1n8RmuS-58qGtzZuKc3qg,730 +transformers/sagemaker/__pycache__/__init__.cpython-310.pyc,, +transformers/sagemaker/__pycache__/trainer_sm.cpython-310.pyc,, +transformers/sagemaker/__pycache__/training_args_sm.cpython-310.pyc,, 
+transformers/sagemaker/trainer_sm.py,sha256=7GsKLtjdMfKp98OwHD7RcBsl745OOwHAaBswkfLkfsE,1044 +transformers/sagemaker/training_args_sm.py,sha256=4ZnQhITfMwT0y2Y2MvkI11PEB_yfTX5Z7WrPKt0VXD8,5389 +transformers/testing_utils.py,sha256=fUUcoPYpuvCIN2WaLyokAWhgK8bm_ZuOU_eKi2CZGJ8,83993 +transformers/tf_utils.py,sha256=9TlTj8qlWobJ0e-lNx47m3Pu1eDY6S6dm5AIIekyNtw,10091 +transformers/time_series_utils.py,sha256=LjOgIvLmP0v6fJoqGo8lCD1kr3sXx9O_jmI-qJejtPU,7520 +transformers/tokenization_utils.py,sha256=SuyV-6xCXMhOqDdXExtGeXWUkjWt4gV3fz3PWjbjkuA,44595 +transformers/tokenization_utils_base.py,sha256=V30CjRRHJfokRpeOLvdRdPZADs7pweZuL2-zAurCmBk,199241 +transformers/tokenization_utils_fast.py,sha256=tpErvsUzI0RSiZJJtdmi7LbEuIltXnul9FrhAFCuIoM,37523 +transformers/tools/__init__.py,sha256=hI6M7zNUTyRE3BiZtL1VM8CcpYqxTrFR7lS0U6T7InM,2955 +transformers/tools/__pycache__/__init__.cpython-310.pyc,, +transformers/tools/__pycache__/agent_types.cpython-310.pyc,, +transformers/tools/__pycache__/agents.cpython-310.pyc,, +transformers/tools/__pycache__/base.cpython-310.pyc,, +transformers/tools/__pycache__/document_question_answering.cpython-310.pyc,, +transformers/tools/__pycache__/evaluate_agent.cpython-310.pyc,, +transformers/tools/__pycache__/image_captioning.cpython-310.pyc,, +transformers/tools/__pycache__/image_question_answering.cpython-310.pyc,, +transformers/tools/__pycache__/image_segmentation.cpython-310.pyc,, +transformers/tools/__pycache__/prompts.cpython-310.pyc,, +transformers/tools/__pycache__/python_interpreter.cpython-310.pyc,, +transformers/tools/__pycache__/speech_to_text.cpython-310.pyc,, +transformers/tools/__pycache__/text_classification.cpython-310.pyc,, +transformers/tools/__pycache__/text_question_answering.cpython-310.pyc,, +transformers/tools/__pycache__/text_summarization.cpython-310.pyc,, +transformers/tools/__pycache__/text_to_speech.cpython-310.pyc,, +transformers/tools/__pycache__/translation.cpython-310.pyc,, +transformers/tools/agent_types.py,sha256=6ZVzmPwWiMtJXKUZ33fKzfUFp-v_qfI901MKj2pbQRY,9093 +transformers/tools/agents.py,sha256=1t7eUTYriK4jIQMFcJvtYzsivDR3XEkeaFv_LcFVhCo,30737 +transformers/tools/base.py,sha256=L7OBvSj233hqZmuwn3R0Xfz7naTtWbbZrXxs8v1Rj7s,30612 +transformers/tools/document_question_answering.py,sha256=7qSMr0fQYadiGOoVMXNrImls3_O-hcdDbLrlSc3cvxU,3337 +transformers/tools/evaluate_agent.py,sha256=JvMKk9NoJLZTRnY_VAC_cSHWAO-Rx-Dl8Vt31kpBbfw,24721 +transformers/tools/image_captioning.py,sha256=x1PfWpDozWSZuue633XwEPPBTr_zEX9mgrYar-8LqXQ,1745 +transformers/tools/image_question_answering.py,sha256=UNOzIcmkckh1W1bqlj31h61eXGAZ1TZ831iqytyO4NQ,1969 +transformers/tools/image_segmentation.py,sha256=1BbHSYTz3q8DlTMHBnKdibp7JCHZydPdNoyl7TObfN8,2103 +transformers/tools/prompts.py,sha256=1YXY_A5Zfyd_rudKzB4ShQ9OR_E5bHeh9bcgBVt1ltQ,1558 +transformers/tools/python_interpreter.py,sha256=aSn1bnuQT9_xteXNcJdlmi39IzX1FZRqSaoGEQRS-PE,9999 +transformers/tools/speech_to_text.py,sha256=m3LCJxMpJykL9aD8rZ4H3ROGtt59LcLozw-6963XjCE,1482 +transformers/tools/text_classification.py,sha256=snyBoLTERnfl7YKKAgZctWhow6sEXQdS4bcWYUxJnyU,2475 +transformers/tools/text_question_answering.py,sha256=mGO3E0nL71Jzn4jeC2_RgLRDtuqbld77mQ2T7jw4aPc,1967 +transformers/tools/text_summarization.py,sha256=-8TY4P4LL4c7bQcD9y8Vi5Rfiaw8nAiY_aP5yXicq_g,1691 +transformers/tools/text_to_speech.py,sha256=vuJU2dC2d5J1kVdGjSBJCBdsTiOli2J7OabAposOFfA,2424 +transformers/tools/translation.py,sha256=fu05jVYbJUFmNvmwd4mjQOqzGt1JSy6QbpuAd2uChOE,8493 
+transformers/trainer.py,sha256=RN4aGF7cV0fcmGLHaawobOmnAbDxLOueYCYMQdUgz2g,213748 +transformers/trainer_callback.py,sha256=apM_43C6-iJAl2TPmjS-8pFNvQQyp5yLLBbtCfPNWII,25174 +transformers/trainer_pt_utils.py,sha256=SjbjchrwMLfKKdezWApvgLGMieDcRaMqSjIsY7mgSHc,58875 +transformers/trainer_seq2seq.py,sha256=6oSCG9GlQmUBpasw3nFI_ngF6KCrxPixL91ob7CQMCk,17240 +transformers/trainer_utils.py,sha256=B0VzRksgwpXUVAbx56aacfEY-WSnpg1lo_rou2qTaEg,30861 +transformers/training_args.py,sha256=SfRhYwHUHBn3cjQDSSMZWrPAPbe7NaCr4MOguOzXSoM,145727 +transformers/training_args_seq2seq.py,sha256=k8qyPQAo5GWlcToN3tnzW7dE4xyP7i7HRjP_sgxlllA,4308 +transformers/training_args_tf.py,sha256=esUsNAj6kNNMu1LJLxfELJAJiTq7HD6fHz3GvI_mKJg,14570 +transformers/utils/__init__.py,sha256=VevayYs3KBHWmrALLFXXFF9P3ZMoGKlgJmmGeqtAqds,7656 +transformers/utils/__pycache__/__init__.cpython-310.pyc,, +transformers/utils/__pycache__/backbone_utils.cpython-310.pyc,, +transformers/utils/__pycache__/bitsandbytes.cpython-310.pyc,, +transformers/utils/__pycache__/constants.cpython-310.pyc,, +transformers/utils/__pycache__/doc.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_detectron2_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_flax_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_keras_nlp_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_music_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_pt_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_sentencepiece_and_tokenizers_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_sentencepiece_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_speech_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_tensorflow_text_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_tf_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_tokenizers_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_torchaudio_objects.cpython-310.pyc,, +transformers/utils/__pycache__/dummy_vision_objects.cpython-310.pyc,, +transformers/utils/__pycache__/fx.cpython-310.pyc,, +transformers/utils/__pycache__/generic.cpython-310.pyc,, +transformers/utils/__pycache__/hp_naming.cpython-310.pyc,, +transformers/utils/__pycache__/hub.cpython-310.pyc,, +transformers/utils/__pycache__/import_utils.cpython-310.pyc,, +transformers/utils/__pycache__/logging.cpython-310.pyc,, +transformers/utils/__pycache__/model_parallel_utils.cpython-310.pyc,, +transformers/utils/__pycache__/notebook.cpython-310.pyc,, +transformers/utils/__pycache__/peft_utils.cpython-310.pyc,, +transformers/utils/__pycache__/quantization_config.cpython-310.pyc,, +transformers/utils/__pycache__/sentencepiece_model_pb2.cpython-310.pyc,, +transformers/utils/__pycache__/sentencepiece_model_pb2_new.cpython-310.pyc,, +transformers/utils/__pycache__/versions.cpython-310.pyc,, +transformers/utils/backbone_utils.py,sha256=ruggZsHu9IJ3IVPa4Dvvvqx9Sj1mB-8P24C2VV7RPTo,16309 +transformers/utils/bitsandbytes.py,sha256=LzOKwcHWAxxZZv-7Ts9Q0vlEYvHd18affVgVbiR3Tzs,1040 +transformers/utils/constants.py,sha256=sZsUwOnA3CbtN1svs9YoaNLTTsAc9RVaITsgpf8K4iI,282 +transformers/utils/doc.py,sha256=eObKDEpC1z-05BNXHi1hYNjQMPsWSN1SNMa7IFkRmN8,40737 +transformers/utils/dummy_detectron2_objects.py,sha256=n7Pt_7sbVBNfohKGcOARB-ZcPcJRbjEAcoLd2vTXndU,340 
+transformers/utils/dummy_essentia_and_librosa_and_pretty_midi_and_scipy_and_torch_objects.py,sha256=n6pY4s7zCII3dzo7Ejd0RviHa_pMateuDEwbbHgsTUY,902 +transformers/utils/dummy_flax_objects.py,sha256=ANFq3CYhCByAWqcFIY2z-DzVNizlaH6oGSMX0XmIz_Y,33561 +transformers/utils/dummy_keras_nlp_objects.py,sha256=AVWt2orICCUXi754bkavvqPzYO91PjER-FlUZAw2jZc,294 +transformers/utils/dummy_music_objects.py,sha256=1lxIebYUOdHJWMQ_T5IQgPgcO_wp_8YM_HGc3skuGVg,458 +transformers/utils/dummy_pt_objects.py,sha256=ztkcIY5tn6C537NhVQXj9JPIOa1hsp0g8K2Pkc4WV0U,232487 +transformers/utils/dummy_sentencepiece_and_tokenizers_objects.py,sha256=BgPLr8Wz8A-17K86x04N21CKXtWNQLJEWx2c4aZRqaA,286 +transformers/utils/dummy_sentencepiece_objects.py,sha256=KcSrwciSpiurqsxBoR34G5NuSrc2Clf1Q7N_CjanBlc,6455 +transformers/utils/dummy_speech_objects.py,sha256=9eFm1cjdsYOPBoAz9JTgP35Bg8WF2C9AZ_y1hFpKZdQ,465 +transformers/utils/dummy_tensorflow_text_objects.py,sha256=43V0IA2kb9gtuL0S1OL1eRFFxzQwKg4pPjMVuXUB5qg,306 +transformers/utils/dummy_tf_objects.py,sha256=7zY-UmprSrqdj16liMGgtfhXEnDPaOn6QGBW267EG5o,67955 +transformers/utils/dummy_tokenizers_objects.py,sha256=2Zywdoz7Nr1rA8fLFCx4F-JaKAcoSHBszpCFGuuAyAU,11456 +transformers/utils/dummy_torchaudio_objects.py,sha256=9A7Y4643_hTaqqZKlL-O524wRnrmNtODxisuDdO_7kU,488 +transformers/utils/dummy_vision_objects.py,sha256=xOxbsDSXdr172U3VwEh7VsQkvEuQBzunOUqMiozT-Bs,14893 +transformers/utils/fx.py,sha256=RuII5DVdwVWjmgEDF-0g57qpOpqwaPQhkuYn_oFVEd0,50589 +transformers/utils/generic.py,sha256=uIaZJ203H2zJOeEO4HI5W2SUyMunoV3Sr22voTONQ4s,23946 +transformers/utils/hp_naming.py,sha256=vqcOXcDOyqbISWo8-ClUJUOBVbZM1h08EcymTwcRthc,4979 +transformers/utils/hub.py,sha256=GvdfxYlPkPKr5yIXqv_wPBjAbC9gW8A-DvlTiQIbpLs,55844 +transformers/utils/import_utils.py,sha256=9pIoFqqYO7tSNAVMAl-ijjpHOONoczICu21nFGehWAE,53250 +transformers/utils/logging.py,sha256=X6FDZSn9Vbo81QHn80TGVsk9LGHe4OdWDCTCnCF5V7A,11609 +transformers/utils/model_parallel_utils.py,sha256=XbGU9IlFF59K_aplRxUGVnTfIZ9mpbLomKqQ08ooTew,2272 +transformers/utils/notebook.py,sha256=PiEiHpfuqxd3M1U3MPD8bmeO8bvtTbLfOxnL-cZWHQY,15558 +transformers/utils/peft_utils.py,sha256=as1XSRYa4-skewnlVom74qb-vgoZkGJtcXeNEUndAlo,5217 +transformers/utils/quantization_config.py,sha256=gnOvIE3HK3mha8PODRXV5-pnQMg0W5V12opy_rKmmLA,42093 +transformers/utils/sentencepiece_model_pb2.py,sha256=XiQs9uMEusfAZP6t6IBuTTX9yl7LiOyJEi7Ib-Wzmq0,50677 +transformers/utils/sentencepiece_model_pb2_new.py,sha256=FwTW0nkCiPCErmGk0s27BniKmkORcfnNk-w7NBGkCuA,6621 +transformers/utils/versions.py,sha256=C-Tqr4qGSHH64ygIBCSo8gA6azz7Dbzh8zdc_yjMkX8,4337 diff --git a/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/WHEEL b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..57e3d840d59a650ac5bccbad5baeec47d155f0ad --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/entry_points.txt b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a7995ed6f21261a78509c57d57daba51ecf1a7d --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +transformers-cli = 
transformers.commands.transformers_cli:main diff --git a/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/top_level.txt b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..976a2b1f3998279c10c413279a095be86bf69167 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/transformers-4.40.1.dist-info/top_level.txt @@ -0,0 +1 @@ +transformers diff --git a/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/INSTALLER b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/LICENSE b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..dcec4760996be2143d28a12718adc8f9e050bf29 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2016, Gregory Szorc +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/METADATA b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..d7be705bd9e275884a7c1ead91178b093b16afe5 --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/METADATA @@ -0,0 +1,61 @@ +Metadata-Version: 2.1 +Name: zstandard +Version: 0.22.0 +Summary: Zstandard bindings for Python +Home-page: https://github.com/indygreg/python-zstandard +Author: Gregory Szorc +Author-email: gregory.szorc@gmail.com +License: BSD +Keywords: zstandard,zstd,compression +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: C +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.8 +License-File: LICENSE +Requires-Dist: cffi >=1.11 ; platform_python_implementation == "PyPy" +Provides-Extra: cffi +Requires-Dist: cffi >=1.11 ; extra == 'cffi' + +================ +python-zstandard +================ + +| |ci-test| |ci-wheel| |ci-typing| |ci-sdist| |ci-anaconda| |ci-sphinx| + +This project provides Python bindings for interfacing with the +`Zstandard `_ compression library. A C extension +and CFFI interface are provided. + +The primary goal of the project is to provide a rich interface to the +underlying C API through a Pythonic interface while not sacrificing +performance. This means exposing most of the features and flexibility +of the C API while not sacrificing usability or safety that Python provides. + +The canonical home for this project is +https://github.com/indygreg/python-zstandard. + +For usage documentation, see https://python-zstandard.readthedocs.org/. + +.. |ci-test| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/test.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/test.yml + +.. |ci-wheel| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/wheel.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/wheel.yml + +.. |ci-typing| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/typing.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/typing.yml + +.. |ci-sdist| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/sdist.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/sdist.yml + +.. |ci-anaconda| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/anaconda.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/anaconda.yml + +.. 
|ci-sphinx| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/sphinx.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/sphinx.yml diff --git a/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/RECORD b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..26b60f0eddc30ceff11a25901732cb7a7152cf0d --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/RECORD @@ -0,0 +1,14 @@ +zstandard-0.22.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +zstandard-0.22.0.dist-info/LICENSE,sha256=eI_oXlNySkCymQUcSryc901Csn55aDJbgIkcJfea20s,1484 +zstandard-0.22.0.dist-info/METADATA,sha256=ioMv0Bi7ECdt9Ll9mP93LfpR3Jm8KRzaXS2mE_w4S8I,2909 +zstandard-0.22.0.dist-info/RECORD,, +zstandard-0.22.0.dist-info/WHEEL,sha256=cD39NF6a3hkhaWoPQJng7gnGZRIfQsUCtwcedITCPtg,152 +zstandard-0.22.0.dist-info/top_level.txt,sha256=J-wj94pPadY4ipFaanrYBlrMblOSegEYS8o_LdogrpU,10 +zstandard/__init__.py,sha256=7CalB28kCVwqwflhLUHneyPK_Mt2xp_Ogt24ISHlQoQ,7102 +zstandard/__init__.pyi,sha256=kvP6Us9IPX4Ssfg5BwaE3_E7uuE1YzVUo9hTzYUcNHA,13938 +zstandard/__pycache__/__init__.cpython-310.pyc,, +zstandard/__pycache__/backend_cffi.cpython-310.pyc,, +zstandard/_cffi.cpython-310-x86_64-linux-gnu.so,sha256=uS7_ZwqYNCxo5PrUdagck_arC70Bt9BH1zj2U6FUKIY,11882792 +zstandard/backend_c.cpython-310-x86_64-linux-gnu.so,sha256=t3A7at18BBMylNMbf4DwJSD7IzAozR_rb7dUd7bQKXE,11287920 +zstandard/backend_cffi.py,sha256=hMQlr0fkN-E2ZUBqUszANzeYV9Y7ditGmiasKjfbAMg,152430 +zstandard/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/WHEEL b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..7b52c3f3e667e03ca4b2a8b53a94655a796beafc --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/top_level.txt b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..864700d2b3e63509d1e25eff308c0a99386bb4ac --- /dev/null +++ b/llmeval-env/lib/python3.10/site-packages/zstandard-0.22.0.dist-info/top_level.txt @@ -0,0 +1 @@ +zstandard
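
For context on the zstandard wheel recorded above (whose RECORD lists both a C-extension and a CFFI backend), here is a minimal, illustrative sketch of the one-shot compression API the embedded README paragraph refers to. It is not part of the diff; the class and method names follow the python-zstandard documentation, and the sample data is hypothetical.

    # Illustrative only; not part of the installed-files diff above.
    import zstandard as zstd

    data = b"example payload" * 1000

    # One-shot compression with an explicit compression level.
    cctx = zstd.ZstdCompressor(level=3)
    compressed = cctx.compress(data)

    # One-shot decompression; compress() embeds the content size,
    # so decompress() can allocate the output buffer itself.
    dctx = zstd.ZstdDecompressor()
    restored = dctx.decompress(compressed)
    assert restored == data
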