Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- env-llmeval/lib/python3.10/site-packages/filelock/__init__.py +52 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_error.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_util.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/version.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/_api.py +341 -0
- env-llmeval/lib/python3.10/site-packages/filelock/_error.py +30 -0
- env-llmeval/lib/python3.10/site-packages/filelock/_soft.py +47 -0
- env-llmeval/lib/python3.10/site-packages/filelock/_unix.py +68 -0
- env-llmeval/lib/python3.10/site-packages/filelock/_util.py +52 -0
- env-llmeval/lib/python3.10/site-packages/filelock/_windows.py +65 -0
- env-llmeval/lib/python3.10/site-packages/filelock/py.typed +0 -0
- env-llmeval/lib/python3.10/site-packages/filelock/version.py +16 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py +6 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py +150 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py +37 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py +32 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py +415 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py +111 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py +139 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/_version.py +1 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__init__.py +26 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py +6 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/providers.py +133 -0
env-llmeval/lib/python3.10/site-packages/filelock/__init__.py
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
A platform independent file lock that supports the with-statement.
|
3 |
+
|
4 |
+
.. autodata:: filelock.__version__
|
5 |
+
:no-value:
|
6 |
+
|
7 |
+
"""
|
8 |
+
|
9 |
+
from __future__ import annotations
|
10 |
+
|
11 |
+
import sys
|
12 |
+
import warnings
|
13 |
+
from typing import TYPE_CHECKING
|
14 |
+
|
15 |
+
from ._api import AcquireReturnProxy, BaseFileLock
|
16 |
+
from ._error import Timeout
|
17 |
+
from ._soft import SoftFileLock
|
18 |
+
from ._unix import UnixFileLock, has_fcntl
|
19 |
+
from ._windows import WindowsFileLock
|
20 |
+
from .version import version
|
21 |
+
|
22 |
+
#: version of the project as a string
|
23 |
+
__version__: str = version
|
24 |
+
|
25 |
+
|
26 |
+
if sys.platform == "win32": # pragma: win32 cover
|
27 |
+
_FileLock: type[BaseFileLock] = WindowsFileLock
|
28 |
+
else: # pragma: win32 no cover # noqa: PLR5501
|
29 |
+
if has_fcntl:
|
30 |
+
_FileLock: type[BaseFileLock] = UnixFileLock
|
31 |
+
else:
|
32 |
+
_FileLock = SoftFileLock
|
33 |
+
if warnings is not None:
|
34 |
+
warnings.warn("only soft file lock is available", stacklevel=2)
|
35 |
+
|
36 |
+
if TYPE_CHECKING:
|
37 |
+
FileLock = SoftFileLock
|
38 |
+
else:
|
39 |
+
#: Alias for the lock, which should be used for the current platform.
|
40 |
+
FileLock = _FileLock
|
41 |
+
|
42 |
+
|
43 |
+
__all__ = [
|
44 |
+
"AcquireReturnProxy",
|
45 |
+
"BaseFileLock",
|
46 |
+
"FileLock",
|
47 |
+
"SoftFileLock",
|
48 |
+
"Timeout",
|
49 |
+
"UnixFileLock",
|
50 |
+
"WindowsFileLock",
|
51 |
+
"__version__",
|
52 |
+
]
|
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_error.cpython-310.pyc
ADDED
Binary file (1.45 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/_util.cpython-310.pyc
ADDED
Binary file (1.51 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/filelock/__pycache__/version.cpython-310.pyc
ADDED
Binary file (496 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/filelock/_api.py
ADDED
@@ -0,0 +1,341 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import contextlib
|
4 |
+
import logging
|
5 |
+
import os
|
6 |
+
import time
|
7 |
+
import warnings
|
8 |
+
from abc import ABC, abstractmethod
|
9 |
+
from dataclasses import dataclass
|
10 |
+
from threading import local
|
11 |
+
from typing import TYPE_CHECKING, Any
|
12 |
+
from weakref import WeakValueDictionary
|
13 |
+
|
14 |
+
from ._error import Timeout
|
15 |
+
|
16 |
+
if TYPE_CHECKING:
|
17 |
+
import sys
|
18 |
+
from types import TracebackType
|
19 |
+
|
20 |
+
if sys.version_info >= (3, 11): # pragma: no cover (py311+)
|
21 |
+
from typing import Self
|
22 |
+
else: # pragma: no cover (<py311)
|
23 |
+
from typing_extensions import Self
|
24 |
+
|
25 |
+
|
26 |
+
_LOGGER = logging.getLogger("filelock")
|
27 |
+
|
28 |
+
|
29 |
+
# Helper returned by :meth:`BaseFileLock.acquire`. Returning *self* from ``acquire`` would make the surrounding
# ``with`` statement call ``__enter__`` on the lock a second time, acquiring it again without a matching automatic
# release -- see issue #37 (memory leak). This thin proxy avoids that double acquisition.
class AcquireReturnProxy:
    """A context-aware object that will release the lock file when exiting."""

    def __init__(self, lock: BaseFileLock) -> None:
        self.lock = lock

    def __enter__(self) -> BaseFileLock:
        # Hand back the wrapped lock without re-acquiring it.
        return self.lock

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        # Release exactly once, regardless of whether an exception is propagating.
        self.lock.release()
|
48 |
+
|
49 |
+
|
50 |
+
@dataclass
class FileLockContext:
    """A dataclass which holds the context for a ``BaseFileLock`` object."""

    # Kept as a separate class so that ``ThreadLocalFileContext`` can mix in ``threading.local``
    # to provide optional thread-local storage of the same state.

    #: The path to the lock file.
    lock_file: str

    #: The default timeout value.
    timeout: float

    #: The mode for the lock files
    mode: int

    #: File descriptor returned by ``os.open`` for *lock_file*; ``None`` whenever the lock is not held.
    lock_file_fd: int | None = None

    #: Nested-locking counter: incremented on acquire; the lock is only fully released once it is back at 0.
    lock_counter: int = 0
|
71 |
+
|
72 |
+
|
73 |
+
class ThreadLocalFileContext(FileLockContext, local):
    """Thread-local variant of :class:`FileLockContext`: each thread sees its own lock state."""
|
75 |
+
|
76 |
+
|
77 |
+
class BaseFileLock(ABC, contextlib.ContextDecorator):
    """Abstract base class for a file lock object."""

    #: Per-subclass registry of live singleton instances, keyed by lock-file path (see ``is_singleton``).
    _instances: WeakValueDictionary[str, BaseFileLock]

    def __new__(  # noqa: PLR0913
        cls,
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = True,  # noqa: ARG003, FBT001, FBT002
        *,
        is_singleton: bool = False,
        **kwargs: dict[str, Any],  # capture remaining kwargs for subclasses # noqa: ARG003
    ) -> Self:
        """Create a new lock object, or return the per-lock-file singleton when requested."""
        if not is_singleton:
            return super().__new__(cls)

        key = str(lock_file)
        instance = cls._instances.get(key)
        if not instance:
            instance = super().__new__(cls)
            cls._instances[key] = instance
        elif timeout != instance.timeout or mode != instance.mode:
            msg = "Singleton lock instances cannot be initialized with differing arguments"
            raise ValueError(msg)

        return instance  # type: ignore[return-value] # https://github.com/python/mypy/issues/15322

    def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None:
        """Give every lock subclass its own singleton registry."""
        super().__init_subclass__(**kwargs)
        cls._instances = WeakValueDictionary()

    def __init__(  # noqa: PLR0913
        self,
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = True,  # noqa: FBT001, FBT002
        *,
        is_singleton: bool = False,
    ) -> None:
        """
        Create a new lock object.

        :param lock_file: path to the file
        :param timeout: default timeout when acquiring the lock, in seconds. Used as the fallback in
            :meth:`acquire` when no timeout (``None``) is given there. A negative value disables the
            timeout; a timeout of 0 means exactly one attempt is made to acquire the lock.
        :param mode: file permissions for the lockfile
        :param thread_local: whether this object's internal context is thread local. When ``False``
            the lock is reentrant across threads.
        :param is_singleton: when ``True`` only one instance of this class is created per lock file,
            which allows reentrant locking without passing the same object around.

        """
        self._is_thread_local = thread_local
        self._is_singleton = is_singleton

        # External code must not touch the context directly; it should go through the properties below.
        context_kwargs: dict[str, Any] = {
            "lock_file": os.fspath(lock_file),
            "timeout": timeout,
            "mode": mode,
        }
        context_cls = ThreadLocalFileContext if thread_local else FileLockContext
        self._context: FileLockContext = context_cls(**context_kwargs)

    def is_thread_local(self) -> bool:
        """:return: a flag indicating if this lock is thread local or not"""
        return self._is_thread_local

    @property
    def is_singleton(self) -> bool:
        """:return: a flag indicating if this lock is singleton or not"""
        return self._is_singleton

    @property
    def lock_file(self) -> str:
        """:return: path to the lock file"""
        return self._context.lock_file

    @property
    def timeout(self) -> float:
        """
        :return: the default timeout value, in seconds

        .. versionadded:: 2.0.0
        """
        return self._context.timeout

    @timeout.setter
    def timeout(self, value: float | str) -> None:
        """
        Change the default timeout value.

        :param value: the new value, in seconds

        """
        self._context.timeout = float(value)

    @property
    def mode(self) -> int:
        """:return: the file permissions for the lockfile"""
        return self._context.mode

    @abstractmethod
    def _acquire(self) -> None:
        """If the file lock could be acquired, self._context.lock_file_fd holds the file descriptor of the lock file."""
        raise NotImplementedError

    @abstractmethod
    def _release(self) -> None:
        """Releases the lock and sets self._context.lock_file_fd to None."""
        raise NotImplementedError

    @property
    def is_locked(self) -> bool:
        """
        :return: A boolean indicating if the lock file is holding the lock currently.

        .. versionchanged:: 2.0.0

            This was previously a method and is now a property.
        """
        return self._context.lock_file_fd is not None

    @property
    def lock_counter(self) -> int:
        """:return: The number of times this lock has been acquired (but not yet released)."""
        return self._context.lock_counter

    def acquire(
        self,
        timeout: float | None = None,
        poll_interval: float = 0.05,
        *,
        poll_intervall: float | None = None,
        blocking: bool = True,
    ) -> AcquireReturnProxy:
        """
        Try to acquire the file lock.

        :param timeout: maximum wait time for acquiring the lock; ``None`` means use the default
            :attr:`~timeout`, and a negative value blocks until the lock is acquired
        :param poll_interval: interval of trying to acquire the lock file
        :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead
        :param blocking: defaults to True. When False, return immediately if the lock cannot be obtained
            on the first attempt; otherwise block until the timeout expires or the lock is acquired.
        :raises Timeout: if fails to acquire lock within the timeout period
        :return: a context object that will unlock the file when the context is exited

        .. code-block:: python

            # You can use this method in the context manager (recommended)
            with lock.acquire():
                pass

            # Or use an equivalent try-finally construct:
            lock.acquire()
            try:
                pass
            finally:
                lock.release()

        .. versionchanged:: 2.0.0

            This method returns now a *proxy* object instead of *self*,
            so that it can be used in a with statement without side effects.

        """
        if timeout is None:  # fall back to the instance default
            timeout = self._context.timeout

        if poll_intervall is not None:
            warnings.warn("use poll_interval instead of poll_intervall", DeprecationWarning, stacklevel=2)
            poll_interval = poll_intervall

        # Bump the counter right away; it is rolled back below if acquisition fails.
        self._context.lock_counter += 1

        lock_id, lock_filename = id(self), self.lock_file
        start_time = time.perf_counter()
        try:
            while True:
                if not self.is_locked:
                    _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
                    self._acquire()
                if self.is_locked:
                    _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
                    break
                if blocking is False:
                    _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                if 0 <= timeout < time.perf_counter() - start_time:
                    _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                _LOGGER.debug(
                    "Lock %s not acquired on %s, waiting %s seconds ...", lock_id, lock_filename, poll_interval
                )
                time.sleep(poll_interval)
        except BaseException:  # something went wrong, so undo the counter bump
            self._context.lock_counter = max(0, self._context.lock_counter - 1)
            raise
        return AcquireReturnProxy(lock=self)

    def release(self, force: bool = False) -> None:  # noqa: FBT001, FBT002
        """
        Releases the file lock. Note that the lock is only fully released once the lock counter reaches 0,
        and that the lock file itself is not automatically deleted.

        :param force: If true, the lock counter is ignored and the lock is released in every case

        """
        if not self.is_locked:
            return
        self._context.lock_counter -= 1

        if self._context.lock_counter == 0 or force:
            lock_id, lock_filename = id(self), self.lock_file

            _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
            self._release()
            self._context.lock_counter = 0
            _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)

    def __enter__(self) -> Self:
        """
        Acquire the lock.

        :return: the lock object

        """
        self.acquire()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """
        Release the lock.

        :param exc_type: the exception type if raised
        :param exc_value: the exception value if raised
        :param traceback: the exception traceback if raised

        """
        self.release()

    def __del__(self) -> None:
        """Force-release the lock when the object is garbage collected."""
        self.release(force=True)
|
336 |
+
|
337 |
+
|
338 |
+
__all__ = [
|
339 |
+
"AcquireReturnProxy",
|
340 |
+
"BaseFileLock",
|
341 |
+
]
|
env-llmeval/lib/python3.10/site-packages/filelock/_error.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
from typing import Any
|
4 |
+
|
5 |
+
|
6 |
+
class Timeout(TimeoutError):  # noqa: N818
    """Raised when the lock could not be acquired in *timeout* seconds."""

    def __init__(self, lock_file: str) -> None:
        super().__init__()
        self._lock_file = lock_file

    @property
    def lock_file(self) -> str:
        """:return: The path of the file lock."""
        return self._lock_file

    def __str__(self) -> str:
        return f"The file lock '{self._lock_file}' could not be acquired."

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.lock_file!r})"

    def __reduce__(self) -> str | tuple[Any, ...]:
        # Make the exception picklable despite the custom constructor argument.
        return self.__class__, (self._lock_file,)
|
26 |
+
|
27 |
+
|
28 |
+
__all__ = [
|
29 |
+
"Timeout",
|
30 |
+
]
|
env-llmeval/lib/python3.10/site-packages/filelock/_soft.py
ADDED
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
from contextlib import suppress
|
6 |
+
from errno import EACCES, EEXIST
|
7 |
+
from pathlib import Path
|
8 |
+
|
9 |
+
from ._api import BaseFileLock
|
10 |
+
from ._util import ensure_directory_exists, raise_on_not_writable_file
|
11 |
+
|
12 |
+
|
13 |
+
class SoftFileLock(BaseFileLock):
    """Simply watches the existence of the lock file."""

    def _acquire(self) -> None:
        # Check up-front for an existing read-only file, because the open below would mask that case as EEXIST.
        raise_on_not_writable_file(self.lock_file)
        ensure_directory_exists(self.lock_file)
        # O_CREAT | O_EXCL makes the open fail with EEXIST when the lock file already exists (lock held).
        flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC
        try:
            file_handler = os.open(self.lock_file, flags, self._context.mode)
        except OSError as exception:
            acquisition_blocked = exception.errno == EEXIST or (  # lock already exists
                exception.errno == EACCES and sys.platform == "win32"  # no access to this lock
            )
            if not acquisition_blocked:  # pragma: win32 no cover
                raise  # anything else is unexpected -- re-raise
        else:
            self._context.lock_file_fd = file_handler

    def _release(self) -> None:
        assert self._context.lock_file_fd is not None  # noqa: S101
        os.close(self._context.lock_file_fd)  # the lock file is definitely not None
        self._context.lock_file_fd = None
        with suppress(OSError):  # the file is already deleted and that's what we want
            Path(self.lock_file).unlink()
|
43 |
+
|
44 |
+
|
45 |
+
__all__ = [
|
46 |
+
"SoftFileLock",
|
47 |
+
]
|
env-llmeval/lib/python3.10/site-packages/filelock/_unix.py
ADDED
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
from contextlib import suppress
|
6 |
+
from errno import ENOSYS
|
7 |
+
from pathlib import Path
|
8 |
+
from typing import cast
|
9 |
+
|
10 |
+
from ._api import BaseFileLock
|
11 |
+
from ._util import ensure_directory_exists
|
12 |
+
|
13 |
+
#: a flag to indicate if the fcntl API is available
has_fcntl = False
if sys.platform == "win32":  # pragma: win32 cover

    class UnixFileLock(BaseFileLock):
        """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""

        def _acquire(self) -> None:
            raise NotImplementedError

        def _release(self) -> None:
            raise NotImplementedError

else:  # pragma: win32 no cover
    try:
        import fcntl
    except ImportError:
        pass
    else:
        has_fcntl = True

    class UnixFileLock(BaseFileLock):
        """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""

        def _acquire(self) -> None:
            ensure_directory_exists(self.lock_file)
            flags = os.O_RDWR | os.O_TRUNC
            if not Path(self.lock_file).exists():
                flags |= os.O_CREAT
            fd = os.open(self.lock_file, flags, self._context.mode)
            with suppress(PermissionError):  # the lock file may not be owned by this UID
                os.fchmod(fd, self._context.mode)
            try:
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except OSError as exception:
                os.close(fd)
                if exception.errno == ENOSYS:  # flock is not implemented on this filesystem
                    msg = "FileSystem does not appear to support flock; use SoftFileLock instead"
                    raise NotImplementedError(msg) from exception
                # any other OSError: acquisition simply failed; the caller's retry loop handles it
            else:
                self._context.lock_file_fd = fd

        def _release(self) -> None:
            # Do not remove the lockfile:
            # https://github.com/tox-dev/py-filelock/issues/31
            # https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
            fd = cast(int, self._context.lock_file_fd)
            self._context.lock_file_fd = None
            fcntl.flock(fd, fcntl.LOCK_UN)
            os.close(fd)


__all__ = [
    "UnixFileLock",
    "has_fcntl",
]
|
env-llmeval/lib/python3.10/site-packages/filelock/_util.py
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import stat
|
5 |
+
import sys
|
6 |
+
from errno import EACCES, EISDIR
|
7 |
+
from pathlib import Path
|
8 |
+
|
9 |
+
|
10 |
+
def raise_on_not_writable_file(filename: str) -> None:
    """
    Raise an exception if attempting to open the file for writing would fail.

    This is done so files that will never be writable can be separated from files that are writable but currently
    locked.

    :param filename: file to check
    :raises OSError: as if the file was opened for writing.

    """
    try:  # stat gives exists + writability in one call, avoiding a race between separate checks
        file_stat = os.stat(filename)  # noqa: PTH116
    except OSError:
        return  # file does not exist (or stat failed for another reason) -- nothing to report

    if file_stat.st_mtime == 0:  # a zero modification time marks an invalid stat result -- ignore it
        return

    if not (file_stat.st_mode & stat.S_IWUSR):  # owner has no write permission
        raise PermissionError(EACCES, "Permission denied", filename)

    if stat.S_ISDIR(file_stat.st_mode):
        if sys.platform == "win32":  # pragma: win32 cover
            # On Windows, this is PermissionError
            raise PermissionError(EACCES, "Permission denied", filename)
        # On linux / macOS, this is IsADirectoryError
        raise IsADirectoryError(EISDIR, "Is a directory", filename)  # pragma: win32 no cover
|
37 |
+
|
38 |
+
|
39 |
+
def ensure_directory_exists(filename: Path | str) -> None:
    """
    Ensure the directory containing the file exists (create it if necessary).

    :param filename: file.

    """
    # parents=True builds intermediate directories; exist_ok=True tolerates concurrent creation.
    directory = Path(filename).parent
    directory.mkdir(parents=True, exist_ok=True)
|
47 |
+
|
48 |
+
|
49 |
+
__all__ = [
|
50 |
+
"ensure_directory_exists",
|
51 |
+
"raise_on_not_writable_file",
|
52 |
+
]
|
env-llmeval/lib/python3.10/site-packages/filelock/_windows.py
ADDED
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
from contextlib import suppress
|
6 |
+
from errno import EACCES
|
7 |
+
from pathlib import Path
|
8 |
+
from typing import cast
|
9 |
+
|
10 |
+
from ._api import BaseFileLock
|
11 |
+
from ._util import ensure_directory_exists, raise_on_not_writable_file
|
12 |
+
|
13 |
+
if sys.platform == "win32":  # pragma: win32 cover
    import msvcrt

    class WindowsFileLock(BaseFileLock):
        """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""

        def _acquire(self) -> None:
            raise_on_not_writable_file(self.lock_file)
            ensure_directory_exists(self.lock_file)
            # open read/write, create if missing, truncate if not empty
            flags = os.O_RDWR | os.O_CREAT | os.O_TRUNC
            try:
                fd = os.open(self.lock_file, flags, self._context.mode)
            except OSError as exception:
                if exception.errno != EACCES:  # only "no access to this lock" is tolerated
                    raise
            else:
                try:
                    msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
                except OSError as exception:
                    os.close(fd)  # close file first
                    if exception.errno != EACCES:  # EACCES here means the file is already locked
                        raise
                else:
                    self._context.lock_file_fd = fd

        def _release(self) -> None:
            fd = cast(int, self._context.lock_file_fd)
            self._context.lock_file_fd = None
            msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
            os.close(fd)

            with suppress(OSError):  # probably another instance of the application acquired the file lock
                Path(self.lock_file).unlink()

else:  # pragma: win32 no cover

    class WindowsFileLock(BaseFileLock):
        """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""

        def _acquire(self) -> None:
            raise NotImplementedError

        def _release(self) -> None:
            raise NotImplementedError


__all__ = [
    "WindowsFileLock",
]
|
env-llmeval/lib/python3.10/site-packages/filelock/py.typed
ADDED
File without changes
|
env-llmeval/lib/python3.10/site-packages/filelock/version.py
ADDED
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# file generated by setuptools_scm
|
2 |
+
# don't change, don't track in version control
|
3 |
+
TYPE_CHECKING = False
|
4 |
+
if TYPE_CHECKING:
|
5 |
+
from typing import Tuple, Union
|
6 |
+
VERSION_TUPLE = Tuple[Union[int, str], ...]
|
7 |
+
else:
|
8 |
+
VERSION_TUPLE = object
|
9 |
+
|
10 |
+
version: str
|
11 |
+
__version__: str
|
12 |
+
__version_tuple__: VERSION_TUPLE
|
13 |
+
version_tuple: VERSION_TUPLE
|
14 |
+
|
15 |
+
__version__ = version = '3.13.4'
|
16 |
+
__version_tuple__ = version_tuple = (3, 13, 4)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (635 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc
ADDED
Binary file (1.57 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc
ADDED
Binary file (3.15 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc
ADDED
Binary file (1.84 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (749 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc
ADDED
Binary file (8.2 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc
ADDED
Binary file (2.79 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc
ADDED
Binary file (4.71 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc
ADDED
Binary file (4.24 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from .file_cache import FileCache # noqa
|
6 |
+
from .redis_cache import RedisCache # noqa
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (286 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc
ADDED
Binary file (3.36 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc
ADDED
Binary file (1.57 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
ADDED
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import hashlib
|
6 |
+
import os
|
7 |
+
from textwrap import dedent
|
8 |
+
|
9 |
+
from ..cache import BaseCache
|
10 |
+
from ..controller import CacheController
|
11 |
+
|
12 |
+
try:
|
13 |
+
FileNotFoundError
|
14 |
+
except NameError:
|
15 |
+
# py2.X
|
16 |
+
FileNotFoundError = (IOError, OSError)
|
17 |
+
|
18 |
+
|
19 |
+
def _secure_open_write(filename, fmode):
|
20 |
+
# We only want to write to this file, so open it in write only mode
|
21 |
+
flags = os.O_WRONLY
|
22 |
+
|
23 |
+
# os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
|
24 |
+
# will open *new* files.
|
25 |
+
# We specify this because we want to ensure that the mode we pass is the
|
26 |
+
# mode of the file.
|
27 |
+
flags |= os.O_CREAT | os.O_EXCL
|
28 |
+
|
29 |
+
# Do not follow symlinks to prevent someone from making a symlink that
|
30 |
+
# we follow and insecurely open a cache file.
|
31 |
+
if hasattr(os, "O_NOFOLLOW"):
|
32 |
+
flags |= os.O_NOFOLLOW
|
33 |
+
|
34 |
+
# On Windows we'll mark this file as binary
|
35 |
+
if hasattr(os, "O_BINARY"):
|
36 |
+
flags |= os.O_BINARY
|
37 |
+
|
38 |
+
# Before we open our file, we want to delete any existing file that is
|
39 |
+
# there
|
40 |
+
try:
|
41 |
+
os.remove(filename)
|
42 |
+
except (IOError, OSError):
|
43 |
+
# The file must not exist already, so we can just skip ahead to opening
|
44 |
+
pass
|
45 |
+
|
46 |
+
# Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
|
47 |
+
# race condition happens between the os.remove and this line, that an
|
48 |
+
# error will be raised. Because we utilize a lockfile this should only
|
49 |
+
# happen if someone is attempting to attack us.
|
50 |
+
fd = os.open(filename, flags, fmode)
|
51 |
+
try:
|
52 |
+
return os.fdopen(fd, "wb")
|
53 |
+
|
54 |
+
except:
|
55 |
+
# An error occurred wrapping our FD in a file object
|
56 |
+
os.close(fd)
|
57 |
+
raise
|
58 |
+
|
59 |
+
|
60 |
+
class FileCache(BaseCache):
    """Disk-backed cache.

    Each entry is stored as a file whose path is derived from a SHA-224
    digest of the key, sharded across five single-character
    subdirectories to keep directory sizes small.  Writes are
    serialized through ``lockfile`` locks.
    """

    def __init__(
        self,
        directory,
        forever=False,
        filemode=0o0600,
        dirmode=0o0700,
        use_dir_lock=None,
        lock_class=None,
    ):

        if use_dir_lock is not None and lock_class is not None:
            raise ValueError("Cannot use use_dir_lock and lock_class together")

        try:
            from lockfile import LockFile
            from lockfile.mkdirlockfile import MkdirLockFile
        except ImportError:
            notice = dedent(
                """
            NOTE: In order to use the FileCache you must have
            lockfile installed. You can install it via pip:
                pip install lockfile
            """
            )
            raise ImportError(notice)

        else:
            if use_dir_lock:
                lock_class = MkdirLockFile

            elif lock_class is None:
                lock_class = LockFile

        self.directory = directory
        self.forever = forever
        self.filemode = filemode
        self.dirmode = dirmode
        self.lock_class = lock_class

    @staticmethod
    def encode(x):
        # SHA-224 keeps the resulting filename length manageable and stable.
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name):
        # NOTE: This method should not change as some may depend on it.
        # See: https://github.com/ionrock/cachecontrol/issues/63
        digest = self.encode(name)
        return os.path.join(self.directory, *digest[:5], digest)

    def get(self, key):
        # EAFP: a missing file is simply a cache miss.
        try:
            with open(self._fn(key), "rb") as fh:
                return fh.read()
        except FileNotFoundError:
            return None

    def set(self, key, value, expires=None):
        name = self._fn(key)

        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(name), self.dirmode)
        except (IOError, OSError):
            pass

        with self.lock_class(name) as lock:
            # Write our actual file
            with _secure_open_write(lock.path, self.filemode) as fh:
                fh.write(value)

    def delete(self, key):
        # When ``forever`` is set, entries are never removed.
        if self.forever:
            return
        try:
            os.remove(self._fn(key))
        except FileNotFoundError:
            pass
|
142 |
+
|
143 |
+
|
144 |
+
def url_to_file_path(url, filecache):
    """Return the file cache path based on the URL.

    This does not ensure the file exists!
    """
    # Normalize the URL to the cache key, then map it to a path.
    return filecache._fn(CacheController.cache_url(url))
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from __future__ import division
|
6 |
+
|
7 |
+
from datetime import datetime
|
8 |
+
from pip._vendor.cachecontrol.cache import BaseCache
|
9 |
+
|
10 |
+
|
11 |
+
class RedisCache(BaseCache):
    """A :class:`BaseCache` backed by a redis connection object."""

    def __init__(self, conn):
        self.conn = conn

    def get(self, key):
        return self.conn.get(key)

    def set(self, key, value, expires=None):
        if expires:
            # ``expires`` is a naive UTC datetime; convert to a TTL in
            # seconds for SETEX.  (Naive ``utcnow`` kept deliberately —
            # an aware datetime would not subtract against it.)
            remaining = expires - datetime.utcnow()
            self.conn.setex(key, int(remaining.total_seconds()), value)
        else:
            self.conn.set(key, value)

    def delete(self, key):
        self.conn.delete(key)

    def clear(self):
        """Helper for clearing all the keys in a database. Use with
        caution!"""
        for key in self.conn.keys():
            self.conn.delete(key)

    def close(self):
        """Redis uses connection pooling, no need to close the connection."""
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

# Python 2/3 compatibility shims for the vendored cachecontrol package:
# each try/except picks the import location available on the running
# interpreter / requests layout.

try:
    from urllib.parse import urljoin
except ImportError:
    # py2.X location of urljoin
    from urlparse import urljoin


try:
    # cPickle is the faster C implementation on py2
    import cPickle as pickle
except ImportError:
    import pickle

# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
except ImportError:
    from pip._vendor.urllib3.response import HTTPResponse

try:
    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
except ImportError:
    from pip._vendor.urllib3.util import is_fp_closed

# Replicate some six behaviour
try:
    # ``unicode`` only exists on Python 2; the NameError selects ``str`` on 3.
    text_type = unicode
except NameError:
    text_type = str
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py
ADDED
@@ -0,0 +1,415 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
"""
|
6 |
+
The httplib2 algorithms ported for use with requests.
|
7 |
+
"""
|
8 |
+
import logging
|
9 |
+
import re
|
10 |
+
import calendar
|
11 |
+
import time
|
12 |
+
from email.utils import parsedate_tz
|
13 |
+
|
14 |
+
from pip._vendor.requests.structures import CaseInsensitiveDict
|
15 |
+
|
16 |
+
from .cache import DictCache
|
17 |
+
from .serialize import Serializer
|
18 |
+
|
19 |
+
|
20 |
+
logger = logging.getLogger(__name__)

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")

PERMANENT_REDIRECT_STATUSES = (301, 308)


def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

    (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    match = URI.match(uri)
    # The interesting components live in the even-numbered capture
    # groups; the odd ones include the delimiters.
    return (
        match.group(2),  # scheme
        match.group(4),  # authority
        match.group(5),  # path
        match.group(7),  # query
        match.group(9),  # fragment
    )
|
34 |
+
|
35 |
+
|
36 |
+
class CacheController(object):
    """An interface to see if request should be cached or not.
    """

    def __init__(
        self, cache=None, cache_etags=True, serializer=None, status_codes=None
    ):
        # Fall back to an in-memory DictCache and the default serializer.
        self.cache = DictCache() if cache is None else cache
        self.cache_etags = cache_etags
        self.serializer = serializer or Serializer()
        self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)

    @classmethod
    def _urlnorm(cls, uri):
        """Normalize the URL to create a safe key for the cache"""
        (scheme, authority, path, query, fragment) = parse_uri(uri)
        if not scheme or not authority:
            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)

        scheme = scheme.lower()
        authority = authority.lower()

        if not path:
            path = "/"

        # Could do syntax based normalization of the URI before
        # computing the digest. See Section 6.2.2 of Std 66.
        request_uri = query and "?".join([path, query]) or path
        defrag_uri = scheme + "://" + authority + request_uri

        return defrag_uri

    @classmethod
    def cache_url(cls, uri):
        """Return the normalized cache key for *uri* (fragment dropped)."""
        return cls._urlnorm(uri)

    def parse_cache_control(self, headers):
        """Parse a Cache-Control header into a directive -> value dict.

        Unknown directives are ignored with a debug log; value-less
        directives map to ``None``.
        """
        known_directives = {
            # https://tools.ietf.org/html/rfc7234#section-5.2
            "max-age": (int, True),
            "max-stale": (int, False),
            "min-fresh": (int, True),
            "no-cache": (None, False),
            "no-store": (None, False),
            "no-transform": (None, False),
            "only-if-cached": (None, False),
            "must-revalidate": (None, False),
            "public": (None, False),
            "private": (None, False),
            "proxy-revalidate": (None, False),
            "s-maxage": (int, True),
        }

        # ``headers`` may be a plain dict, so probe both capitalizations.
        cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))

        retval = {}

        for cc_directive in cc_headers.split(","):
            if not cc_directive.strip():
                continue

            parts = cc_directive.split("=", 1)
            directive = parts[0].strip()

            try:
                typ, required = known_directives[directive]
            except KeyError:
                logger.debug("Ignoring unknown cache-control directive: %s", directive)
                continue

            if not typ or not required:
                retval[directive] = None
            if typ:
                try:
                    retval[directive] = typ(parts[1].strip())
                except IndexError:
                    if required:
                        logger.debug(
                            "Missing value for cache-control " "directive: %s",
                            directive,
                        )
                except ValueError:
                    logger.debug(
                        "Invalid value for cache-control directive " "%s, must be %s",
                        directive,
                        typ.__name__,
                    )

        return retval

    def cached_request(self, request):
        """
        Return a cached response if it exists in the cache, otherwise
        return False.
        """
        cache_url = self.cache_url(request.url)
        logger.debug('Looking up "%s" in the cache', cache_url)
        cc = self.parse_cache_control(request.headers)

        # Bail out if the request insists on fresh data
        if "no-cache" in cc:
            logger.debug('Request header has "no-cache", cache bypassed')
            return False

        if "max-age" in cc and cc["max-age"] == 0:
            logger.debug('Request header has "max_age" as 0, cache bypassed')
            return False

        # Request allows serving from the cache, let's see if we find something
        cache_data = self.cache.get(cache_url)
        if cache_data is None:
            logger.debug("No cache entry available")
            return False

        # Check whether it can be deserialized
        resp = self.serializer.loads(request, cache_data)
        if not resp:
            logger.warning("Cache entry deserialization failed, entry ignored")
            return False

        # If we have a cached permanent redirect, return it immediately. We
        # don't need to test our response for other headers b/c it is
        # intrinsically "cacheable" as it is Permanent.
        #
        # See:
        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
        #
        # Client can try to refresh the value by repeating the request
        # with cache busting headers as usual (ie no-cache).
        if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
            msg = (
                "Returning cached permanent redirect response "
                "(ignoring date and etag information)"
            )
            logger.debug(msg)
            return resp

        headers = CaseInsensitiveDict(resp.headers)
        if not headers or "date" not in headers:
            if "etag" not in headers:
                # Without date or etag, the cached response can never be used
                # and should be deleted.
                logger.debug("Purging cached response: no date or etag")
                self.cache.delete(cache_url)
            logger.debug("Ignoring cached response: no date")
            return False

        now = time.time()
        date = calendar.timegm(parsedate_tz(headers["date"]))
        current_age = max(0, now - date)
        logger.debug("Current age based on date: %i", current_age)

        # TODO: There is an assumption that the result will be a
        # urllib3 response object. This may not be best since we
        # could probably avoid instantiating or constructing the
        # response until we know we need it.
        resp_cc = self.parse_cache_control(headers)

        # determine freshness
        freshness_lifetime = 0

        # Check the max-age pragma in the cache control header
        if "max-age" in resp_cc:
            freshness_lifetime = resp_cc["max-age"]
            logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)

        # If there isn't a max-age, check for an expires header
        elif "expires" in headers:
            expires = parsedate_tz(headers["expires"])
            if expires is not None:
                expire_time = calendar.timegm(expires) - date
                freshness_lifetime = max(0, expire_time)
                logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)

        # Determine if we are setting freshness limit in the
        # request. Note, this overrides what was in the response.
        if "max-age" in cc:
            freshness_lifetime = cc["max-age"]
            logger.debug(
                "Freshness lifetime from request max-age: %i", freshness_lifetime
            )

        if "min-fresh" in cc:
            min_fresh = cc["min-fresh"]
            # adjust our current age by our min fresh
            current_age += min_fresh
            logger.debug("Adjusted current age from min-fresh: %i", current_age)

        # Return entry if it is fresh enough
        if freshness_lifetime > current_age:
            logger.debug('The response is "fresh", returning cached response')
            logger.debug("%i > %i", freshness_lifetime, current_age)
            return resp

        # we're not fresh. If we don't have an Etag, clear it out
        if "etag" not in headers:
            logger.debug('The cached response is "stale" with no etag, purging')
            self.cache.delete(cache_url)

        # return the original handler
        return False

    def conditional_headers(self, request):
        """Build If-None-Match / If-Modified-Since headers for
        revalidating a stale cached response."""
        cache_url = self.cache_url(request.url)
        resp = self.serializer.loads(request, self.cache.get(cache_url))
        new_headers = {}

        if resp:
            headers = CaseInsensitiveDict(resp.headers)

            if "etag" in headers:
                new_headers["If-None-Match"] = headers["ETag"]

            if "last-modified" in headers:
                new_headers["If-Modified-Since"] = headers["Last-Modified"]

        return new_headers

    def cache_response(self, request, response, body=None, status_codes=None):
        """
        Algorithm for caching requests.

        This assumes a requests Response object.
        """
        # From httplib2: Don't cache 206's since we aren't going to
        # handle byte range requests
        cacheable_status_codes = status_codes or self.cacheable_status_codes
        if response.status not in cacheable_status_codes:
            logger.debug(
                "Status code %s not in %s", response.status, cacheable_status_codes
            )
            return

        response_headers = CaseInsensitiveDict(response.headers)

        if "date" in response_headers:
            date = calendar.timegm(parsedate_tz(response_headers["date"]))
        else:
            date = 0

        # If we've been given a body, our response has a Content-Length, that
        # Content-Length is valid then we can check to see if the body we've
        # been given matches the expected size, and if it doesn't we'll just
        # skip trying to cache it.
        if (
            body is not None
            and "content-length" in response_headers
            and response_headers["content-length"].isdigit()
            and int(response_headers["content-length"]) != len(body)
        ):
            return

        cc_req = self.parse_cache_control(request.headers)
        cc = self.parse_cache_control(response_headers)

        cache_url = self.cache_url(request.url)
        logger.debug('Updating cache with response from "%s"', cache_url)

        # Delete it from the cache if we happen to have it stored there
        no_store = False
        if "no-store" in cc:
            no_store = True
            logger.debug('Response header has "no-store"')
        if "no-store" in cc_req:
            no_store = True
            logger.debug('Request header has "no-store"')
        if no_store and self.cache.get(cache_url):
            logger.debug('Purging existing cache entry to honor "no-store"')
            self.cache.delete(cache_url)
        if no_store:
            return

        # https://tools.ietf.org/html/rfc7234#section-4.1:
        # A Vary header field-value of "*" always fails to match.
        # Storing such a response leads to a deserialization warning
        # during cache lookup and is not allowed to ever be served,
        # so storing it can be avoided.
        if "*" in response_headers.get("vary", ""):
            logger.debug('Response header has "Vary: *"')
            return

        # If we've been given an etag, then keep the response
        if self.cache_etags and "etag" in response_headers:
            expires_time = 0
            if response_headers.get("expires"):
                expires = parsedate_tz(response_headers["expires"])
                if expires is not None:
                    expires_time = calendar.timegm(expires) - date

            # Keep etag'd entries around for at least two weeks.
            expires_time = max(expires_time, 14 * 86400)

            logger.debug("etag object cached for {0} seconds".format(expires_time))
            logger.debug("Caching due to etag")
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response, body),
                expires=expires_time,
            )

        # Add to the cache any permanent redirects. We do this before looking
        # that the Date headers.
        elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
            logger.debug("Caching permanent redirect")
            self.cache.set(cache_url, self.serializer.dumps(request, response, b""))

        # Add to the cache if the response headers demand it. If there
        # is no date header then we can't do anything about expiring
        # the cache.
        elif "date" in response_headers:
            date = calendar.timegm(parsedate_tz(response_headers["date"]))
            # cache when there is a max-age > 0
            if "max-age" in cc and cc["max-age"] > 0:
                logger.debug("Caching b/c date exists and max-age > 0")
                expires_time = cc["max-age"]
                self.cache.set(
                    cache_url,
                    self.serializer.dumps(request, response, body),
                    expires=expires_time,
                )

            # If the request can expire, it means we should cache it
            # in the meantime.
            elif "expires" in response_headers:
                if response_headers["expires"]:
                    expires = parsedate_tz(response_headers["expires"])
                    if expires is not None:
                        expires_time = calendar.timegm(expires) - date
                    else:
                        expires_time = None

                    logger.debug(
                        "Caching b/c of expires header. expires in {0} seconds".format(
                            expires_time
                        )
                    )
                    self.cache.set(
                        cache_url,
                        self.serializer.dumps(request, response, body=body),
                        expires=expires_time,
                    )

    def update_cached_response(self, request, response):
        """On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        """
        cache_url = self.cache_url(request.url)

        cached_response = self.serializer.loads(request, self.cache.get(cache_url))

        if not cached_response:
            # we didn't have a cached response
            return response

        # Lets update our headers with the headers from the new request:
        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
        #
        # The server isn't supposed to send headers that would make
        # the cached body invalid. But... just in case, we'll be sure
        # to strip out ones we know that might be problmatic due to
        # typical assumptions.
        excluded_headers = ["content-length"]

        cached_response.headers.update(
            dict(
                (k, v)
                for k, v in response.headers.items()
                if k.lower() not in excluded_headers
            )
        )

        # we want a 200 b/c we have content via the cache
        cached_response.status = 200

        # update our cache
        self.cache.set(cache_url, self.serializer.dumps(request, cached_response))

        return cached_response
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from tempfile import NamedTemporaryFile
|
6 |
+
import mmap
|
7 |
+
|
8 |
+
|
9 |
+
class CallbackFileWrapper(object):
    """
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.

    The data is stored in a temporary file until it is all available. As long
    as the temporary files directory is disk-based (sometimes it's a
    memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory
    pressure is high. For small files the disk usually won't be used at all,
    it'll all be in the filesystem memory cache, so there should be no
    performance impact.
    """

    def __init__(self, fp, callback):
        # Disk-backed spool for the teed data; ``delete=True`` lets the OS
        # reclaim the file as soon as it is closed in _close().
        self.__buf = NamedTemporaryFile("rb+", delete=True)
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name):
        # The vagaries of garbage collection means that self.__fp is
        # not always set. By using __getattribute__ and the private
        # name[0] allows looking up the attribute value and raising an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing calls to getattr in the case where
        # self.__fp hasn't been set.
        #
        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__("_CallbackFileWrapper__fp")
        return getattr(fp, name)

    def __is_fp_closed(self):
        # Report whether the wrapped fp has been exhausted/closed.  Two
        # probes are tried because different fp flavours signal EOF
        # differently (``.fp is None`` vs a ``.closed`` flag); which one a
        # given wrapped object provides depends on the caller — the code
        # here just tries both.
        try:
            return self.__fp.fp is None

        except AttributeError:
            pass

        try:
            return self.__fp.closed

        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self):
        # Fire the callback (at most once) with everything buffered so far,
        # then release the temporary spool file.
        if self.__callback:
            if self.__buf.tell() == 0:
                # Empty file:
                result = b""
            else:
                # Return the data without actually loading it into memory,
                # relying on Python's buffer API and mmap(). mmap() just gives
                # a view directly into the filesystem's memory cache, so it
                # doesn't result in duplicate memory use.
                self.__buf.seek(0, 0)
                result = memoryview(
                    mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
                )
            self.__callback(result)

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do its thing normally.
        self.__callback = None

        # Closing the temporary file releases memory and frees disk space.
        # Important when caching big files.
        self.__buf.close()

    def read(self, amt=None):
        # Tee every chunk read from the wrapped fp into the spool buffer,
        # then trigger the callback once the fp reports exhaustion.
        data = self.__fp.read(amt)
        if data:
            # We may be dealing with b'', a sign that things are over:
            # it's passed e.g. after we've already closed self.__buf.
            self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data

    def _safe_read(self, amt):
        # Mirror reads done through the wrapped fp's _safe_read hook
        # (presumably http.client/httplib chunked-transfer reads — TODO
        # confirm against the wrapped object) so those bytes are captured
        # too.
        data = self.__fp._safe_read(amt)
        if amt == 2 and data == b"\r\n":
            # urllib executes this read to toss the CRLF at the end
            # of the chunk.
            return data

        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py
ADDED
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import calendar
|
6 |
+
import time
|
7 |
+
|
8 |
+
from email.utils import formatdate, parsedate, parsedate_tz
|
9 |
+
|
10 |
+
from datetime import datetime, timedelta
|
11 |
+
|
12 |
+
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
13 |
+
|
14 |
+
|
15 |
+
def expire_after(delta, date=None):
    """Return *date* advanced by the timedelta *delta*.

    When *date* is not supplied (or falsy), the current UTC time is used as
    the starting point.
    """
    if not date:
        date = datetime.utcnow()
    return date + delta
|
18 |
+
|
19 |
+
|
20 |
+
def datetime_to_header(dt):
    """Format the datetime *dt* (interpreted as UTC) as an RFC 2822 date
    string suitable for an HTTP header value."""
    epoch_seconds = calendar.timegm(dt.timetuple())
    return formatdate(epoch_seconds)
|
22 |
+
|
23 |
+
|
24 |
+
class BaseHeuristic(object):
    """Base class for cache-freshness heuristics.

    Subclasses override :meth:`update_headers` (and optionally
    :meth:`warning`); :meth:`apply` wires the two together and mutates the
    response's headers in place.
    """

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided to allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
        signify that the response was cached by the client, not
        by way of the provided headers.
        """
        return {}

    def apply(self, response):
        """Apply this heuristic to *response*, returning it.

        Headers are only touched (and a Warning only attached) when the
        subclass actually produced adjustments.
        """
        adjustments = self.update_headers(response)
        if not adjustments:
            return response

        response.headers.update(adjustments)
        warn_value = self.warning(response)
        if warn_value is not None:
            response.headers.update({"Warning": warn_value})
        return response
|
56 |
+
|
57 |
+
|
58 |
+
class OneDayCache(BaseHeuristic):
    """Heuristic that makes responses cacheable for one day.

    If the response has no ``Expires`` header, one is synthesized a day past
    the response's own ``Date`` header and ``Cache-Control: public`` is added
    alongside it; otherwise no adjustment is made.
    """

    def update_headers(self, response):
        extra = {}

        if "expires" not in response.headers:
            served = parsedate(response.headers["date"])
            one_day_later = expire_after(
                timedelta(days=1), date=datetime(*served[:6])
            )
            extra["expires"] = datetime_to_header(one_day_later)
            extra["cache-control"] = "public"
        return extra
|
73 |
+
|
74 |
+
|
75 |
+
class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.

    The period is given as :class:`datetime.timedelta` keyword arguments,
    e.g. ``ExpiresAfter(days=1)``.
    """

    def __init__(self, **kw):
        # The caching window, built directly from the keyword arguments.
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        """Stamp an ``Expires`` (now + delta) and a public cache-control."""
        return {
            "expires": datetime_to_header(expire_after(self.delta)),
            "cache-control": "public",
        }

    def warning(self, response):
        """Warn that the response was cached automatically and may be stale."""
        return (
            "110 - Automatically cached for %s. Response might be stale"
            % self.delta
        )
|
90 |
+
|
91 |
+
|
92 |
+
class LastModified(BaseHeuristic):
    """Derive freshness from ``Last-Modified`` when ``Expires`` is absent.

    Uses the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2: a response stays fresh
    for one tenth of the time elapsed since it was last modified.  Firefox
    does something similar per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ and
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    but, unlike Mozilla, the lifetime here is capped at 24 hours.
    """

    # Status codes that may be cached by default (heuristically cacheable).
    cacheable_by_default_statuses = {
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    }

    def update_headers(self, resp):
        hdrs = resp.headers

        # Guard clauses: skip when an explicit freshness policy exists,
        # the status is not heuristically cacheable, or the needed headers
        # are missing.
        if "expires" in hdrs:
            return {}
        if "cache-control" in hdrs and hdrs["cache-control"] != "public":
            return {}
        if resp.status not in self.cacheable_by_default_statuses:
            return {}
        if "date" not in hdrs or "last-modified" not in hdrs:
            return {}

        # NOTE(review): parsedate_tz() can return None for a malformed Date
        # header, making timegm() raise TypeError before the None check
        # below fires — behavior preserved from the original.
        served_at = calendar.timegm(parsedate_tz(hdrs["date"]))
        modified_at = parsedate(hdrs["last-modified"])
        if served_at is None or modified_at is None:
            return {}

        now = time.time()
        current_age = max(0, now - served_at)
        # One tenth of the Date/Last-Modified gap, capped at 24 hours.
        modified_gap = served_at - calendar.timegm(modified_at)
        freshness_lifetime = max(0, min(modified_gap / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expiry = served_at + freshness_lifetime
        return {"expires": time.strftime(TIME_FMT, time.gmtime(expiry))}

    def warning(self, resp):
        # No Warning header: the heuristic expiry computed above is
        # deliberately conservative.
        return None
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc
ADDED
Binary file (1.68 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc
ADDED
Binary file (3.94 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (1.42 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-310.pyc
ADDED
Binary file (211 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc
ADDED
Binary file (1.8 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc
ADDED
Binary file (6.31 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc
ADDED
Binary file (25.4 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/msgpack/_version.py
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
# Version of the vendored msgpack package as a (major, minor, micro) tuple.
version = (1, 0, 3)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__init__.py
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Names re-exported as the public API of the vendored resolvelib package.
__all__ = [
    "__version__",
    "AbstractProvider",
    "AbstractResolver",
    "BaseReporter",
    "InconsistentCandidate",
    "Resolver",
    "RequirementsConflicted",
    "ResolutionError",
    "ResolutionImpossible",
    "ResolutionTooDeep",
]

__version__ = "0.8.1"


# Re-export the concrete classes from the submodules so callers can import
# everything from the package's top level.
from .providers import AbstractProvider, AbstractResolver
from .reporters import BaseReporter
from .resolvers import (
    InconsistentCandidate,
    RequirementsConflicted,
    ResolutionError,
    ResolutionImpossible,
    ResolutionTooDeep,
    Resolver,
)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (599 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-310.pyc
ADDED
Binary file (6.65 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-310.pyc
ADDED
Binary file (2.57 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-310.pyc
ADDED
Binary file (15.1 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-310.pyc
ADDED
Binary file (7.16 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__init__.py
ADDED
File without changes
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (194 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-310.pyc
ADDED
Binary file (370 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Compatibility shim: import the Mapping/Sequence ABCs from their modern home
# in ``collections.abc`` and fall back to the legacy location for old Pythons.
__all__ = ["Mapping", "Sequence"]

try:
    from collections.abc import Mapping, Sequence
except ImportError:
    # Old interpreters exposed the ABCs directly on ``collections``; that
    # alias was removed in Python 3.10, so this branch only runs on legacy
    # versions.
    from collections import Mapping, Sequence
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/resolvelib/providers.py
ADDED
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
class AbstractProvider(object):
    """Delegate interface through which the resolver learns about requirements."""

    def identify(self, requirement_or_candidate):
        """Return an identifier for a requirement or candidate.

        Requirements with equal identifiers are treated as the same
        dependency, e.g. their specifier parts may be merged.
        """
        raise NotImplementedError

    def get_preference(
        self,
        identifier,
        resolutions,
        candidates,
        information,
        backtrack_causes,
    ):
        """Produce a sort key expressing how soon *identifier* should be resolved.

        The preference means "this requirement should be resolved first";
        the lower the returned key, the more preferred this group of
        arguments is.

        :param identifier: An identifier as returned by ``identify()``;
            names the dependency whose preference is being computed.
        :param resolutions: Mapping of identifier to the candidate currently
            pinned by the resolver. The candidate may conflict with
            requirements from ``information``.
        :param candidates: Mapping of identifier to an iterator of that
            dependency's possible candidates.
        :param information: Mapping of identifier to an iterator of
            *requirement information* for that package.
        :param backtrack_causes: Sequence of requirement information that
            caused the resolver to most recently backtrack.

        A *requirement information* instance is a named tuple with two
        members:

        * ``requirement`` — a requirement contributing to the current list
          of candidates.
        * ``parent`` — the candidate that depends on the requirement, or
          ``None`` for a root requirement.

        Inputs that might influence the preference include (in no
        particular order): whether the package is already pinned, how strict
        the requirement is, how many candidates can still satisfy it, and
        how many known conflicts it has.

        The return value is used as the ``key`` parameter of the built-in
        sorting function with ``reverse=False``, so smaller values are
        resolved earlier.
        """
        raise NotImplementedError

    def find_matches(self, identifier, requirements, incompatibilities):
        """Find all possible candidates satisfying the given constraints.

        :param identifier: An identifier as returned by ``identify()``;
            names the dependency whose matches should be returned.
        :param requirements: Mapping of identifier to an iterator of
            requirements that every returned candidate must satisfy.
        :param incompatibilities: Mapping of identifier to an iterator of
            known incompatibilities; all of them *must* be excluded from the
            return value.

        Candidates should be produced based on the requirements' types: VCS,
        local, and archive requirements have a one-and-only match, while a
        "named" requirement is looked up in the index(es).

        The result must be ordered most-preferred-first and may be any of:

        * a callable returning an iterator that yields candidates,
        * a collection of candidates,
        * an iterable of candidates (consumed immediately into a list).
        """
        raise NotImplementedError

    def is_satisfied_by(self, requirement, candidate):
        """Return whether *candidate* satisfies *requirement*.

        The candidate is guaranteed to have been generated from the
        requirement; a boolean indicates whether it is a viable solution.
        """
        raise NotImplementedError

    def get_dependencies(self, candidate):
        """Return the collection of requirements *candidate* depends on."""
        raise NotImplementedError
|
110 |
+
|
111 |
+
|
112 |
+
class AbstractResolver(object):
    """Skeleton for the object that performs the actual resolution work."""

    # Root of the exception hierarchy raised by resolve(); subclasses may
    # narrow it.
    base_exception = Exception

    def __init__(self, provider, reporter):
        self.provider = provider
        self.reporter = reporter

    def resolve(self, requirements, **kwargs):
        """Take a collection of constraints and return the resolution result.

        The result is a representation of the final resolution state with
        one guaranteed attribute, ``mapping``, holding resolved candidates
        keyed by their respective identifiers.

        :param requirements: A collection of constraints.
        :param kwargs: Additional keyword arguments that subclasses may
            accept.

        :raises: ``self.base_exception`` or its subclass.
        """
        raise NotImplementedError
|