Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/__init__.py +23 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/compat.py +1116 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/database.py +1345 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/index.py +509 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/manifest.py +393 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/markers.py +152 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/metadata.py +1058 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/resources.py +358 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/scripts.py +429 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/util.py +1932 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/version.py +739 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/py31compat.py +23 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__init__.py +331 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__main__.py +46 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/android.py +119 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/api.py +156 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/macos.py +64 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/unix.py +181 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/version.py +4 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/windows.py +182 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/models.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__version__.py +14 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/adapters.py +538 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/api.py +159 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/certs.py +18 -0
- env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/hooks.py +34 -0
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/__init__.py
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2012-2019 Vinay Sajip.
|
4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
6 |
+
#
|
7 |
+
import logging
|
8 |
+
|
9 |
+
__version__ = '0.3.4'
|
10 |
+
|
11 |
+
class DistlibException(Exception):
|
12 |
+
pass
|
13 |
+
|
14 |
+
try:
|
15 |
+
from logging import NullHandler
|
16 |
+
except ImportError: # pragma: no cover
|
17 |
+
class NullHandler(logging.Handler):
|
18 |
+
def handle(self, record): pass
|
19 |
+
def emit(self, record): pass
|
20 |
+
def createLock(self): self.lock = None
|
21 |
+
|
22 |
+
logger = logging.getLogger(__name__)
|
23 |
+
logger.addHandler(NullHandler())
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/compat.py
ADDED
@@ -0,0 +1,1116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2013-2017 Vinay Sajip.
|
4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
6 |
+
#
|
7 |
+
from __future__ import absolute_import
|
8 |
+
|
9 |
+
import os
|
10 |
+
import re
|
11 |
+
import sys
|
12 |
+
|
13 |
+
try:
|
14 |
+
import ssl
|
15 |
+
except ImportError: # pragma: no cover
|
16 |
+
ssl = None
|
17 |
+
|
18 |
+
if sys.version_info[0] < 3: # pragma: no cover
|
19 |
+
from StringIO import StringIO
|
20 |
+
string_types = basestring,
|
21 |
+
text_type = unicode
|
22 |
+
from types import FileType as file_type
|
23 |
+
import __builtin__ as builtins
|
24 |
+
import ConfigParser as configparser
|
25 |
+
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
|
26 |
+
from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
|
27 |
+
pathname2url, ContentTooShortError, splittype)
|
28 |
+
|
29 |
+
def quote(s):
|
30 |
+
if isinstance(s, unicode):
|
31 |
+
s = s.encode('utf-8')
|
32 |
+
return _quote(s)
|
33 |
+
|
34 |
+
import urllib2
|
35 |
+
from urllib2 import (Request, urlopen, URLError, HTTPError,
|
36 |
+
HTTPBasicAuthHandler, HTTPPasswordMgr,
|
37 |
+
HTTPHandler, HTTPRedirectHandler,
|
38 |
+
build_opener)
|
39 |
+
if ssl:
|
40 |
+
from urllib2 import HTTPSHandler
|
41 |
+
import httplib
|
42 |
+
import xmlrpclib
|
43 |
+
import Queue as queue
|
44 |
+
from HTMLParser import HTMLParser
|
45 |
+
import htmlentitydefs
|
46 |
+
raw_input = raw_input
|
47 |
+
from itertools import ifilter as filter
|
48 |
+
from itertools import ifilterfalse as filterfalse
|
49 |
+
|
50 |
+
# Leaving this around for now, in case it needs resurrecting in some way
|
51 |
+
# _userprog = None
|
52 |
+
# def splituser(host):
|
53 |
+
# """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
|
54 |
+
# global _userprog
|
55 |
+
# if _userprog is None:
|
56 |
+
# import re
|
57 |
+
# _userprog = re.compile('^(.*)@(.*)$')
|
58 |
+
|
59 |
+
# match = _userprog.match(host)
|
60 |
+
# if match: return match.group(1, 2)
|
61 |
+
# return None, host
|
62 |
+
|
63 |
+
else: # pragma: no cover
|
64 |
+
from io import StringIO
|
65 |
+
string_types = str,
|
66 |
+
text_type = str
|
67 |
+
from io import TextIOWrapper as file_type
|
68 |
+
import builtins
|
69 |
+
import configparser
|
70 |
+
import shutil
|
71 |
+
from urllib.parse import (urlparse, urlunparse, urljoin, quote,
|
72 |
+
unquote, urlsplit, urlunsplit, splittype)
|
73 |
+
from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
|
74 |
+
pathname2url,
|
75 |
+
HTTPBasicAuthHandler, HTTPPasswordMgr,
|
76 |
+
HTTPHandler, HTTPRedirectHandler,
|
77 |
+
build_opener)
|
78 |
+
if ssl:
|
79 |
+
from urllib.request import HTTPSHandler
|
80 |
+
from urllib.error import HTTPError, URLError, ContentTooShortError
|
81 |
+
import http.client as httplib
|
82 |
+
import urllib.request as urllib2
|
83 |
+
import xmlrpc.client as xmlrpclib
|
84 |
+
import queue
|
85 |
+
from html.parser import HTMLParser
|
86 |
+
import html.entities as htmlentitydefs
|
87 |
+
raw_input = input
|
88 |
+
from itertools import filterfalse
|
89 |
+
filter = filter
|
90 |
+
|
91 |
+
|
92 |
+
try:
|
93 |
+
from ssl import match_hostname, CertificateError
|
94 |
+
except ImportError: # pragma: no cover
|
95 |
+
class CertificateError(ValueError):
|
96 |
+
pass
|
97 |
+
|
98 |
+
|
99 |
+
def _dnsname_match(dn, hostname, max_wildcards=1):
|
100 |
+
"""Matching according to RFC 6125, section 6.4.3
|
101 |
+
|
102 |
+
http://tools.ietf.org/html/rfc6125#section-6.4.3
|
103 |
+
"""
|
104 |
+
pats = []
|
105 |
+
if not dn:
|
106 |
+
return False
|
107 |
+
|
108 |
+
parts = dn.split('.')
|
109 |
+
leftmost, remainder = parts[0], parts[1:]
|
110 |
+
|
111 |
+
wildcards = leftmost.count('*')
|
112 |
+
if wildcards > max_wildcards:
|
113 |
+
# Issue #17980: avoid denials of service by refusing more
|
114 |
+
# than one wildcard per fragment. A survey of established
|
115 |
+
# policy among SSL implementations showed it to be a
|
116 |
+
# reasonable choice.
|
117 |
+
raise CertificateError(
|
118 |
+
"too many wildcards in certificate DNS name: " + repr(dn))
|
119 |
+
|
120 |
+
# speed up common case w/o wildcards
|
121 |
+
if not wildcards:
|
122 |
+
return dn.lower() == hostname.lower()
|
123 |
+
|
124 |
+
# RFC 6125, section 6.4.3, subitem 1.
|
125 |
+
# The client SHOULD NOT attempt to match a presented identifier in which
|
126 |
+
# the wildcard character comprises a label other than the left-most label.
|
127 |
+
if leftmost == '*':
|
128 |
+
# When '*' is a fragment by itself, it matches a non-empty dotless
|
129 |
+
# fragment.
|
130 |
+
pats.append('[^.]+')
|
131 |
+
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
|
132 |
+
# RFC 6125, section 6.4.3, subitem 3.
|
133 |
+
# The client SHOULD NOT attempt to match a presented identifier
|
134 |
+
# where the wildcard character is embedded within an A-label or
|
135 |
+
# U-label of an internationalized domain name.
|
136 |
+
pats.append(re.escape(leftmost))
|
137 |
+
else:
|
138 |
+
# Otherwise, '*' matches any dotless string, e.g. www*
|
139 |
+
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
|
140 |
+
|
141 |
+
# add the remaining fragments, ignore any wildcards
|
142 |
+
for frag in remainder:
|
143 |
+
pats.append(re.escape(frag))
|
144 |
+
|
145 |
+
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
|
146 |
+
return pat.match(hostname)
|
147 |
+
|
148 |
+
|
149 |
+
def match_hostname(cert, hostname):
|
150 |
+
"""Verify that *cert* (in decoded format as returned by
|
151 |
+
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
|
152 |
+
rules are followed, but IP addresses are not accepted for *hostname*.
|
153 |
+
|
154 |
+
CertificateError is raised on failure. On success, the function
|
155 |
+
returns nothing.
|
156 |
+
"""
|
157 |
+
if not cert:
|
158 |
+
raise ValueError("empty or no certificate, match_hostname needs a "
|
159 |
+
"SSL socket or SSL context with either "
|
160 |
+
"CERT_OPTIONAL or CERT_REQUIRED")
|
161 |
+
dnsnames = []
|
162 |
+
san = cert.get('subjectAltName', ())
|
163 |
+
for key, value in san:
|
164 |
+
if key == 'DNS':
|
165 |
+
if _dnsname_match(value, hostname):
|
166 |
+
return
|
167 |
+
dnsnames.append(value)
|
168 |
+
if not dnsnames:
|
169 |
+
# The subject is only checked when there is no dNSName entry
|
170 |
+
# in subjectAltName
|
171 |
+
for sub in cert.get('subject', ()):
|
172 |
+
for key, value in sub:
|
173 |
+
# XXX according to RFC 2818, the most specific Common Name
|
174 |
+
# must be used.
|
175 |
+
if key == 'commonName':
|
176 |
+
if _dnsname_match(value, hostname):
|
177 |
+
return
|
178 |
+
dnsnames.append(value)
|
179 |
+
if len(dnsnames) > 1:
|
180 |
+
raise CertificateError("hostname %r "
|
181 |
+
"doesn't match either of %s"
|
182 |
+
% (hostname, ', '.join(map(repr, dnsnames))))
|
183 |
+
elif len(dnsnames) == 1:
|
184 |
+
raise CertificateError("hostname %r "
|
185 |
+
"doesn't match %r"
|
186 |
+
% (hostname, dnsnames[0]))
|
187 |
+
else:
|
188 |
+
raise CertificateError("no appropriate commonName or "
|
189 |
+
"subjectAltName fields were found")
|
190 |
+
|
191 |
+
|
192 |
+
try:
|
193 |
+
from types import SimpleNamespace as Container
|
194 |
+
except ImportError: # pragma: no cover
|
195 |
+
class Container(object):
|
196 |
+
"""
|
197 |
+
A generic container for when multiple values need to be returned
|
198 |
+
"""
|
199 |
+
def __init__(self, **kwargs):
|
200 |
+
self.__dict__.update(kwargs)
|
201 |
+
|
202 |
+
|
203 |
+
try:
|
204 |
+
from shutil import which
|
205 |
+
except ImportError: # pragma: no cover
|
206 |
+
# Implementation from Python 3.3
|
207 |
+
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
|
208 |
+
"""Given a command, mode, and a PATH string, return the path which
|
209 |
+
conforms to the given mode on the PATH, or None if there is no such
|
210 |
+
file.
|
211 |
+
|
212 |
+
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
|
213 |
+
of os.environ.get("PATH"), or can be overridden with a custom search
|
214 |
+
path.
|
215 |
+
|
216 |
+
"""
|
217 |
+
# Check that a given file can be accessed with the correct mode.
|
218 |
+
# Additionally check that `file` is not a directory, as on Windows
|
219 |
+
# directories pass the os.access check.
|
220 |
+
def _access_check(fn, mode):
|
221 |
+
return (os.path.exists(fn) and os.access(fn, mode)
|
222 |
+
and not os.path.isdir(fn))
|
223 |
+
|
224 |
+
# If we're given a path with a directory part, look it up directly rather
|
225 |
+
# than referring to PATH directories. This includes checking relative to the
|
226 |
+
# current directory, e.g. ./script
|
227 |
+
if os.path.dirname(cmd):
|
228 |
+
if _access_check(cmd, mode):
|
229 |
+
return cmd
|
230 |
+
return None
|
231 |
+
|
232 |
+
if path is None:
|
233 |
+
path = os.environ.get("PATH", os.defpath)
|
234 |
+
if not path:
|
235 |
+
return None
|
236 |
+
path = path.split(os.pathsep)
|
237 |
+
|
238 |
+
if sys.platform == "win32":
|
239 |
+
# The current directory takes precedence on Windows.
|
240 |
+
if not os.curdir in path:
|
241 |
+
path.insert(0, os.curdir)
|
242 |
+
|
243 |
+
# PATHEXT is necessary to check on Windows.
|
244 |
+
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
|
245 |
+
# See if the given file matches any of the expected path extensions.
|
246 |
+
# This will allow us to short circuit when given "python.exe".
|
247 |
+
# If it does match, only test that one, otherwise we have to try
|
248 |
+
# others.
|
249 |
+
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
|
250 |
+
files = [cmd]
|
251 |
+
else:
|
252 |
+
files = [cmd + ext for ext in pathext]
|
253 |
+
else:
|
254 |
+
# On other platforms you don't have things like PATHEXT to tell you
|
255 |
+
# what file suffixes are executable, so just pass on cmd as-is.
|
256 |
+
files = [cmd]
|
257 |
+
|
258 |
+
seen = set()
|
259 |
+
for dir in path:
|
260 |
+
normdir = os.path.normcase(dir)
|
261 |
+
if not normdir in seen:
|
262 |
+
seen.add(normdir)
|
263 |
+
for thefile in files:
|
264 |
+
name = os.path.join(dir, thefile)
|
265 |
+
if _access_check(name, mode):
|
266 |
+
return name
|
267 |
+
return None
|
268 |
+
|
269 |
+
|
270 |
+
# ZipFile is a context manager in 2.7, but not in 2.6
|
271 |
+
|
272 |
+
from zipfile import ZipFile as BaseZipFile
|
273 |
+
|
274 |
+
if hasattr(BaseZipFile, '__enter__'): # pragma: no cover
|
275 |
+
ZipFile = BaseZipFile
|
276 |
+
else: # pragma: no cover
|
277 |
+
from zipfile import ZipExtFile as BaseZipExtFile
|
278 |
+
|
279 |
+
class ZipExtFile(BaseZipExtFile):
|
280 |
+
def __init__(self, base):
|
281 |
+
self.__dict__.update(base.__dict__)
|
282 |
+
|
283 |
+
def __enter__(self):
|
284 |
+
return self
|
285 |
+
|
286 |
+
def __exit__(self, *exc_info):
|
287 |
+
self.close()
|
288 |
+
# return None, so if an exception occurred, it will propagate
|
289 |
+
|
290 |
+
class ZipFile(BaseZipFile):
|
291 |
+
def __enter__(self):
|
292 |
+
return self
|
293 |
+
|
294 |
+
def __exit__(self, *exc_info):
|
295 |
+
self.close()
|
296 |
+
# return None, so if an exception occurred, it will propagate
|
297 |
+
|
298 |
+
def open(self, *args, **kwargs):
|
299 |
+
base = BaseZipFile.open(self, *args, **kwargs)
|
300 |
+
return ZipExtFile(base)
|
301 |
+
|
302 |
+
try:
|
303 |
+
from platform import python_implementation
|
304 |
+
except ImportError: # pragma: no cover
|
305 |
+
def python_implementation():
|
306 |
+
"""Return a string identifying the Python implementation."""
|
307 |
+
if 'PyPy' in sys.version:
|
308 |
+
return 'PyPy'
|
309 |
+
if os.name == 'java':
|
310 |
+
return 'Jython'
|
311 |
+
if sys.version.startswith('IronPython'):
|
312 |
+
return 'IronPython'
|
313 |
+
return 'CPython'
|
314 |
+
|
315 |
+
import shutil
|
316 |
+
import sysconfig
|
317 |
+
|
318 |
+
try:
|
319 |
+
callable = callable
|
320 |
+
except NameError: # pragma: no cover
|
321 |
+
from collections.abc import Callable
|
322 |
+
|
323 |
+
def callable(obj):
|
324 |
+
return isinstance(obj, Callable)
|
325 |
+
|
326 |
+
|
327 |
+
try:
|
328 |
+
fsencode = os.fsencode
|
329 |
+
fsdecode = os.fsdecode
|
330 |
+
except AttributeError: # pragma: no cover
|
331 |
+
# Issue #99: on some systems (e.g. containerised),
|
332 |
+
# sys.getfilesystemencoding() returns None, and we need a real value,
|
333 |
+
# so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
|
334 |
+
# sys.getfilesystemencoding(): the return value is "the user’s preference
|
335 |
+
# according to the result of nl_langinfo(CODESET), or None if the
|
336 |
+
# nl_langinfo(CODESET) failed."
|
337 |
+
_fsencoding = sys.getfilesystemencoding() or 'utf-8'
|
338 |
+
if _fsencoding == 'mbcs':
|
339 |
+
_fserrors = 'strict'
|
340 |
+
else:
|
341 |
+
_fserrors = 'surrogateescape'
|
342 |
+
|
343 |
+
def fsencode(filename):
|
344 |
+
if isinstance(filename, bytes):
|
345 |
+
return filename
|
346 |
+
elif isinstance(filename, text_type):
|
347 |
+
return filename.encode(_fsencoding, _fserrors)
|
348 |
+
else:
|
349 |
+
raise TypeError("expect bytes or str, not %s" %
|
350 |
+
type(filename).__name__)
|
351 |
+
|
352 |
+
def fsdecode(filename):
|
353 |
+
if isinstance(filename, text_type):
|
354 |
+
return filename
|
355 |
+
elif isinstance(filename, bytes):
|
356 |
+
return filename.decode(_fsencoding, _fserrors)
|
357 |
+
else:
|
358 |
+
raise TypeError("expect bytes or str, not %s" %
|
359 |
+
type(filename).__name__)
|
360 |
+
|
361 |
+
try:
|
362 |
+
from tokenize import detect_encoding
|
363 |
+
except ImportError: # pragma: no cover
|
364 |
+
from codecs import BOM_UTF8, lookup
|
365 |
+
import re
|
366 |
+
|
367 |
+
cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
|
368 |
+
|
369 |
+
def _get_normal_name(orig_enc):
|
370 |
+
"""Imitates get_normal_name in tokenizer.c."""
|
371 |
+
# Only care about the first 12 characters.
|
372 |
+
enc = orig_enc[:12].lower().replace("_", "-")
|
373 |
+
if enc == "utf-8" or enc.startswith("utf-8-"):
|
374 |
+
return "utf-8"
|
375 |
+
if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
|
376 |
+
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
|
377 |
+
return "iso-8859-1"
|
378 |
+
return orig_enc
|
379 |
+
|
380 |
+
def detect_encoding(readline):
|
381 |
+
"""
|
382 |
+
The detect_encoding() function is used to detect the encoding that should
|
383 |
+
be used to decode a Python source file. It requires one argument, readline,
|
384 |
+
in the same way as the tokenize() generator.
|
385 |
+
|
386 |
+
It will call readline a maximum of twice, and return the encoding used
|
387 |
+
(as a string) and a list of any lines (left as bytes) it has read in.
|
388 |
+
|
389 |
+
It detects the encoding from the presence of a utf-8 bom or an encoding
|
390 |
+
cookie as specified in pep-0263. If both a bom and a cookie are present,
|
391 |
+
but disagree, a SyntaxError will be raised. If the encoding cookie is an
|
392 |
+
invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
|
393 |
+
'utf-8-sig' is returned.
|
394 |
+
|
395 |
+
If no encoding is specified, then the default of 'utf-8' will be returned.
|
396 |
+
"""
|
397 |
+
try:
|
398 |
+
filename = readline.__self__.name
|
399 |
+
except AttributeError:
|
400 |
+
filename = None
|
401 |
+
bom_found = False
|
402 |
+
encoding = None
|
403 |
+
default = 'utf-8'
|
404 |
+
def read_or_stop():
|
405 |
+
try:
|
406 |
+
return readline()
|
407 |
+
except StopIteration:
|
408 |
+
return b''
|
409 |
+
|
410 |
+
def find_cookie(line):
|
411 |
+
try:
|
412 |
+
# Decode as UTF-8. Either the line is an encoding declaration,
|
413 |
+
# in which case it should be pure ASCII, or it must be UTF-8
|
414 |
+
# per default encoding.
|
415 |
+
line_string = line.decode('utf-8')
|
416 |
+
except UnicodeDecodeError:
|
417 |
+
msg = "invalid or missing encoding declaration"
|
418 |
+
if filename is not None:
|
419 |
+
msg = '{} for {!r}'.format(msg, filename)
|
420 |
+
raise SyntaxError(msg)
|
421 |
+
|
422 |
+
matches = cookie_re.findall(line_string)
|
423 |
+
if not matches:
|
424 |
+
return None
|
425 |
+
encoding = _get_normal_name(matches[0])
|
426 |
+
try:
|
427 |
+
codec = lookup(encoding)
|
428 |
+
except LookupError:
|
429 |
+
# This behaviour mimics the Python interpreter
|
430 |
+
if filename is None:
|
431 |
+
msg = "unknown encoding: " + encoding
|
432 |
+
else:
|
433 |
+
msg = "unknown encoding for {!r}: {}".format(filename,
|
434 |
+
encoding)
|
435 |
+
raise SyntaxError(msg)
|
436 |
+
|
437 |
+
if bom_found:
|
438 |
+
if codec.name != 'utf-8':
|
439 |
+
# This behaviour mimics the Python interpreter
|
440 |
+
if filename is None:
|
441 |
+
msg = 'encoding problem: utf-8'
|
442 |
+
else:
|
443 |
+
msg = 'encoding problem for {!r}: utf-8'.format(filename)
|
444 |
+
raise SyntaxError(msg)
|
445 |
+
encoding += '-sig'
|
446 |
+
return encoding
|
447 |
+
|
448 |
+
first = read_or_stop()
|
449 |
+
if first.startswith(BOM_UTF8):
|
450 |
+
bom_found = True
|
451 |
+
first = first[3:]
|
452 |
+
default = 'utf-8-sig'
|
453 |
+
if not first:
|
454 |
+
return default, []
|
455 |
+
|
456 |
+
encoding = find_cookie(first)
|
457 |
+
if encoding:
|
458 |
+
return encoding, [first]
|
459 |
+
|
460 |
+
second = read_or_stop()
|
461 |
+
if not second:
|
462 |
+
return default, [first]
|
463 |
+
|
464 |
+
encoding = find_cookie(second)
|
465 |
+
if encoding:
|
466 |
+
return encoding, [first, second]
|
467 |
+
|
468 |
+
return default, [first, second]
|
469 |
+
|
470 |
+
# For converting & <-> & etc.
|
471 |
+
try:
|
472 |
+
from html import escape
|
473 |
+
except ImportError:
|
474 |
+
from cgi import escape
|
475 |
+
if sys.version_info[:2] < (3, 4):
|
476 |
+
unescape = HTMLParser().unescape
|
477 |
+
else:
|
478 |
+
from html import unescape
|
479 |
+
|
480 |
+
try:
|
481 |
+
from collections import ChainMap
|
482 |
+
except ImportError: # pragma: no cover
|
483 |
+
from collections import MutableMapping
|
484 |
+
|
485 |
+
try:
|
486 |
+
from reprlib import recursive_repr as _recursive_repr
|
487 |
+
except ImportError:
|
488 |
+
def _recursive_repr(fillvalue='...'):
|
489 |
+
'''
|
490 |
+
Decorator to make a repr function return fillvalue for a recursive
|
491 |
+
call
|
492 |
+
'''
|
493 |
+
|
494 |
+
def decorating_function(user_function):
|
495 |
+
repr_running = set()
|
496 |
+
|
497 |
+
def wrapper(self):
|
498 |
+
key = id(self), get_ident()
|
499 |
+
if key in repr_running:
|
500 |
+
return fillvalue
|
501 |
+
repr_running.add(key)
|
502 |
+
try:
|
503 |
+
result = user_function(self)
|
504 |
+
finally:
|
505 |
+
repr_running.discard(key)
|
506 |
+
return result
|
507 |
+
|
508 |
+
# Can't use functools.wraps() here because of bootstrap issues
|
509 |
+
wrapper.__module__ = getattr(user_function, '__module__')
|
510 |
+
wrapper.__doc__ = getattr(user_function, '__doc__')
|
511 |
+
wrapper.__name__ = getattr(user_function, '__name__')
|
512 |
+
wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
|
513 |
+
return wrapper
|
514 |
+
|
515 |
+
return decorating_function
|
516 |
+
|
517 |
+
class ChainMap(MutableMapping):
|
518 |
+
''' A ChainMap groups multiple dicts (or other mappings) together
|
519 |
+
to create a single, updateable view.
|
520 |
+
|
521 |
+
The underlying mappings are stored in a list. That list is public and can
|
522 |
+
accessed or updated using the *maps* attribute. There is no other state.
|
523 |
+
|
524 |
+
Lookups search the underlying mappings successively until a key is found.
|
525 |
+
In contrast, writes, updates, and deletions only operate on the first
|
526 |
+
mapping.
|
527 |
+
|
528 |
+
'''
|
529 |
+
|
530 |
+
def __init__(self, *maps):
|
531 |
+
'''Initialize a ChainMap by setting *maps* to the given mappings.
|
532 |
+
If no mappings are provided, a single empty dictionary is used.
|
533 |
+
|
534 |
+
'''
|
535 |
+
self.maps = list(maps) or [{}] # always at least one map
|
536 |
+
|
537 |
+
def __missing__(self, key):
|
538 |
+
raise KeyError(key)
|
539 |
+
|
540 |
+
def __getitem__(self, key):
|
541 |
+
for mapping in self.maps:
|
542 |
+
try:
|
543 |
+
return mapping[key] # can't use 'key in mapping' with defaultdict
|
544 |
+
except KeyError:
|
545 |
+
pass
|
546 |
+
return self.__missing__(key) # support subclasses that define __missing__
|
547 |
+
|
548 |
+
def get(self, key, default=None):
|
549 |
+
return self[key] if key in self else default
|
550 |
+
|
551 |
+
def __len__(self):
|
552 |
+
return len(set().union(*self.maps)) # reuses stored hash values if possible
|
553 |
+
|
554 |
+
def __iter__(self):
|
555 |
+
return iter(set().union(*self.maps))
|
556 |
+
|
557 |
+
def __contains__(self, key):
|
558 |
+
return any(key in m for m in self.maps)
|
559 |
+
|
560 |
+
def __bool__(self):
|
561 |
+
return any(self.maps)
|
562 |
+
|
563 |
+
@_recursive_repr()
|
564 |
+
def __repr__(self):
|
565 |
+
return '{0.__class__.__name__}({1})'.format(
|
566 |
+
self, ', '.join(map(repr, self.maps)))
|
567 |
+
|
568 |
+
@classmethod
|
569 |
+
def fromkeys(cls, iterable, *args):
|
570 |
+
'Create a ChainMap with a single dict created from the iterable.'
|
571 |
+
return cls(dict.fromkeys(iterable, *args))
|
572 |
+
|
573 |
+
def copy(self):
|
574 |
+
'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
|
575 |
+
return self.__class__(self.maps[0].copy(), *self.maps[1:])
|
576 |
+
|
577 |
+
__copy__ = copy
|
578 |
+
|
579 |
+
def new_child(self): # like Django's Context.push()
|
580 |
+
'New ChainMap with a new dict followed by all previous maps.'
|
581 |
+
return self.__class__({}, *self.maps)
|
582 |
+
|
583 |
+
@property
|
584 |
+
def parents(self): # like Django's Context.pop()
|
585 |
+
'New ChainMap from maps[1:].'
|
586 |
+
return self.__class__(*self.maps[1:])
|
587 |
+
|
588 |
+
def __setitem__(self, key, value):
|
589 |
+
self.maps[0][key] = value
|
590 |
+
|
591 |
+
def __delitem__(self, key):
|
592 |
+
try:
|
593 |
+
del self.maps[0][key]
|
594 |
+
except KeyError:
|
595 |
+
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
|
596 |
+
|
597 |
+
def popitem(self):
|
598 |
+
'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
|
599 |
+
try:
|
600 |
+
return self.maps[0].popitem()
|
601 |
+
except KeyError:
|
602 |
+
raise KeyError('No keys found in the first mapping.')
|
603 |
+
|
604 |
+
def pop(self, key, *args):
|
605 |
+
'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
|
606 |
+
try:
|
607 |
+
return self.maps[0].pop(key, *args)
|
608 |
+
except KeyError:
|
609 |
+
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
|
610 |
+
|
611 |
+
def clear(self):
|
612 |
+
'Clear maps[0], leaving maps[1:] intact.'
|
613 |
+
self.maps[0].clear()
|
614 |
+
|
615 |
+
try:
    from importlib.util import cache_from_source  # Python >= 3.4
except ImportError:  # pragma: no cover
    def cache_from_source(path, debug_override=None):
        """Fallback for very old interpreters: map ``foo.py`` to its
        compiled-file name (``foo.pyc`` or ``foo.pyo``).

        *debug_override* defaults to the interpreter's ``__debug__`` flag,
        matching the behaviour of the stdlib version this shadows.
        """
        assert path.endswith('.py')
        if debug_override is None:
            debug_override = __debug__
        suffix = 'c' if debug_override else 'o'
        return path + suffix
|
627 |
+
|
628 |
+
try:
    from collections import OrderedDict
except ImportError:  # pragma: no cover
    ## {{{ http://code.activestate.com/recipes/576693/ (r9)
    # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
    # Passes Python2.7's test suite and incorporates all the latest updates.
    try:
        # Python 2 thread module; used only to detect recursive repr calls.
        from thread import get_ident as _get_ident
    except ImportError:
        from dummy_thread import get_ident as _get_ident

    try:
        # Py2 ABC views consumed by viewkeys()/viewvalues()/viewitems() below.
        from _abcoll import KeysView, ValuesView, ItemsView
    except ImportError:
        pass


    class OrderedDict(dict):
        'Dictionary that remembers insertion order'
        # An inherited dict maps keys to values.
        # The inherited dict provides __getitem__, __len__, __contains__, and get.
        # The remaining methods are order-aware.
        # Big-O running times for all methods are the same as for regular dictionaries.

        # The internal self.__map dictionary maps keys to links in a doubly linked list.
        # The circular doubly linked list starts and ends with a sentinel element.
        # The sentinel element never gets deleted (this simplifies the algorithm).
        # Each link is stored as a list of length three: [PREV, NEXT, KEY].

        def __init__(self, *args, **kwds):
            '''Initialize an ordered dictionary. Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            '''
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' % len(args))
            try:
                # Re-initialising an existing instance keeps its linked list.
                self.__root
            except AttributeError:
                self.__root = root = []  # sentinel node
                root[:] = [root, root, None]
                self.__map = {}
            self.__update(*args, **kwds)

        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
            'od.__setitem__(i, y) <==> od[i]=y'
            # Setting a new item creates a new link which goes at the end of the linked
            # list, and the inherited dictionary is updated with the new key/value pair.
            if key not in self:
                root = self.__root
                last = root[0]
                last[1] = root[0] = self.__map[key] = [last, root, key]
            dict_setitem(self, key, value)

        def __delitem__(self, key, dict_delitem=dict.__delitem__):
            'od.__delitem__(y) <==> del od[y]'
            # Deleting an existing item uses self.__map to find the link which is
            # then removed by updating the links in the predecessor and successor nodes.
            dict_delitem(self, key)
            link_prev, link_next, key = self.__map.pop(key)
            link_prev[1] = link_next
            link_next[0] = link_prev

        def __iter__(self):
            'od.__iter__() <==> iter(od)'
            # Walk the linked list forward, yielding keys in insertion order.
            root = self.__root
            curr = root[1]
            while curr is not root:
                yield curr[2]
                curr = curr[1]

        def __reversed__(self):
            'od.__reversed__() <==> reversed(od)'
            # Walk the linked list backward.
            root = self.__root
            curr = root[0]
            while curr is not root:
                yield curr[2]
                curr = curr[0]

        def clear(self):
            'od.clear() -> None. Remove all items from od.'
            try:
                # Break the link cycles so nodes can be collected promptly.
                for node in self.__map.itervalues():
                    del node[:]
                root = self.__root
                root[:] = [root, root, None]
                self.__map.clear()
            except AttributeError:
                pass
            dict.clear(self)

        def popitem(self, last=True):
            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if false.

            '''
            if not self:
                raise KeyError('dictionary is empty')
            root = self.__root
            if last:
                # Unlink the node just before the sentinel (newest entry).
                link = root[0]
                link_prev = link[0]
                link_prev[1] = root
                root[0] = link_prev
            else:
                # Unlink the node just after the sentinel (oldest entry).
                link = root[1]
                link_next = link[1]
                root[1] = link_next
                link_next[0] = root
            key = link[2]
            del self.__map[key]
            value = dict.pop(self, key)
            return key, value

        # -- the following methods do not depend on the internal structure --

        def keys(self):
            'od.keys() -> list of keys in od'
            return list(self)

        def values(self):
            'od.values() -> list of values in od'
            return [self[key] for key in self]

        def items(self):
            'od.items() -> list of (key, value) pairs in od'
            return [(key, self[key]) for key in self]

        def iterkeys(self):
            'od.iterkeys() -> an iterator over the keys in od'
            return iter(self)

        def itervalues(self):
            'od.itervalues -> an iterator over the values in od'
            for k in self:
                yield self[k]

        def iteritems(self):
            'od.iteritems -> an iterator over the (key, value) items in od'
            for k in self:
                yield (k, self[k])

        def update(*args, **kwds):
            '''od.update(E, **F) -> None. Update od from dict/iterable E and F.

            If E is a dict instance, does: for k in E: od[k] = E[k]
            If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
            Or if E is an iterable of items, does: for k, v in E: od[k] = v
            In either case, this is followed by: for k, v in F.items(): od[k] = v

            '''
            # ``self`` is taken positionally (args[0]) so that a 'self'
            # keyword argument can still pass through **kwds.
            if len(args) > 2:
                raise TypeError('update() takes at most 2 positional '
                                'arguments (%d given)' % (len(args),))
            elif not args:
                raise TypeError('update() takes at least 1 argument (0 given)')
            self = args[0]
            # Make progressively weaker assumptions about "other"
            other = ()
            if len(args) == 2:
                other = args[1]
            if isinstance(other, dict):
                for key in other:
                    self[key] = other[key]
            elif hasattr(other, 'keys'):
                for key in other.keys():
                    self[key] = other[key]
            else:
                for key, value in other:
                    self[key] = value
            for key, value in kwds.items():
                self[key] = value

        __update = update  # let subclasses override update without breaking __init__

        __marker = object()

        def pop(self, key, default=__marker):
            '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
            If key is not found, d is returned if given, otherwise KeyError is raised.

            '''
            if key in self:
                result = self[key]
                del self[key]
                return result
            # The private sentinel distinguishes "no default" from default=None.
            if default is self.__marker:
                raise KeyError(key)
            return default

        def setdefault(self, key, default=None):
            'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
            if key in self:
                return self[key]
            self[key] = default
            return default

        def __repr__(self, _repr_running=None):
            'od.__repr__() <==> repr(od)'
            if not _repr_running: _repr_running = {}
            # (id(self), thread id) keys guard against recursive repr calls.
            call_key = id(self), _get_ident()
            if call_key in _repr_running:
                return '...'
            _repr_running[call_key] = 1
            try:
                if not self:
                    return '%s()' % (self.__class__.__name__,)
                return '%s(%r)' % (self.__class__.__name__, self.items())
            finally:
                del _repr_running[call_key]

        def __reduce__(self):
            'Return state information for pickling'
            items = [[k, self[k]] for k in self]
            inst_dict = vars(self).copy()
            # Drop the implementation attributes (__root/__map); they are
            # rebuilt by __init__ on unpickling.
            for k in vars(OrderedDict()):
                inst_dict.pop(k, None)
            if inst_dict:
                return (self.__class__, (items,), inst_dict)
            return self.__class__, (items,)

        def copy(self):
            'od.copy() -> a shallow copy of od'
            return self.__class__(self)

        @classmethod
        def fromkeys(cls, iterable, value=None):
            '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
            and values equal to v (which defaults to None).

            '''
            d = cls()
            for key in iterable:
                d[key] = value
            return d

        def __eq__(self, other):
            '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
            while comparison to a regular mapping is order-insensitive.

            '''
            if isinstance(other, OrderedDict):
                return len(self)==len(other) and self.items() == other.items()
            return dict.__eq__(self, other)

        def __ne__(self, other):
            return not self == other

        # -- the following methods are only used in Python 2.7 --

        def viewkeys(self):
            "od.viewkeys() -> a set-like object providing a view on od's keys"
            return KeysView(self)

        def viewvalues(self):
            "od.viewvalues() -> an object providing a view on od's values"
            return ValuesView(self)

        def viewitems(self):
            "od.viewitems() -> a set-like object providing a view on od's items"
            return ItemsView(self)
|
890 |
+
|
891 |
+
try:
    from logging.config import BaseConfigurator, valid_ident
except ImportError:  # pragma: no cover
    # Case-insensitive Python identifier pattern, anchored at both ends.
    IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


    def valid_ident(s):
        """Return True when *s* is a valid Python identifier.

        Raises ValueError otherwise (never returns False), matching the
        stdlib ``logging.config.valid_ident`` contract.
        """
        if IDENTIFIER.match(s) is None:
            raise ValueError('Not a valid Python identifier: %r' % s)
        return True
|
902 |
+
|
903 |
+
|
904 |
+
# The ConvertingXXX classes are wrappers around standard Python containers,
|
905 |
+
# and they serve to convert any suitable values in the container. The
|
906 |
+
# conversion converts base dicts, lists and tuples to their wrapped
|
907 |
+
# equivalents, whereas strings which match a conversion format are converted
|
908 |
+
# appropriately.
|
909 |
+
#
|
910 |
+
# Each wrapper should have a configurator attribute holding the actual
|
911 |
+
# configurator to use for conversion.
|
912 |
+
|
913 |
+
class ConvertingDict(dict):
    """A converting dictionary wrapper.

    Values are passed through ``self.configurator.convert`` on access so
    that nested dicts/lists/tuples and ``prefix://`` strings are wrapped
    or resolved lazily.
    """

    def __getitem__(self, key):
        raw = dict.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        # When conversion produced a new object, cache it for next time
        # and link it back to this container.
        if raw is not converted:
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def get(self, key, default=None):
        raw = dict.get(self, key, default)
        converted = self.configurator.convert(raw)
        # Same caching/back-linking as __getitem__.
        if raw is not converted:
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, key, default=None):
        raw = dict.pop(self, key, default)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            # No write-back here: the key has just been removed.
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted
|
949 |
+
|
950 |
+
class ConvertingList(list):
    """A converting list wrapper.

    Items are passed through ``self.configurator.convert`` on access.
    """

    def __getitem__(self, key):
        raw = list.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        # When conversion produced a new object, cache it for next time
        # and link it back to this container.
        if raw is not converted:
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, idx=-1):
        raw = list.pop(self, idx)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            # No write-back (item removed); only the parent link is set,
            # matching the original behaviour (no .key recorded here).
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
        return converted
|
972 |
+
|
973 |
+
class ConvertingTuple(tuple):
    """A converting tuple wrapper.

    Items are passed through ``self.configurator.convert`` on access.
    """

    def __getitem__(self, key):
        raw = tuple.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            # Tuples are immutable, so the converted value cannot be
            # written back; only the parent/key links are recorded.
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted
|
984 |
+
|
985 |
+
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    # Recognises 'prefix://suffix' strings that request conversion.
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    # Tokens used when walking a cfg:// path: a leading word, '.attr'
    # segments and '[index]' segments.
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps conversion prefixes to the method names that implement them.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = staticmethod(__import__)

    def __init__(self, config):
        # Wrap the raw config so nested values are converted on access.
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # The attribute may be a not-yet-imported submodule;
                    # import the dotted path so far and retry once.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            # Re-raise as ValueError, preserving cause and traceback.
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol."""
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            # Start from the named top-level config entry, then walk
            # '.attr' and '[index]' segments.
            d = self.config[m.groups()[0]]
            #print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                # Container doesn't take int keys; retry
                                # with the original string.
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        #rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                 isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, string_types):
            # Strings of the form 'prefix://suffix' are dispatched to the
            # converter registered for the prefix, if any.
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        # '.' holds post-construction attribute assignments.
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
        result = c(**kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/database.py
ADDED
@@ -0,0 +1,1345 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2012-2017 The Python Software Foundation.
|
4 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
5 |
+
#
|
6 |
+
"""PEP 376 implementation."""
|
7 |
+
|
8 |
+
from __future__ import unicode_literals
|
9 |
+
|
10 |
+
import base64
|
11 |
+
import codecs
|
12 |
+
import contextlib
|
13 |
+
import hashlib
|
14 |
+
import logging
|
15 |
+
import os
|
16 |
+
import posixpath
|
17 |
+
import sys
|
18 |
+
import zipimport
|
19 |
+
|
20 |
+
from . import DistlibException, resources
|
21 |
+
from .compat import StringIO
|
22 |
+
from .version import get_scheme, UnsupportedVersionError
|
23 |
+
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
|
24 |
+
LEGACY_METADATA_FILENAME)
|
25 |
+
from .util import (parse_requirement, cached_property, parse_name_and_version,
|
26 |
+
read_exports, write_exports, CSVReader, CSVWriter)
|
27 |
+
|
28 |
+
|
29 |
+
__all__ = ['Distribution', 'BaseInstalledDistribution',
           'InstalledDistribution', 'EggInfoDistribution',
           'DistributionPath']


logger = logging.getLogger(__name__)

# Filenames used by distlib to store export and command metadata inside a
# .dist-info directory.
EXPORTS_FILENAME = 'pydist-exports.json'
COMMANDS_FILENAME = 'pydist-commands.json'

# Files that may legitimately appear in a .dist-info directory (PEP 376).
DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
              'RESOURCES', EXPORTS_FILENAME, 'SHARED')

# Suffix of PEP 376 metadata directories.
DISTINFO_EXT = '.dist-info'
|
43 |
+
|
44 |
+
|
45 |
+
class _Cache(object):
    """
    A simple cache mapping names and .dist-info paths to distributions
    """
    def __init__(self):
        """
        Initialise an instance. There is normally one for each DistributionPath.
        """
        # name -> list of distributions; path -> single distribution.
        self.name = {}
        self.path = {}
        self.generated = False

    def clear(self):
        """
        Clear the cache, setting it to its initial state.
        """
        self.name.clear()
        self.path.clear()
        self.generated = False

    def add(self, dist):
        """
        Add a distribution to the cache.
        :param dist: The distribution to add.
        """
        # A path already present means the same distribution was seen
        # before (e.g. via a symlink alias); skip it.
        if dist.path in self.path:
            return
        self.path[dist.path] = dist
        self.name.setdefault(dist.key, []).append(dist)
|
73 |
+
|
74 |
+
|
75 |
+
class DistributionPath(object):
|
76 |
+
"""
|
77 |
+
Represents a set of distributions installed on a path (typically sys.path).
|
78 |
+
"""
|
79 |
+
def __init__(self, path=None, include_egg=False):
|
80 |
+
"""
|
81 |
+
Create an instance from a path, optionally including legacy (distutils/
|
82 |
+
setuptools/distribute) distributions.
|
83 |
+
:param path: The path to use, as a list of directories. If not specified,
|
84 |
+
sys.path is used.
|
85 |
+
:param include_egg: If True, this instance will look for and return legacy
|
86 |
+
distributions as well as those based on PEP 376.
|
87 |
+
"""
|
88 |
+
if path is None:
|
89 |
+
path = sys.path
|
90 |
+
self.path = path
|
91 |
+
self._include_dist = True
|
92 |
+
self._include_egg = include_egg
|
93 |
+
|
94 |
+
self._cache = _Cache()
|
95 |
+
self._cache_egg = _Cache()
|
96 |
+
self._cache_enabled = True
|
97 |
+
self._scheme = get_scheme('default')
|
98 |
+
|
99 |
+
    def _get_cache_enabled(self):
        # Accessor backing the ``cache_enabled`` property defined below.
        return self._cache_enabled

    def _set_cache_enabled(self, value):
        # Mutator backing the ``cache_enabled`` property defined below.
        self._cache_enabled = value

    # Whether get_distributions() may use the internal caches.
    cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
|
106 |
+
|
107 |
+
def clear_cache(self):
|
108 |
+
"""
|
109 |
+
Clears the internal cache.
|
110 |
+
"""
|
111 |
+
self._cache.clear()
|
112 |
+
self._cache_egg.clear()
|
113 |
+
|
114 |
+
|
115 |
+
    def _yield_distributions(self):
        """
        Yield .dist-info and/or .egg(-info) distributions.
        """
        # We need to check if we've seen some resources already, because on
        # some Linux systems (e.g. some Debian/Ubuntu variants) there are
        # symlinks which alias other files in the environment.
        seen = set()
        for path in self.path:
            finder = resources.finder_for_path(path)
            if finder is None:
                continue
            r = finder.find('')
            if not r or not r.is_container:
                continue
            rset = sorted(r.resources)
            for entry in rset:
                r = finder.find(entry)
                if not r or r.path in seen:
                    continue
                try:
                    if self._include_dist and entry.endswith(DISTINFO_EXT):
                        # Accept whichever of the known metadata filenames
                        # exists inside the .dist-info directory; skip the
                        # entry entirely when none is found (for-else).
                        possible_filenames = [METADATA_FILENAME,
                                              WHEEL_METADATA_FILENAME,
                                              LEGACY_METADATA_FILENAME]
                        for metadata_filename in possible_filenames:
                            metadata_path = posixpath.join(entry, metadata_filename)
                            pydist = finder.find(metadata_path)
                            if pydist:
                                break
                        else:
                            continue

                        with contextlib.closing(pydist.as_stream()) as stream:
                            metadata = Metadata(fileobj=stream, scheme='legacy')
                        logger.debug('Found %s', r.path)
                        seen.add(r.path)
                        # new_dist_class is bound elsewhere in this module
                        # (outside this view) -- presumably to
                        # InstalledDistribution; confirm before relying on it.
                        yield new_dist_class(r.path, metadata=metadata,
                                             env=self)
                    elif self._include_egg and entry.endswith(('.egg-info',
                                                              '.egg')):
                        logger.debug('Found %s', r.path)
                        seen.add(r.path)
                        # old_dist_class is likewise bound outside this view.
                        yield old_dist_class(r.path, self)
                except Exception as e:
                    # Bad metadata in one distribution should not abort the
                    # whole scan: log, warn and keep going.
                    msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s'
                    logger.warning(msg, r.path, e)
                    import warnings
                    warnings.warn(msg % (r.path, e), stacklevel=2)
|
164 |
+
|
165 |
+
def _generate_cache(self):
|
166 |
+
"""
|
167 |
+
Scan the path for distributions and populate the cache with
|
168 |
+
those that are found.
|
169 |
+
"""
|
170 |
+
gen_dist = not self._cache.generated
|
171 |
+
gen_egg = self._include_egg and not self._cache_egg.generated
|
172 |
+
if gen_dist or gen_egg:
|
173 |
+
for dist in self._yield_distributions():
|
174 |
+
if isinstance(dist, InstalledDistribution):
|
175 |
+
self._cache.add(dist)
|
176 |
+
else:
|
177 |
+
self._cache_egg.add(dist)
|
178 |
+
|
179 |
+
if gen_dist:
|
180 |
+
self._cache.generated = True
|
181 |
+
if gen_egg:
|
182 |
+
self._cache_egg.generated = True
|
183 |
+
|
184 |
+
    @classmethod
    def distinfo_dirname(cls, name, version):
        """
        Return the directory name for a ``.dist-info`` directory.

        Any ``'-'`` characters in *name* are replaced with ``'_'`` (so the
        single ``'-'`` joining name and version, and the one in
        ``'dist-info'``, remain unambiguous). Note that *version* is used
        as-is; no escaping or normalisation is applied to it here.

        :parameter name: the distribution name; ``'-'`` is replaced by ``'_'``.
        :type name: string
        :parameter version: the version string, used verbatim.
        :type version: string
        :returns: directory name of the form ``<name>-<version>.dist-info``
        :rtype: string"""
        name = name.replace('-', '_')
        return '-'.join([name, version]) + DISTINFO_EXT
|
205 |
+
|
206 |
+
def get_distributions(self):
|
207 |
+
"""
|
208 |
+
Provides an iterator that looks for distributions and returns
|
209 |
+
:class:`InstalledDistribution` or
|
210 |
+
:class:`EggInfoDistribution` instances for each one of them.
|
211 |
+
|
212 |
+
:rtype: iterator of :class:`InstalledDistribution` and
|
213 |
+
:class:`EggInfoDistribution` instances
|
214 |
+
"""
|
215 |
+
if not self._cache_enabled:
|
216 |
+
for dist in self._yield_distributions():
|
217 |
+
yield dist
|
218 |
+
else:
|
219 |
+
self._generate_cache()
|
220 |
+
|
221 |
+
for dist in self._cache.path.values():
|
222 |
+
yield dist
|
223 |
+
|
224 |
+
if self._include_egg:
|
225 |
+
for dist in self._cache_egg.path.values():
|
226 |
+
yield dist
|
227 |
+
|
228 |
+
def get_distribution(self, name):
|
229 |
+
"""
|
230 |
+
Looks for a named distribution on the path.
|
231 |
+
|
232 |
+
This function only returns the first result found, as no more than one
|
233 |
+
value is expected. If nothing is found, ``None`` is returned.
|
234 |
+
|
235 |
+
:rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
|
236 |
+
or ``None``
|
237 |
+
"""
|
238 |
+
result = None
|
239 |
+
name = name.lower()
|
240 |
+
if not self._cache_enabled:
|
241 |
+
for dist in self._yield_distributions():
|
242 |
+
if dist.key == name:
|
243 |
+
result = dist
|
244 |
+
break
|
245 |
+
else:
|
246 |
+
self._generate_cache()
|
247 |
+
|
248 |
+
if name in self._cache.name:
|
249 |
+
result = self._cache.name[name][0]
|
250 |
+
elif self._include_egg and name in self._cache_egg.name:
|
251 |
+
result = self._cache_egg.name[name][0]
|
252 |
+
return result
|
253 |
+
|
254 |
+
    def provides_distribution(self, name, version=None):
        """
        Iterate over all distributions and yield each one that provides
        *name*. If a *version* is supplied it is used to filter the results:
        only providers whose provided version satisfies it are yielded.

        This is a generator; it may yield any number of distributions
        (including none).

        :parameter version: a version specifier that indicates the version
                            required, conforming to the format in ``PEP-345``

        :type name: string
        :type version: string
        :raises DistlibException: if *name*/*version* cannot be parsed into
                                  a matcher.
        """
        matcher = None
        if version is not None:
            try:
                matcher = self._scheme.matcher('%s (%s)' % (name, version))
            except ValueError:
                raise DistlibException('invalid name or version: %r, %r' %
                                       (name, version))

        for dist in self.get_distributions():
            # We hit a problem on Travis where enum34 was installed and doesn't
            # have a provides attribute ...
            if not hasattr(dist, 'provides'):
                logger.debug('No "provides": %s', dist)
            else:
                provided = dist.provides

                for p in provided:
                    p_name, p_ver = parse_name_and_version(p)
                    if matcher is None:
                        # No version filter: a name match is enough.
                        if p_name == name:
                            yield dist
                            break
                    else:
                        if p_name == name and matcher.match(p_ver):
                            yield dist
                            break
|
294 |
+
|
295 |
+
def get_file_path(self, name, relative_path):
|
296 |
+
"""
|
297 |
+
Return the path to a resource file.
|
298 |
+
"""
|
299 |
+
dist = self.get_distribution(name)
|
300 |
+
if dist is None:
|
301 |
+
raise LookupError('no distribution named %r found' % name)
|
302 |
+
return dist.get_resource_path(relative_path)
|
303 |
+
|
304 |
+
def get_exported_entries(self, category, name=None):
|
305 |
+
"""
|
306 |
+
Return all of the exported entries in a particular category.
|
307 |
+
|
308 |
+
:param category: The category to search for entries.
|
309 |
+
:param name: If specified, only entries with that name are returned.
|
310 |
+
"""
|
311 |
+
for dist in self.get_distributions():
|
312 |
+
r = dist.exports
|
313 |
+
if category in r:
|
314 |
+
d = r[category]
|
315 |
+
if name is not None:
|
316 |
+
if name in d:
|
317 |
+
yield d[name]
|
318 |
+
else:
|
319 |
+
for v in d.values():
|
320 |
+
yield v
|
321 |
+
|
322 |
+
|
323 |
+
class Distribution(object):
    """
    A base class for distributions, whether installed or from indexes.
    Either way, it must have some metadata, so that's all that's needed
    for construction.
    """

    build_time_dependency = False
    """
    Set to True if it's known to be only a build-time dependency (i.e.
    not needed after installation).
    """

    requested = False
    """A boolean that indicates whether the ``REQUESTED`` metadata file is
    present (in other words, whether the package was installed by user
    request or it was installed as a dependency)."""

    def __init__(self, metadata):
        """
        Initialise an instance.
        :param metadata: The instance of :class:`Metadata` describing this
                         distribution.
        """
        self.metadata = metadata
        self.name = metadata.name
        self.key = self.name.lower()    # for case-insensitive comparisons
        self.version = metadata.version
        self.locator = None
        self.digest = None
        self.extras = None      # additional features requested
        self.context = None     # environment marker overrides
        self.download_urls = set()
        self.digests = {}

    @property
    def source_url(self):
        """
        The source archive download URL for this distribution.
        """
        return self.metadata.source_url

    download_url = source_url   # Backward compatibility

    @property
    def name_and_version(self):
        """
        A utility property which displays the name and version in parentheses.
        """
        return '%s (%s)' % (self.name, self.version)

    @property
    def provides(self):
        """
        A set of distribution names and versions provided by this distribution.
        :return: A set of "name (version)" strings.
        """
        plist = self.metadata.provides
        # Ensure the distribution always provides itself.
        s = '%s (%s)' % (self.name, self.version)
        if s not in plist:
            plist.append(s)
        return plist

    def _get_requirements(self, req_attr):
        # Resolve a named requirements attribute on the metadata, honouring
        # any requested extras and environment-marker context.
        md = self.metadata
        logger.debug('Getting requirements from metadata %r', md.todict())
        reqts = getattr(md, req_attr)
        return set(md.get_requirements(reqts, extras=self.extras,
                                       env=self.context))

    @property
    def run_requires(self):
        # Requirements needed at run time.
        return self._get_requirements('run_requires')

    @property
    def meta_requires(self):
        # Requirements needed to process the metadata itself.
        return self._get_requirements('meta_requires')

    @property
    def build_requires(self):
        # Requirements needed to build the distribution.
        return self._get_requirements('build_requires')

    @property
    def test_requires(self):
        # Requirements needed to run the distribution's tests.
        return self._get_requirements('test_requires')

    @property
    def dev_requires(self):
        # Requirements needed for development of the distribution.
        return self._get_requirements('dev_requires')

    def matches_requirement(self, req):
        """
        Say if this instance matches (fulfills) a requirement.
        :param req: The requirement to match.
        :rtype req: str
        :return: True if it matches, else False.
        """
        # Requirement may contain extras - parse to lose those
        # from what's passed to the matcher
        r = parse_requirement(req)
        scheme = get_scheme(self.metadata.scheme)
        try:
            matcher = scheme.matcher(r.requirement)
        except UnsupportedVersionError:
            # XXX compat-mode if cannot read the version
            logger.warning('could not read version %r - using name only',
                           req)
            name = req.split()[0]
            matcher = scheme.matcher(name)

        name = matcher.key      # case-insensitive

        result = False
        for p in self.provides:
            p_name, p_ver = parse_name_and_version(p)
            if p_name != name:
                continue
            try:
                result = matcher.match(p_ver)
                break
            except UnsupportedVersionError:
                # Unparseable provided version: keep looking.
                pass
        return result

    def __repr__(self):
        """
        Return a textual representation of this instance,
        """
        if self.source_url:
            suffix = ' [%s]' % self.source_url
        else:
            suffix = ''
        return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)

    def __eq__(self, other):
        """
        See if this distribution is the same as another.
        :param other: The distribution to compare with. To be equal to one
                      another. distributions must have the same type, name,
                      version and source_url.
        :return: True if it is the same, else False.
        """
        if type(other) is not type(self):
            result = False
        else:
            result = (self.name == other.name and
                      self.version == other.version and
                      self.source_url == other.source_url)
        return result

    def __hash__(self):
        """
        Compute hash in a way which matches the equality test.
        """
        return hash(self.name) + hash(self.version) + hash(self.source_url)
|
478 |
+
|
479 |
+
|
480 |
+
class BaseInstalledDistribution(Distribution):
    """
    This is the base class for installed distributions (whether PEP 376 or
    legacy).
    """

    # Name of the hashlib algorithm used when hashing installed files, or
    # None to fall back to MD5. Subclasses override this (e.g. 'sha256').
    hasher = None

    def __init__(self, metadata, path, env=None):
        """
        Initialise an instance.
        :param metadata: An instance of :class:`Metadata` which describes the
                         distribution. This will normally have been initialised
                         from a metadata file in the ``path``.
        :param path:     The path of the ``.dist-info`` or ``.egg-info``
                         directory for the distribution.
        :param env:      This is normally the :class:`DistributionPath`
                         instance where this distribution was found.
        """
        super(BaseInstalledDistribution, self).__init__(metadata)
        self.path = path
        self.dist_path = env

    def get_hash(self, data, hasher=None):
        """
        Get the hash of some data, using a particular hash algorithm, if
        specified.

        :param data: The data to be hashed.
        :type data: bytes
        :param hasher: The name of a hash implementation, supported by hashlib,
                       or ``None``. Examples of valid values are ``'sha1'``,
                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
                       ``'sha512'``. If no hasher is specified, the ``hasher``
                       attribute of the :class:`InstalledDistribution` instance
                       is used. If the hasher is determined to be ``None``, MD5
                       is used as the hashing algorithm.
        :returns: The hash of the data. If a hasher was explicitly specified,
                  the returned hash will be prefixed with the specified hasher
                  followed by '='.
        :rtype: str
        """
        if hasher is None:
            hasher = self.hasher
        if hasher is None:
            hasher = hashlib.md5
            prefix = ''
        else:
            # Build the prefix from the algorithm actually being used, before
            # the name is replaced by the hashlib constructor. The previous
            # code used self.hasher here, which mislabelled the digest
            # whenever an explicit hasher differing from self.hasher was
            # passed in (contradicting the docstring above).
            prefix = '%s=' % hasher
            hasher = getattr(hashlib, hasher)
        digest = hasher(data).digest()
        # PEP 376/427 style: urlsafe base64, with trailing '=' padding removed.
        digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
        return '%s%s' % (prefix, digest)
|
533 |
+
|
534 |
+
|
535 |
+
class InstalledDistribution(BaseInstalledDistribution):
    """
    Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``pydist.json`` when it is
    instantiated, or uses a passed-in Metadata instance (useful for when
    dry-run mode is being used).
    """

    # Installed files are recorded with SHA-256 hashes (PEP 376 RECORD).
    hasher = 'sha256'

    def __init__(self, path, metadata=None, env=None):
        # Top-level module names; populated from top_level.txt below.
        self.modules = []
        self.finder = finder = resources.finder_for_path(path)
        if finder is None:
            raise ValueError('finder unavailable for %s' % path)
        # Prefer metadata already cached by the environment for this path.
        if env and env._cache_enabled and path in env._cache.path:
            metadata = env._cache.path[path].metadata
        elif metadata is None:
            r = finder.find(METADATA_FILENAME)
            # Temporary - for Wheel 0.23 support
            if r is None:
                r = finder.find(WHEEL_METADATA_FILENAME)
            # Temporary - for legacy support
            if r is None:
                r = finder.find(LEGACY_METADATA_FILENAME)
            if r is None:
                raise ValueError('no %s found in %s' % (METADATA_FILENAME,
                                 path))
            with contextlib.closing(r.as_stream()) as stream:
                metadata = Metadata(fileobj=stream, scheme='legacy')

        super(InstalledDistribution, self).__init__(metadata, path, env)

        if env and env._cache_enabled:
            env._cache.add(self)

        # PEP 376: a REQUESTED marker file means the user asked for the
        # install explicitly (as opposed to being pulled in as a dependency).
        r = finder.find('REQUESTED')
        self.requested = r is not None
        p = os.path.join(path, 'top_level.txt')
        if os.path.exists(p):
            with open(p, 'rb') as f:
                data = f.read().decode('utf-8')
            self.modules = data.splitlines()

    def __repr__(self):
        return '<InstalledDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def __str__(self):
        return "%s %s" % (self.name, self.version)

    def _get_records(self):
        """
        Get the list of installed files for the distribution
        :return: A list of tuples of path, hash and size. Note that hash and
                 size might be ``None`` for some entries. The path is exactly
                 as stored in the file (which is as in PEP 376).
        """
        results = []
        r = self.get_distinfo_resource('RECORD')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as record_reader:
                # Base location is parent dir of .dist-info dir
                #base_location = os.path.dirname(self.path)
                #base_location = os.path.abspath(base_location)
                for row in record_reader:
                    # RECORD rows may have fewer than 3 columns; pad with None.
                    missing = [None for i in range(len(row), 3)]
                    path, checksum, size = row + missing
                    #if not os.path.isabs(path):
                    #    path = path.replace('/', os.sep)
                    #    path = os.path.join(base_location, path)
                    results.append((path, checksum, size))
        return results

    @cached_property
    def exports(self):
        """
        Return the information exported by this distribution.
        :return: A dictionary of exports, mapping an export category to a dict
                 of :class:`ExportEntry` instances describing the individual
                 export entries, and keyed by name.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            result = self.read_exports()
        return result

    def read_exports(self):
        """
        Read exports data from a file in .ini format.

        :return: A dictionary of exports, mapping an export category to a list
                 of :class:`ExportEntry` instances describing the individual
                 export entries.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            with contextlib.closing(r.as_stream()) as stream:
                result = read_exports(stream)
        return result

    def write_exports(self, exports):
        """
        Write a dictionary of exports to a file in .ini format.
        :param exports: A dictionary of exports, mapping an export category to
                        a list of :class:`ExportEntry` instances describing the
                        individual export entries.
        """
        rf = self.get_distinfo_file(EXPORTS_FILENAME)
        with open(rf, 'w') as f:
            write_exports(exports, f)

    def get_resource_path(self, relative_path):
        """
        NOTE: This API may change in the future.

        Return the absolute path to a resource file with the given relative
        path.

        :param relative_path: The path, relative to .dist-info, of the resource
                              of interest.
        :return: The absolute path where the resource is to be found.
        :raises KeyError: if no such resource is recorded in RESOURCES.
        """
        r = self.get_distinfo_resource('RESOURCES')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as resources_reader:
                for relative, destination in resources_reader:
                    if relative == relative_path:
                        return destination
        raise KeyError('no resource file with relative path %r '
                       'is installed' % relative_path)

    def list_installed_files(self):
        """
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: iterator of (path, hash, size)
        """
        for result in self._get_records():
            yield result

    def write_installed_files(self, paths, prefix, dry_run=False):
        """
        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
        existing ``RECORD`` file is silently overwritten.

        prefix is used to determine when to write absolute paths.
        """
        prefix = os.path.join(prefix, '')
        base = os.path.dirname(self.path)
        base_under_prefix = base.startswith(prefix)
        base = os.path.join(base, '')
        record_path = self.get_distinfo_file('RECORD')
        logger.info('creating %s', record_path)
        if dry_run:
            return None
        with CSVWriter(record_path) as writer:
            for path in paths:
                if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
                    # do not put size and hash, as in PEP-376
                    hash_value = size = ''
                else:
                    size = '%d' % os.path.getsize(path)
                    with open(path, 'rb') as fp:
                        hash_value = self.get_hash(fp.read())
                # Store paths relative to the installation base when possible.
                if path.startswith(base) or (base_under_prefix and
                                             path.startswith(prefix)):
                    path = os.path.relpath(path, base)
                writer.writerow((path, hash_value, size))

            # add the RECORD file itself
            if record_path.startswith(base):
                record_path = os.path.relpath(record_path, base)
            writer.writerow((record_path, '', ''))
        return record_path

    def check_installed_files(self):
        """
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        """
        mismatches = []
        base = os.path.dirname(self.path)
        record_path = self.get_distinfo_file('RECORD')
        for path, hash_value, size in self.list_installed_files():
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            # The RECORD file itself has no recorded hash/size; skip it.
            if path == record_path:
                continue
            if not os.path.exists(path):
                mismatches.append((path, 'exists', True, False))
            elif os.path.isfile(path):
                actual_size = str(os.path.getsize(path))
                if size and actual_size != size:
                    mismatches.append((path, 'size', size, actual_size))
                elif hash_value:
                    # Hash entries may be prefixed 'algo=' (see get_hash).
                    if '=' in hash_value:
                        hasher = hash_value.split('=', 1)[0]
                    else:
                        hasher = None

                    with open(path, 'rb') as f:
                        actual_hash = self.get_hash(f.read(), hasher)
                        if actual_hash != hash_value:
                            mismatches.append((path, 'hash', hash_value, actual_hash))
        return mismatches

    @cached_property
    def shared_locations(self):
        """
        A dictionary of shared locations whose keys are in the set 'prefix',
        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
        The corresponding value is the absolute path of that category for
        this distribution, and takes into account any paths selected by the
        user at installation time (e.g. via command-line arguments). In the
        case of the 'namespace' key, this would be a list of absolute paths
        for the roots of namespace packages in this distribution.

        The first time this property is accessed, the relevant information is
        read from the SHARED file in the .dist-info directory.
        """
        result = {}
        shared_path = os.path.join(self.path, 'SHARED')
        if os.path.isfile(shared_path):
            with codecs.open(shared_path, 'r', encoding='utf-8') as f:
                lines = f.read().splitlines()
            for line in lines:
                key, value = line.split('=', 1)
                if key == 'namespace':
                    # 'namespace' may occur several times; collect all values.
                    result.setdefault(key, []).append(value)
                else:
                    result[key] = value
        return result

    def write_shared_locations(self, paths, dry_run=False):
        """
        Write shared location information to the SHARED file in .dist-info.
        :param paths: A dictionary as described in the documentation for
        :meth:`shared_locations`.
        :param dry_run: If True, the action is logged but no file is actually
                        written.
        :return: The path of the file written to.
        """
        shared_path = os.path.join(self.path, 'SHARED')
        logger.info('creating %s', shared_path)
        if dry_run:
            return None
        lines = []
        # Only record categories whose directory actually exists.
        for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
            path = paths[key]
            if os.path.isdir(paths[key]):
                lines.append('%s=%s' % (key, path))
        for ns in paths.get('namespace', ()):
            lines.append('namespace=%s' % ns)

        with codecs.open(shared_path, 'w', encoding='utf-8') as f:
            f.write('\n'.join(lines))
        return shared_path

    def get_distinfo_resource(self, path):
        # Return a resource object for a known dist-info file, or None if the
        # finder cannot locate it. Rejects names outside DIST_FILES.
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))
        finder = resources.finder_for_path(self.path)
        if finder is None:
            raise DistlibException('Unable to get a finder for %s' % self.path)
        return finder.find(path)

    def get_distinfo_file(self, path):
        """
        Returns a path located under the ``.dist-info`` directory. Returns a
        string representing the path.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`DistlibException` is raised
        :type path: str
        :rtype: str
        """
        # Check if it is an absolute path  # XXX use relpath, add tests
        if path.find(os.sep) >= 0:
            # it's an absolute path?
            distinfo_dirname, path = path.split(os.sep)[-2:]
            if distinfo_dirname != self.path.split(os.sep)[-1]:
                raise DistlibException(
                    'dist-info file %r does not belong to the %r %s '
                    'distribution' % (path, self.name, self.version))

        # The file must be relative
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))

        return os.path.join(self.path, path)

    def list_distinfo_files(self):
        """
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :returns: iterator of paths
        """
        base = os.path.dirname(self.path)
        for path, checksum, size in self._get_records():
            # XXX add separator or use real relpath algo
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            if path.startswith(self.path):
                yield path

    def __eq__(self, other):
        # Identity is determined by the .dist-info path alone.
        return (isinstance(other, InstalledDistribution) and
                self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
|
861 |
+
|
862 |
+
|
863 |
+
class EggInfoDistribution(BaseInstalledDistribution):
|
864 |
+
"""Created with the *path* of the ``.egg-info`` directory or file provided
|
865 |
+
to the constructor. It reads the metadata contained in the file itself, or
|
866 |
+
if the given path happens to be a directory, the metadata is read from the
|
867 |
+
file ``PKG-INFO`` under that directory."""
|
868 |
+
|
869 |
+
    requested = True    # as we have no way of knowing, assume it was
    # Egg metadata records no shared-location information.
    shared_locations = {}
|
871 |
+
|
872 |
+
    def __init__(self, path, env=None):
        # Initialise from a .egg / .egg-info path, reusing cached metadata
        # from the environment when available.
        def set_name_and_version(s, n, v):
            s.name = n
            s.key = n.lower()   # for case-insensitive comparisons
            s.version = v

        self.path = path
        self.dist_path = env
        if env and env._cache_enabled and path in env._cache_egg.path:
            metadata = env._cache_egg.path[path].metadata
            set_name_and_version(self, metadata.name, metadata.version)
        else:
            metadata = self._get_metadata(path)

            # Need to be set before caching
            set_name_and_version(self, metadata.name, metadata.version)

            if env and env._cache_enabled:
                env._cache_egg.add(self)
        super(EggInfoDistribution, self).__init__(metadata, path, env)
|
892 |
+
|
893 |
+
    def _get_metadata(self, path):
        """
        Build a :class:`Metadata` instance from an egg layout.

        Handles three layouts: an unpacked ``.egg`` directory (metadata under
        ``EGG-INFO/``), a zipped ``.egg`` file (read via zipimport), and a
        ``.egg-info`` directory (``PKG-INFO`` plus ``requires.txt``).
        Also populates ``self.modules`` from ``top_level.txt`` when present.

        :raises DistlibException: if *path* ends with neither ``.egg`` nor
                                  ``.egg-info``.
        """
        requires = None

        def parse_requires_data(data):
            """Create a list of dependencies from a requires.txt file.

            *data*: the contents of a setuptools-produced requires.txt file.
            """
            reqs = []
            lines = data.splitlines()
            for line in lines:
                line = line.strip()
                # A '[' starts an extras section, which is not processed here.
                if line.startswith('['):
                    logger.warning('Unexpected line: quitting requirement scan: %r',
                                   line)
                    break
                r = parse_requirement(line)
                if not r:
                    logger.warning('Not recognised as a requirement: %r', line)
                    continue
                if r.extras:
                    logger.warning('extra requirements in requires.txt are '
                                   'not supported')
                if not r.constraints:
                    reqs.append(r.name)
                else:
                    cons = ', '.join('%s%s' % c for c in r.constraints)
                    reqs.append('%s (%s)' % (r.name, cons))
            return reqs

        def parse_requires_path(req_path):
            """Create a list of dependencies from a requires.txt file.

            *req_path*: the path to a setuptools-produced requires.txt file.
            """

            reqs = []
            try:
                with codecs.open(req_path, 'r', 'utf-8') as fp:
                    reqs = parse_requires_data(fp.read())
            except IOError:
                # A missing/unreadable requires.txt just means no requirements.
                pass
            return reqs

        tl_path = tl_data = None
        if path.endswith('.egg'):
            if os.path.isdir(path):
                # Unpacked egg: metadata lives under EGG-INFO/.
                p = os.path.join(path, 'EGG-INFO')
                meta_path = os.path.join(p, 'PKG-INFO')
                metadata = Metadata(path=meta_path, scheme='legacy')
                req_path = os.path.join(p, 'requires.txt')
                tl_path = os.path.join(p, 'top_level.txt')
                requires = parse_requires_path(req_path)
            else:
                # FIXME handle the case where zipfile is not available
                zipf = zipimport.zipimporter(path)
                fileobj = StringIO(
                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
                metadata = Metadata(fileobj=fileobj, scheme='legacy')
                try:
                    data = zipf.get_data('EGG-INFO/requires.txt')
                    tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8')
                    requires = parse_requires_data(data.decode('utf-8'))
                except IOError:
                    requires = None
        elif path.endswith('.egg-info'):
            if os.path.isdir(path):
                req_path = os.path.join(path, 'requires.txt')
                requires = parse_requires_path(req_path)
                path = os.path.join(path, 'PKG-INFO')
                # NOTE(review): tl_path is joined onto the PKG-INFO *file*
                # path here, producing '<dir>/PKG-INFO/top_level.txt' — this
                # looks unintended; confirm against upstream distlib.
                tl_path = os.path.join(path, 'top_level.txt')
            metadata = Metadata(path=path, scheme='legacy')
        else:
            raise DistlibException('path must end with .egg-info or .egg, '
                                   'got %r' % path)

        if requires:
            metadata.add_requirements(requires)
        # look for top-level modules in top_level.txt, if present
        if tl_data is None:
            if tl_path is not None and os.path.exists(tl_path):
                with open(tl_path, 'rb') as f:
                    tl_data = f.read().decode('utf-8')
        if not tl_data:
            tl_data = []
        else:
            tl_data = tl_data.splitlines()
        self.modules = tl_data
        return metadata
|
982 |
+
|
983 |
+
def __repr__(self):
|
984 |
+
return '<EggInfoDistribution %r %s at %r>' % (
|
985 |
+
self.name, self.version, self.path)
|
986 |
+
|
987 |
+
def __str__(self):
|
988 |
+
return "%s %s" % (self.name, self.version)
|
989 |
+
|
990 |
+
def check_installed_files(self):
|
991 |
+
"""
|
992 |
+
Checks that the hashes and sizes of the files in ``RECORD`` are
|
993 |
+
matched by the files themselves. Returns a (possibly empty) list of
|
994 |
+
mismatches. Each entry in the mismatch list will be a tuple consisting
|
995 |
+
of the path, 'exists', 'size' or 'hash' according to what didn't match
|
996 |
+
(existence is checked first, then size, then hash), the expected
|
997 |
+
value and the actual value.
|
998 |
+
"""
|
999 |
+
mismatches = []
|
1000 |
+
record_path = os.path.join(self.path, 'installed-files.txt')
|
1001 |
+
if os.path.exists(record_path):
|
1002 |
+
for path, _, _ in self.list_installed_files():
|
1003 |
+
if path == record_path:
|
1004 |
+
continue
|
1005 |
+
if not os.path.exists(path):
|
1006 |
+
mismatches.append((path, 'exists', True, False))
|
1007 |
+
return mismatches
|
1008 |
+
|
1009 |
+
def list_installed_files(self):
|
1010 |
+
"""
|
1011 |
+
Iterates over the ``installed-files.txt`` entries and returns a tuple
|
1012 |
+
``(path, hash, size)`` for each line.
|
1013 |
+
|
1014 |
+
:returns: a list of (path, hash, size)
|
1015 |
+
"""
|
1016 |
+
|
1017 |
+
def _md5(path):
|
1018 |
+
f = open(path, 'rb')
|
1019 |
+
try:
|
1020 |
+
content = f.read()
|
1021 |
+
finally:
|
1022 |
+
f.close()
|
1023 |
+
return hashlib.md5(content).hexdigest()
|
1024 |
+
|
1025 |
+
def _size(path):
|
1026 |
+
return os.stat(path).st_size
|
1027 |
+
|
1028 |
+
record_path = os.path.join(self.path, 'installed-files.txt')
|
1029 |
+
result = []
|
1030 |
+
if os.path.exists(record_path):
|
1031 |
+
with codecs.open(record_path, 'r', encoding='utf-8') as f:
|
1032 |
+
for line in f:
|
1033 |
+
line = line.strip()
|
1034 |
+
p = os.path.normpath(os.path.join(self.path, line))
|
1035 |
+
# "./" is present as a marker between installed files
|
1036 |
+
# and installation metadata files
|
1037 |
+
if not os.path.exists(p):
|
1038 |
+
logger.warning('Non-existent file: %s', p)
|
1039 |
+
if p.endswith(('.pyc', '.pyo')):
|
1040 |
+
continue
|
1041 |
+
#otherwise fall through and fail
|
1042 |
+
if not os.path.isdir(p):
|
1043 |
+
result.append((p, _md5(p), _size(p)))
|
1044 |
+
result.append((record_path, None, None))
|
1045 |
+
return result
|
1046 |
+
|
1047 |
+
def list_distinfo_files(self, absolute=False):
|
1048 |
+
"""
|
1049 |
+
Iterates over the ``installed-files.txt`` entries and returns paths for
|
1050 |
+
each line if the path is pointing to a file located in the
|
1051 |
+
``.egg-info`` directory or one of its subdirectories.
|
1052 |
+
|
1053 |
+
:parameter absolute: If *absolute* is ``True``, each returned path is
|
1054 |
+
transformed into a local absolute path. Otherwise the
|
1055 |
+
raw value from ``installed-files.txt`` is returned.
|
1056 |
+
:type absolute: boolean
|
1057 |
+
:returns: iterator of paths
|
1058 |
+
"""
|
1059 |
+
record_path = os.path.join(self.path, 'installed-files.txt')
|
1060 |
+
if os.path.exists(record_path):
|
1061 |
+
skip = True
|
1062 |
+
with codecs.open(record_path, 'r', encoding='utf-8') as f:
|
1063 |
+
for line in f:
|
1064 |
+
line = line.strip()
|
1065 |
+
if line == './':
|
1066 |
+
skip = False
|
1067 |
+
continue
|
1068 |
+
if not skip:
|
1069 |
+
p = os.path.normpath(os.path.join(self.path, line))
|
1070 |
+
if p.startswith(self.path):
|
1071 |
+
if absolute:
|
1072 |
+
yield p
|
1073 |
+
else:
|
1074 |
+
yield line
|
1075 |
+
|
1076 |
+
def __eq__(self, other):
|
1077 |
+
return (isinstance(other, EggInfoDistribution) and
|
1078 |
+
self.path == other.path)
|
1079 |
+
|
1080 |
+
    # Equality is value-based (on path, see __eq__), but hashing deliberately
    # stays identity-based via object.__hash__, which the data model permits.
    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
|
1082 |
+
|
1083 |
+
# Module-level aliases for the two distribution flavours handled in this
# module ('new' = .dist-info based, 'old' = .egg-info based); presumably
# kept for callers that use these names — verify before removing.
new_dist_class = InstalledDistribution
old_dist_class = EggInfoDistribution
|
1085 |
+
|
1086 |
+
|
1087 |
+
class DependencyGraph(object):
    """
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided). Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``. An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``. If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    """

    def __init__(self):
        self.adjacency_list = {}
        self.reverse_list = {}
        self.missing = {}

    def add_distribution(self, distribution):
        """Add the *distribution* to the graph.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        """
        self.adjacency_list[distribution] = []
        self.reverse_list[distribution] = []

    def add_edge(self, x, y, label=None):
        """Add an edge from distribution *x* to distribution *y* with the given
        *label* (i.e. *x* depends on *y*).

        :type x: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type y: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        """
        self.adjacency_list[x].append((y, label))
        # multiple edges are allowed, so only record the predecessor once
        if x not in self.reverse_list[y]:
            self.reverse_list[y].append(x)

    def add_missing(self, distribution, requirement):
        """
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        :type requirement: ``str``
        """
        logger.debug('%s missing %r', distribution, requirement)
        self.missing.setdefault(distribution, []).append(requirement)

    def _repr_dist(self, dist):
        # One-line "name version" form shared by repr_node and __repr__.
        return '%s %s' % (dist.name, dist.version)

    def repr_node(self, dist, level=1):
        """Return a textual representation of the subgraph rooted at *dist*."""
        output = [self._repr_dist(dist)]
        for other, label in self.adjacency_list[dist]:
            # Use a distinct local name; the original rebound ``dist``,
            # shadowing the parameter.
            line = self._repr_dist(other)
            if label is not None:
                line = '%s [%s]' % (line, label)
            output.append(' ' * level + line)
            suboutput = self.repr_node(other, level + 1)
            subs = suboutput.split('\n')
            # The first sub-line duplicates the entry just appended.
            output.extend(subs[1:])
        return '\n'.join(output)

    def to_dot(self, f, skip_disconnected=True):
        """Writes a DOT output for the graph to the provided file *f*.

        If *skip_disconnected* is set to ``True``, then all distributions
        that are not dependent on any other distribution are skipped.

        :type f: has to support ``file``-like operations
        :type skip_disconnected: ``bool``
        """
        disconnected = []

        f.write("digraph dependencies {\n")
        for dist, adjs in self.adjacency_list.items():
            if len(adjs) == 0 and not skip_disconnected:
                disconnected.append(dist)
            for other, label in adjs:
                # idiom fix: was "if not label is None"
                if label is not None:
                    f.write('"%s" -> "%s" [label="%s"]\n' %
                            (dist.name, other.name, label))
                else:
                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
        if not skip_disconnected and len(disconnected) > 0:
            f.write('subgraph disconnected {\n')
            f.write('label = "Disconnected"\n')
            f.write('bgcolor = red\n')

            for dist in disconnected:
                f.write('"%s"' % dist.name)
                f.write('\n')
            f.write('}\n')
        f.write('}\n')

    def topological_sort(self):
        """
        Perform a topological sort of the graph.
        :return: A tuple, the first element of which is a topologically sorted
                 list of distributions, and the second element of which is a
                 list of distributions that cannot be sorted because they have
                 circular dependencies and so form a cycle.
        """
        result = []
        # Make a shallow copy of the adjacency list
        alist = {}
        for k, v in self.adjacency_list.items():
            alist[k] = v[:]
        while True:
            # See what we can remove in this run.  list() already snapshots
            # the items, so the extra [:] copy the original made was
            # redundant (the snapshot is needed because we delete keys
            # while iterating).
            to_remove = []
            for k, v in list(alist.items()):
                if not v:
                    to_remove.append(k)
                    del alist[k]
            if not to_remove:
                # What's left in alist (if anything) is a cycle.
                break
            # Remove the freed nodes from the adjacency lists of the others
            for k, v in alist.items():
                alist[k] = [(d, r) for d, r in v if d not in to_remove]
            logger.debug('Moving to result: %s',
                         ['%s (%s)' % (d.name, d.version) for d in to_remove])
            result.extend(to_remove)
        return result, list(alist.keys())

    def __repr__(self):
        """Representation of the graph"""
        # Only the keys are needed here; the original also unpacked (and
        # ignored) the adjacency values.
        return '\n'.join(self.repr_node(dist) for dist in self.adjacency_list)
|
1229 |
+
|
1230 |
+
|
1231 |
+
def make_graph(dists, scheme='default'):
    """Makes a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
                 :class:`distutils2.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    """
    version_scheme = get_scheme(scheme)
    graph = DependencyGraph()
    provided = {}  # maps names to lists of (version, dist) tuples

    # Pass 1: register every distribution and record what each one provides.
    for dist in dists:
        graph.add_distribution(dist)
        for entry in dist.provides:
            name, version = parse_name_and_version(entry)
            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
            provided.setdefault(name, []).append((version, dist))

    # Pass 2: resolve every requirement against the providers and add edges.
    for dist in dists:
        requires = (dist.run_requires | dist.meta_requires |
                    dist.build_requires | dist.dev_requires)
        for req in requires:
            try:
                matcher = version_scheme.matcher(req)
            except UnsupportedVersionError:
                # XXX compat-mode if cannot read the version
                logger.warning('could not read version %r - using name only',
                               req)
                matcher = version_scheme.matcher(req.split()[0])

            key = matcher.key  # case-insensitive

            matched = False
            for version, provider in provided.get(key, ()):
                try:
                    found = matcher.match(version)
                except UnsupportedVersionError:
                    found = False

                if found:
                    graph.add_edge(dist, provider, req)
                    matched = True
                    break
            if not matched:
                graph.add_missing(dist, req)
    return graph
|
1283 |
+
|
1284 |
+
|
1285 |
+
def get_dependent_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    :raises DistlibException: if *dist* is not a member of *dists*
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    dep = [dist]  # dependent distributions; dist itself guards against loops
    # Copy the list: the original popped from graph.reverse_list[dist]
    # directly, mutating the graph's internal state.
    todo = list(graph.reverse_list[dist])  # nodes we should inspect

    while todo:
        d = todo.pop()
        # A node can be queued twice via different paths before either copy
        # is processed; skip it the second time so the result has no
        # duplicates.
        if d in dep:
            continue
        dep.append(d)
        for succ in graph.reverse_list[d]:
            if succ not in dep:
                todo.append(succ)

    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
    return dep
|
1309 |
+
|
1310 |
+
|
1311 |
+
def get_required_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    required by *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    :raises DistlibException: if *dist* is not a member of *dists*
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    req = []  # required distributions
    # Copy the list: popping from graph.adjacency_list[dist] directly would
    # mutate the graph's internal state.  Entries are (dist, label) tuples.
    todo = list(graph.adjacency_list[dist])
    # Track distributions already queued, so circular dependencies cannot
    # re-queue a node forever.
    seen = set(t[0] for t in todo)

    while todo:
        d = todo.pop()[0]
        if d not in req:
            req.append(d)
        for pred in graph.adjacency_list[d]:
            # Compare the distribution (pred[0]) rather than the whole
            # (dist, label) tuple: the original compared the tuple against a
            # list of distributions, so the test was always true, producing
            # duplicates and an infinite loop on circular dependencies.
            if pred[0] not in req and pred[0] not in seen:
                seen.add(pred[0])
                todo.append(pred)

    return req
|
1334 |
+
|
1335 |
+
|
1336 |
+
def make_dist(name, version, **kwargs):
    """Build a :class:`Distribution` given just a *name* and *version*.

    Remaining keyword arguments are forwarded to :class:`Metadata`; a
    ``summary`` keyword, when given and non-empty, replaces the placeholder
    summary.
    """
    summary_text = kwargs.pop('summary', 'Placeholder for summary')
    if not summary_text:
        summary_text = 'Placeholder for summary'
    metadata = Metadata(**kwargs)
    metadata.name = name
    metadata.version = version
    metadata.summary = summary_text
    return Distribution(metadata)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/index.py
ADDED
@@ -0,0 +1,509 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2013 Vinay Sajip.
|
4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
6 |
+
#
|
7 |
+
import hashlib
|
8 |
+
import logging
|
9 |
+
import os
|
10 |
+
import shutil
|
11 |
+
import subprocess
|
12 |
+
import tempfile
|
13 |
+
try:
|
14 |
+
from threading import Thread
|
15 |
+
except ImportError:
|
16 |
+
from dummy_threading import Thread
|
17 |
+
|
18 |
+
from . import DistlibException
|
19 |
+
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
|
20 |
+
urlparse, build_opener, string_types)
|
21 |
+
from .util import zip_dir, ServerProxy
|
22 |
+
|
23 |
+
logger = logging.getLogger(__name__)
|
24 |
+
|
25 |
+
DEFAULT_INDEX = 'https://pypi.org/pypi'
|
26 |
+
DEFAULT_REALM = 'pypi'
|
27 |
+
|
28 |
+
class PackageIndex(object):
|
29 |
+
"""
|
30 |
+
This class represents a package index compatible with PyPI, the Python
|
31 |
+
Package Index.
|
32 |
+
"""
|
33 |
+
|
34 |
+
boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
|
35 |
+
|
36 |
+
def __init__(self, url=None):
|
37 |
+
"""
|
38 |
+
Initialise an instance.
|
39 |
+
|
40 |
+
:param url: The URL of the index. If not specified, the URL for PyPI is
|
41 |
+
used.
|
42 |
+
"""
|
43 |
+
self.url = url or DEFAULT_INDEX
|
44 |
+
self.read_configuration()
|
45 |
+
scheme, netloc, path, params, query, frag = urlparse(self.url)
|
46 |
+
if params or query or frag or scheme not in ('http', 'https'):
|
47 |
+
raise DistlibException('invalid repository: %s' % self.url)
|
48 |
+
self.password_handler = None
|
49 |
+
self.ssl_verifier = None
|
50 |
+
self.gpg = None
|
51 |
+
self.gpg_home = None
|
52 |
+
with open(os.devnull, 'w') as sink:
|
53 |
+
# Use gpg by default rather than gpg2, as gpg2 insists on
|
54 |
+
# prompting for passwords
|
55 |
+
for s in ('gpg', 'gpg2'):
|
56 |
+
try:
|
57 |
+
rc = subprocess.check_call([s, '--version'], stdout=sink,
|
58 |
+
stderr=sink)
|
59 |
+
if rc == 0:
|
60 |
+
self.gpg = s
|
61 |
+
break
|
62 |
+
except OSError:
|
63 |
+
pass
|
64 |
+
|
65 |
+
def _get_pypirc_command(self):
|
66 |
+
"""
|
67 |
+
Get the distutils command for interacting with PyPI configurations.
|
68 |
+
:return: the command.
|
69 |
+
"""
|
70 |
+
from .util import _get_pypirc_command as cmd
|
71 |
+
return cmd()
|
72 |
+
|
73 |
+
def read_configuration(self):
|
74 |
+
"""
|
75 |
+
Read the PyPI access configuration as supported by distutils. This populates
|
76 |
+
``username``, ``password``, ``realm`` and ``url`` attributes from the
|
77 |
+
configuration.
|
78 |
+
"""
|
79 |
+
from .util import _load_pypirc
|
80 |
+
cfg = _load_pypirc(self)
|
81 |
+
self.username = cfg.get('username')
|
82 |
+
self.password = cfg.get('password')
|
83 |
+
self.realm = cfg.get('realm', 'pypi')
|
84 |
+
self.url = cfg.get('repository', self.url)
|
85 |
+
|
86 |
+
def save_configuration(self):
|
87 |
+
"""
|
88 |
+
Save the PyPI access configuration. You must have set ``username`` and
|
89 |
+
``password`` attributes before calling this method.
|
90 |
+
"""
|
91 |
+
self.check_credentials()
|
92 |
+
from .util import _store_pypirc
|
93 |
+
_store_pypirc(self)
|
94 |
+
|
95 |
+
def check_credentials(self):
|
96 |
+
"""
|
97 |
+
Check that ``username`` and ``password`` have been set, and raise an
|
98 |
+
exception if not.
|
99 |
+
"""
|
100 |
+
if self.username is None or self.password is None:
|
101 |
+
raise DistlibException('username and password must be set')
|
102 |
+
pm = HTTPPasswordMgr()
|
103 |
+
_, netloc, _, _, _, _ = urlparse(self.url)
|
104 |
+
pm.add_password(self.realm, netloc, self.username, self.password)
|
105 |
+
self.password_handler = HTTPBasicAuthHandler(pm)
|
106 |
+
|
107 |
+
def register(self, metadata):
|
108 |
+
"""
|
109 |
+
Register a distribution on PyPI, using the provided metadata.
|
110 |
+
|
111 |
+
:param metadata: A :class:`Metadata` instance defining at least a name
|
112 |
+
and version number for the distribution to be
|
113 |
+
registered.
|
114 |
+
:return: The HTTP response received from PyPI upon submission of the
|
115 |
+
request.
|
116 |
+
"""
|
117 |
+
self.check_credentials()
|
118 |
+
metadata.validate()
|
119 |
+
d = metadata.todict()
|
120 |
+
d[':action'] = 'verify'
|
121 |
+
request = self.encode_request(d.items(), [])
|
122 |
+
response = self.send_request(request)
|
123 |
+
d[':action'] = 'submit'
|
124 |
+
request = self.encode_request(d.items(), [])
|
125 |
+
return self.send_request(request)
|
126 |
+
|
127 |
+
def _reader(self, name, stream, outbuf):
|
128 |
+
"""
|
129 |
+
Thread runner for reading lines of from a subprocess into a buffer.
|
130 |
+
|
131 |
+
:param name: The logical name of the stream (used for logging only).
|
132 |
+
:param stream: The stream to read from. This will typically a pipe
|
133 |
+
connected to the output stream of a subprocess.
|
134 |
+
:param outbuf: The list to append the read lines to.
|
135 |
+
"""
|
136 |
+
while True:
|
137 |
+
s = stream.readline()
|
138 |
+
if not s:
|
139 |
+
break
|
140 |
+
s = s.decode('utf-8').rstrip()
|
141 |
+
outbuf.append(s)
|
142 |
+
logger.debug('%s: %s' % (name, s))
|
143 |
+
stream.close()
|
144 |
+
|
145 |
+
def get_sign_command(self, filename, signer, sign_password,
|
146 |
+
keystore=None):
|
147 |
+
"""
|
148 |
+
Return a suitable command for signing a file.
|
149 |
+
|
150 |
+
:param filename: The pathname to the file to be signed.
|
151 |
+
:param signer: The identifier of the signer of the file.
|
152 |
+
:param sign_password: The passphrase for the signer's
|
153 |
+
private key used for signing.
|
154 |
+
:param keystore: The path to a directory which contains the keys
|
155 |
+
used in verification. If not specified, the
|
156 |
+
instance's ``gpg_home`` attribute is used instead.
|
157 |
+
:return: The signing command as a list suitable to be
|
158 |
+
passed to :class:`subprocess.Popen`.
|
159 |
+
"""
|
160 |
+
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
|
161 |
+
if keystore is None:
|
162 |
+
keystore = self.gpg_home
|
163 |
+
if keystore:
|
164 |
+
cmd.extend(['--homedir', keystore])
|
165 |
+
if sign_password is not None:
|
166 |
+
cmd.extend(['--batch', '--passphrase-fd', '0'])
|
167 |
+
td = tempfile.mkdtemp()
|
168 |
+
sf = os.path.join(td, os.path.basename(filename) + '.asc')
|
169 |
+
cmd.extend(['--detach-sign', '--armor', '--local-user',
|
170 |
+
signer, '--output', sf, filename])
|
171 |
+
logger.debug('invoking: %s', ' '.join(cmd))
|
172 |
+
return cmd, sf
|
173 |
+
|
174 |
+
    def run_command(self, cmd, input_data=None):
        """
        Run a command in a child process, passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        """
        kwargs = {
            'stdout': subprocess.PIPE,
            'stderr': subprocess.PIPE,
        }
        # Only open a stdin pipe when there is something to send.
        if input_data is not None:
            kwargs['stdin'] = subprocess.PIPE
        stdout = []
        stderr = []
        p = subprocess.Popen(cmd, **kwargs)
        # We don't use communicate() here because we may need to
        # get clever with interacting with the command
        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
        t1.start()
        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
        t2.start()
        # Write the input only after both reader threads are draining the
        # output pipes — writing first could deadlock if the child fills
        # its output buffers while waiting to read stdin.
        if input_data is not None:
            p.stdin.write(input_data)
            p.stdin.close()

        p.wait()
        # Join the readers so stdout/stderr are fully populated on return.
        t1.join()
        t2.join()
        return p.returncode, stdout, stderr
|
208 |
+
|
209 |
+
def sign_file(self, filename, signer, sign_password, keystore=None):
|
210 |
+
"""
|
211 |
+
Sign a file.
|
212 |
+
|
213 |
+
:param filename: The pathname to the file to be signed.
|
214 |
+
:param signer: The identifier of the signer of the file.
|
215 |
+
:param sign_password: The passphrase for the signer's
|
216 |
+
private key used for signing.
|
217 |
+
:param keystore: The path to a directory which contains the keys
|
218 |
+
used in signing. If not specified, the instance's
|
219 |
+
``gpg_home`` attribute is used instead.
|
220 |
+
:return: The absolute pathname of the file where the signature is
|
221 |
+
stored.
|
222 |
+
"""
|
223 |
+
cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
|
224 |
+
keystore)
|
225 |
+
rc, stdout, stderr = self.run_command(cmd,
|
226 |
+
sign_password.encode('utf-8'))
|
227 |
+
if rc != 0:
|
228 |
+
raise DistlibException('sign command failed with error '
|
229 |
+
'code %s' % rc)
|
230 |
+
return sig_file
|
231 |
+
|
232 |
+
    def upload_file(self, metadata, filename, signer=None, sign_password=None,
                    filetype='sdist', pyversion='source', keystore=None):
        """
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                         distutils command which produced that file, e.g.
                         ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        :raises DistlibException: if credentials are unset or *filename*
                                  does not exist.
        """
        self.check_credentials()
        if not os.path.exists(filename):
            raise DistlibException('not found: %s' % filename)
        metadata.validate()
        d = metadata.todict()
        sig_file = None
        if signer:
            # Signing is best-effort: without a gpg binary we just warn.
            if not self.gpg:
                logger.warning('no signing program available - not signed')
            else:
                sig_file = self.sign_file(filename, signer, sign_password,
                                          keystore)
        with open(filename, 'rb') as f:
            file_data = f.read()
        # NOTE(review): both digests are sent; md5 is weak and presumably
        # kept only because the legacy upload API expects it — verify.
        md5_digest = hashlib.md5(file_data).hexdigest()
        sha256_digest = hashlib.sha256(file_data).hexdigest()
        d.update({
            ':action': 'file_upload',
            'protocol_version': '1',
            'filetype': filetype,
            'pyversion': pyversion,
            'md5_digest': md5_digest,
            'sha256_digest': sha256_digest,
        })
        files = [('content', os.path.basename(filename), file_data)]
        if sig_file:
            with open(sig_file, 'rb') as f:
                sig_data = f.read()
            files.append(('gpg_signature', os.path.basename(sig_file),
                          sig_data))
            # The signature lives in a temp dir created by get_sign_command;
            # clean it up once the data has been read.
            shutil.rmtree(os.path.dirname(sig_file))
        request = self.encode_request(d.items(), files)
        return self.send_request(request)
|
288 |
+
|
289 |
+
def upload_documentation(self, metadata, doc_dir):
|
290 |
+
"""
|
291 |
+
Upload documentation to the index.
|
292 |
+
|
293 |
+
:param metadata: A :class:`Metadata` instance defining at least a name
|
294 |
+
and version number for the documentation to be
|
295 |
+
uploaded.
|
296 |
+
:param doc_dir: The pathname of the directory which contains the
|
297 |
+
documentation. This should be the directory that
|
298 |
+
contains the ``index.html`` for the documentation.
|
299 |
+
:return: The HTTP response received from PyPI upon submission of the
|
300 |
+
request.
|
301 |
+
"""
|
302 |
+
self.check_credentials()
|
303 |
+
if not os.path.isdir(doc_dir):
|
304 |
+
raise DistlibException('not a directory: %r' % doc_dir)
|
305 |
+
fn = os.path.join(doc_dir, 'index.html')
|
306 |
+
if not os.path.exists(fn):
|
307 |
+
raise DistlibException('not found: %r' % fn)
|
308 |
+
metadata.validate()
|
309 |
+
name, version = metadata.name, metadata.version
|
310 |
+
zip_data = zip_dir(doc_dir).getvalue()
|
311 |
+
fields = [(':action', 'doc_upload'),
|
312 |
+
('name', name), ('version', version)]
|
313 |
+
files = [('content', name, zip_data)]
|
314 |
+
request = self.encode_request(fields, files)
|
315 |
+
return self.send_request(request)
|
316 |
+
|
317 |
+
def get_verify_command(self, signature_filename, data_filename,
|
318 |
+
keystore=None):
|
319 |
+
"""
|
320 |
+
Return a suitable command for verifying a file.
|
321 |
+
|
322 |
+
:param signature_filename: The pathname to the file containing the
|
323 |
+
signature.
|
324 |
+
:param data_filename: The pathname to the file containing the
|
325 |
+
signed data.
|
326 |
+
:param keystore: The path to a directory which contains the keys
|
327 |
+
used in verification. If not specified, the
|
328 |
+
instance's ``gpg_home`` attribute is used instead.
|
329 |
+
:return: The verifying command as a list suitable to be
|
330 |
+
passed to :class:`subprocess.Popen`.
|
331 |
+
"""
|
332 |
+
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
|
333 |
+
if keystore is None:
|
334 |
+
keystore = self.gpg_home
|
335 |
+
if keystore:
|
336 |
+
cmd.extend(['--homedir', keystore])
|
337 |
+
cmd.extend(['--verify', signature_filename, data_filename])
|
338 |
+
logger.debug('invoking: %s', ' '.join(cmd))
|
339 |
+
return cmd
|
340 |
+
|
341 |
+
def verify_signature(self, signature_filename, data_filename,
|
342 |
+
keystore=None):
|
343 |
+
"""
|
344 |
+
Verify a signature for a file.
|
345 |
+
|
346 |
+
:param signature_filename: The pathname to the file containing the
|
347 |
+
signature.
|
348 |
+
:param data_filename: The pathname to the file containing the
|
349 |
+
signed data.
|
350 |
+
:param keystore: The path to a directory which contains the keys
|
351 |
+
used in verification. If not specified, the
|
352 |
+
instance's ``gpg_home`` attribute is used instead.
|
353 |
+
:return: True if the signature was verified, else False.
|
354 |
+
"""
|
355 |
+
if not self.gpg:
|
356 |
+
raise DistlibException('verification unavailable because gpg '
|
357 |
+
'unavailable')
|
358 |
+
cmd = self.get_verify_command(signature_filename, data_filename,
|
359 |
+
keystore)
|
360 |
+
rc, stdout, stderr = self.run_command(cmd)
|
361 |
+
if rc not in (0, 1):
|
362 |
+
raise DistlibException('verify command failed with error '
|
363 |
+
'code %s' % rc)
|
364 |
+
return rc == 0
|
365 |
+
|
366 |
+
def download_file(self, url, destfile, digest=None, reporthook=None):
|
367 |
+
"""
|
368 |
+
This is a convenience method for downloading a file from an URL.
|
369 |
+
Normally, this will be a file from the index, though currently
|
370 |
+
no check is made for this (i.e. a file can be downloaded from
|
371 |
+
anywhere).
|
372 |
+
|
373 |
+
The method is just like the :func:`urlretrieve` function in the
|
374 |
+
standard library, except that it allows digest computation to be
|
375 |
+
done during download and checking that the downloaded data
|
376 |
+
matched any expected value.
|
377 |
+
|
378 |
+
:param url: The URL of the file to be downloaded (assumed to be
|
379 |
+
available via an HTTP GET request).
|
380 |
+
:param destfile: The pathname where the downloaded file is to be
|
381 |
+
saved.
|
382 |
+
:param digest: If specified, this must be a (hasher, value)
|
383 |
+
tuple, where hasher is the algorithm used (e.g.
|
384 |
+
``'md5'``) and ``value`` is the expected value.
|
385 |
+
:param reporthook: The same as for :func:`urlretrieve` in the
|
386 |
+
standard library.
|
387 |
+
"""
|
388 |
+
if digest is None:
|
389 |
+
digester = None
|
390 |
+
logger.debug('No digest specified')
|
391 |
+
else:
|
392 |
+
if isinstance(digest, (list, tuple)):
|
393 |
+
hasher, digest = digest
|
394 |
+
else:
|
395 |
+
hasher = 'md5'
|
396 |
+
digester = getattr(hashlib, hasher)()
|
397 |
+
logger.debug('Digest specified: %s' % digest)
|
398 |
+
# The following code is equivalent to urlretrieve.
|
399 |
+
# We need to do it this way so that we can compute the
|
400 |
+
# digest of the file as we go.
|
401 |
+
with open(destfile, 'wb') as dfp:
|
402 |
+
# addinfourl is not a context manager on 2.x
|
403 |
+
# so we have to use try/finally
|
404 |
+
sfp = self.send_request(Request(url))
|
405 |
+
try:
|
406 |
+
headers = sfp.info()
|
407 |
+
blocksize = 8192
|
408 |
+
size = -1
|
409 |
+
read = 0
|
410 |
+
blocknum = 0
|
411 |
+
if "content-length" in headers:
|
412 |
+
size = int(headers["Content-Length"])
|
413 |
+
if reporthook:
|
414 |
+
reporthook(blocknum, blocksize, size)
|
415 |
+
while True:
|
416 |
+
block = sfp.read(blocksize)
|
417 |
+
if not block:
|
418 |
+
break
|
419 |
+
read += len(block)
|
420 |
+
dfp.write(block)
|
421 |
+
if digester:
|
422 |
+
digester.update(block)
|
423 |
+
blocknum += 1
|
424 |
+
if reporthook:
|
425 |
+
reporthook(blocknum, blocksize, size)
|
426 |
+
finally:
|
427 |
+
sfp.close()
|
428 |
+
|
429 |
+
# check that we got the whole file, if we can
|
430 |
+
if size >= 0 and read < size:
|
431 |
+
raise DistlibException(
|
432 |
+
'retrieval incomplete: got only %d out of %d bytes'
|
433 |
+
% (read, size))
|
434 |
+
# if we have a digest, it must match.
|
435 |
+
if digester:
|
436 |
+
actual = digester.hexdigest()
|
437 |
+
if digest != actual:
|
438 |
+
raise DistlibException('%s digest mismatch for %s: expected '
|
439 |
+
'%s, got %s' % (hasher, destfile,
|
440 |
+
digest, actual))
|
441 |
+
logger.debug('Digest verified: %s', digest)
|
442 |
+
|
443 |
+
def send_request(self, req):
|
444 |
+
"""
|
445 |
+
Send a standard library :class:`Request` to PyPI and return its
|
446 |
+
response.
|
447 |
+
|
448 |
+
:param req: The request to send.
|
449 |
+
:return: The HTTP response from PyPI (a standard library HTTPResponse).
|
450 |
+
"""
|
451 |
+
handlers = []
|
452 |
+
if self.password_handler:
|
453 |
+
handlers.append(self.password_handler)
|
454 |
+
if self.ssl_verifier:
|
455 |
+
handlers.append(self.ssl_verifier)
|
456 |
+
opener = build_opener(*handlers)
|
457 |
+
return opener.open(req)
|
458 |
+
|
459 |
+
def encode_request(self, fields, files):
|
460 |
+
"""
|
461 |
+
Encode fields and files for posting to an HTTP server.
|
462 |
+
|
463 |
+
:param fields: The fields to send as a list of (fieldname, value)
|
464 |
+
tuples.
|
465 |
+
:param files: The files to send as a list of (fieldname, filename,
|
466 |
+
file_bytes) tuple.
|
467 |
+
"""
|
468 |
+
# Adapted from packaging, which in turn was adapted from
|
469 |
+
# http://code.activestate.com/recipes/146306
|
470 |
+
|
471 |
+
parts = []
|
472 |
+
boundary = self.boundary
|
473 |
+
for k, values in fields:
|
474 |
+
if not isinstance(values, (list, tuple)):
|
475 |
+
values = [values]
|
476 |
+
|
477 |
+
for v in values:
|
478 |
+
parts.extend((
|
479 |
+
b'--' + boundary,
|
480 |
+
('Content-Disposition: form-data; name="%s"' %
|
481 |
+
k).encode('utf-8'),
|
482 |
+
b'',
|
483 |
+
v.encode('utf-8')))
|
484 |
+
for key, filename, value in files:
|
485 |
+
parts.extend((
|
486 |
+
b'--' + boundary,
|
487 |
+
('Content-Disposition: form-data; name="%s"; filename="%s"' %
|
488 |
+
(key, filename)).encode('utf-8'),
|
489 |
+
b'',
|
490 |
+
value))
|
491 |
+
|
492 |
+
parts.extend((b'--' + boundary + b'--', b''))
|
493 |
+
|
494 |
+
body = b'\r\n'.join(parts)
|
495 |
+
ct = b'multipart/form-data; boundary=' + boundary
|
496 |
+
headers = {
|
497 |
+
'Content-type': ct,
|
498 |
+
'Content-length': str(len(body))
|
499 |
+
}
|
500 |
+
return Request(self.url, body, headers)
|
501 |
+
|
502 |
+
def search(self, terms, operator=None):
|
503 |
+
if isinstance(terms, string_types):
|
504 |
+
terms = {'name': terms}
|
505 |
+
rpc_proxy = ServerProxy(self.url, timeout=3.0)
|
506 |
+
try:
|
507 |
+
return rpc_proxy.search(terms, operator or 'and')
|
508 |
+
finally:
|
509 |
+
rpc_proxy('close')()
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/manifest.py
ADDED
@@ -0,0 +1,393 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2012-2013 Python Software Foundation.
|
4 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
5 |
+
#
|
6 |
+
"""
|
7 |
+
Class representing the list of files in a distribution.
|
8 |
+
|
9 |
+
Equivalent to distutils.filelist, but fixes some problems.
|
10 |
+
"""
|
11 |
+
import fnmatch
|
12 |
+
import logging
|
13 |
+
import os
|
14 |
+
import re
|
15 |
+
import sys
|
16 |
+
|
17 |
+
from . import DistlibException
|
18 |
+
from .compat import fsdecode
|
19 |
+
from .util import convert_path
|
20 |
+
|
21 |
+
|
22 |
+
__all__ = ['Manifest']
|
23 |
+
|
24 |
+
logger = logging.getLogger(__name__)
|
25 |
+
|
26 |
+
# a \ followed by some spaces + EOL
|
27 |
+
_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
|
28 |
+
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
|
29 |
+
|
30 |
+
#
|
31 |
+
# Due to the different results returned by fnmatch.translate, we need
|
32 |
+
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
|
33 |
+
# to be brought in for Python 3.6 onwards.
|
34 |
+
#
|
35 |
+
_PYTHON_VERSION = sys.version_info[:2]
|
36 |
+
|
37 |
+
class Manifest(object):
|
38 |
+
"""A list of files built by on exploring the filesystem and filtered by
|
39 |
+
applying various patterns to what we find there.
|
40 |
+
"""
|
41 |
+
|
42 |
+
def __init__(self, base=None):
|
43 |
+
"""
|
44 |
+
Initialise an instance.
|
45 |
+
|
46 |
+
:param base: The base directory to explore under.
|
47 |
+
"""
|
48 |
+
self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
|
49 |
+
self.prefix = self.base + os.sep
|
50 |
+
self.allfiles = None
|
51 |
+
self.files = set()
|
52 |
+
|
53 |
+
#
|
54 |
+
# Public API
|
55 |
+
#
|
56 |
+
|
57 |
+
def findall(self):
|
58 |
+
"""Find all files under the base and set ``allfiles`` to the absolute
|
59 |
+
pathnames of files found.
|
60 |
+
"""
|
61 |
+
from stat import S_ISREG, S_ISDIR, S_ISLNK
|
62 |
+
|
63 |
+
self.allfiles = allfiles = []
|
64 |
+
root = self.base
|
65 |
+
stack = [root]
|
66 |
+
pop = stack.pop
|
67 |
+
push = stack.append
|
68 |
+
|
69 |
+
while stack:
|
70 |
+
root = pop()
|
71 |
+
names = os.listdir(root)
|
72 |
+
|
73 |
+
for name in names:
|
74 |
+
fullname = os.path.join(root, name)
|
75 |
+
|
76 |
+
# Avoid excess stat calls -- just one will do, thank you!
|
77 |
+
stat = os.stat(fullname)
|
78 |
+
mode = stat.st_mode
|
79 |
+
if S_ISREG(mode):
|
80 |
+
allfiles.append(fsdecode(fullname))
|
81 |
+
elif S_ISDIR(mode) and not S_ISLNK(mode):
|
82 |
+
push(fullname)
|
83 |
+
|
84 |
+
def add(self, item):
|
85 |
+
"""
|
86 |
+
Add a file to the manifest.
|
87 |
+
|
88 |
+
:param item: The pathname to add. This can be relative to the base.
|
89 |
+
"""
|
90 |
+
if not item.startswith(self.prefix):
|
91 |
+
item = os.path.join(self.base, item)
|
92 |
+
self.files.add(os.path.normpath(item))
|
93 |
+
|
94 |
+
def add_many(self, items):
|
95 |
+
"""
|
96 |
+
Add a list of files to the manifest.
|
97 |
+
|
98 |
+
:param items: The pathnames to add. These can be relative to the base.
|
99 |
+
"""
|
100 |
+
for item in items:
|
101 |
+
self.add(item)
|
102 |
+
|
103 |
+
def sorted(self, wantdirs=False):
|
104 |
+
"""
|
105 |
+
Return sorted files in directory order
|
106 |
+
"""
|
107 |
+
|
108 |
+
def add_dir(dirs, d):
|
109 |
+
dirs.add(d)
|
110 |
+
logger.debug('add_dir added %s', d)
|
111 |
+
if d != self.base:
|
112 |
+
parent, _ = os.path.split(d)
|
113 |
+
assert parent not in ('', '/')
|
114 |
+
add_dir(dirs, parent)
|
115 |
+
|
116 |
+
result = set(self.files) # make a copy!
|
117 |
+
if wantdirs:
|
118 |
+
dirs = set()
|
119 |
+
for f in result:
|
120 |
+
add_dir(dirs, os.path.dirname(f))
|
121 |
+
result |= dirs
|
122 |
+
return [os.path.join(*path_tuple) for path_tuple in
|
123 |
+
sorted(os.path.split(path) for path in result)]
|
124 |
+
|
125 |
+
def clear(self):
|
126 |
+
"""Clear all collected files."""
|
127 |
+
self.files = set()
|
128 |
+
self.allfiles = []
|
129 |
+
|
130 |
+
def process_directive(self, directive):
|
131 |
+
"""
|
132 |
+
Process a directive which either adds some files from ``allfiles`` to
|
133 |
+
``files``, or removes some files from ``files``.
|
134 |
+
|
135 |
+
:param directive: The directive to process. This should be in a format
|
136 |
+
compatible with distutils ``MANIFEST.in`` files:
|
137 |
+
|
138 |
+
http://docs.python.org/distutils/sourcedist.html#commands
|
139 |
+
"""
|
140 |
+
# Parse the line: split it up, make sure the right number of words
|
141 |
+
# is there, and return the relevant words. 'action' is always
|
142 |
+
# defined: it's the first word of the line. Which of the other
|
143 |
+
# three are defined depends on the action; it'll be either
|
144 |
+
# patterns, (dir and patterns), or (dirpattern).
|
145 |
+
action, patterns, thedir, dirpattern = self._parse_directive(directive)
|
146 |
+
|
147 |
+
# OK, now we know that the action is valid and we have the
|
148 |
+
# right number of words on the line for that action -- so we
|
149 |
+
# can proceed with minimal error-checking.
|
150 |
+
if action == 'include':
|
151 |
+
for pattern in patterns:
|
152 |
+
if not self._include_pattern(pattern, anchor=True):
|
153 |
+
logger.warning('no files found matching %r', pattern)
|
154 |
+
|
155 |
+
elif action == 'exclude':
|
156 |
+
for pattern in patterns:
|
157 |
+
found = self._exclude_pattern(pattern, anchor=True)
|
158 |
+
#if not found:
|
159 |
+
# logger.warning('no previously-included files '
|
160 |
+
# 'found matching %r', pattern)
|
161 |
+
|
162 |
+
elif action == 'global-include':
|
163 |
+
for pattern in patterns:
|
164 |
+
if not self._include_pattern(pattern, anchor=False):
|
165 |
+
logger.warning('no files found matching %r '
|
166 |
+
'anywhere in distribution', pattern)
|
167 |
+
|
168 |
+
elif action == 'global-exclude':
|
169 |
+
for pattern in patterns:
|
170 |
+
found = self._exclude_pattern(pattern, anchor=False)
|
171 |
+
#if not found:
|
172 |
+
# logger.warning('no previously-included files '
|
173 |
+
# 'matching %r found anywhere in '
|
174 |
+
# 'distribution', pattern)
|
175 |
+
|
176 |
+
elif action == 'recursive-include':
|
177 |
+
for pattern in patterns:
|
178 |
+
if not self._include_pattern(pattern, prefix=thedir):
|
179 |
+
logger.warning('no files found matching %r '
|
180 |
+
'under directory %r', pattern, thedir)
|
181 |
+
|
182 |
+
elif action == 'recursive-exclude':
|
183 |
+
for pattern in patterns:
|
184 |
+
found = self._exclude_pattern(pattern, prefix=thedir)
|
185 |
+
#if not found:
|
186 |
+
# logger.warning('no previously-included files '
|
187 |
+
# 'matching %r found under directory %r',
|
188 |
+
# pattern, thedir)
|
189 |
+
|
190 |
+
elif action == 'graft':
|
191 |
+
if not self._include_pattern(None, prefix=dirpattern):
|
192 |
+
logger.warning('no directories found matching %r',
|
193 |
+
dirpattern)
|
194 |
+
|
195 |
+
elif action == 'prune':
|
196 |
+
if not self._exclude_pattern(None, prefix=dirpattern):
|
197 |
+
logger.warning('no previously-included directories found '
|
198 |
+
'matching %r', dirpattern)
|
199 |
+
else: # pragma: no cover
|
200 |
+
# This should never happen, as it should be caught in
|
201 |
+
# _parse_template_line
|
202 |
+
raise DistlibException(
|
203 |
+
'invalid action %r' % action)
|
204 |
+
|
205 |
+
#
|
206 |
+
# Private API
|
207 |
+
#
|
208 |
+
|
209 |
+
def _parse_directive(self, directive):
|
210 |
+
"""
|
211 |
+
Validate a directive.
|
212 |
+
:param directive: The directive to validate.
|
213 |
+
:return: A tuple of action, patterns, thedir, dir_patterns
|
214 |
+
"""
|
215 |
+
words = directive.split()
|
216 |
+
if len(words) == 1 and words[0] not in ('include', 'exclude',
|
217 |
+
'global-include',
|
218 |
+
'global-exclude',
|
219 |
+
'recursive-include',
|
220 |
+
'recursive-exclude',
|
221 |
+
'graft', 'prune'):
|
222 |
+
# no action given, let's use the default 'include'
|
223 |
+
words.insert(0, 'include')
|
224 |
+
|
225 |
+
action = words[0]
|
226 |
+
patterns = thedir = dir_pattern = None
|
227 |
+
|
228 |
+
if action in ('include', 'exclude',
|
229 |
+
'global-include', 'global-exclude'):
|
230 |
+
if len(words) < 2:
|
231 |
+
raise DistlibException(
|
232 |
+
'%r expects <pattern1> <pattern2> ...' % action)
|
233 |
+
|
234 |
+
patterns = [convert_path(word) for word in words[1:]]
|
235 |
+
|
236 |
+
elif action in ('recursive-include', 'recursive-exclude'):
|
237 |
+
if len(words) < 3:
|
238 |
+
raise DistlibException(
|
239 |
+
'%r expects <dir> <pattern1> <pattern2> ...' % action)
|
240 |
+
|
241 |
+
thedir = convert_path(words[1])
|
242 |
+
patterns = [convert_path(word) for word in words[2:]]
|
243 |
+
|
244 |
+
elif action in ('graft', 'prune'):
|
245 |
+
if len(words) != 2:
|
246 |
+
raise DistlibException(
|
247 |
+
'%r expects a single <dir_pattern>' % action)
|
248 |
+
|
249 |
+
dir_pattern = convert_path(words[1])
|
250 |
+
|
251 |
+
else:
|
252 |
+
raise DistlibException('unknown action %r' % action)
|
253 |
+
|
254 |
+
return action, patterns, thedir, dir_pattern
|
255 |
+
|
256 |
+
def _include_pattern(self, pattern, anchor=True, prefix=None,
|
257 |
+
is_regex=False):
|
258 |
+
"""Select strings (presumably filenames) from 'self.files' that
|
259 |
+
match 'pattern', a Unix-style wildcard (glob) pattern.
|
260 |
+
|
261 |
+
Patterns are not quite the same as implemented by the 'fnmatch'
|
262 |
+
module: '*' and '?' match non-special characters, where "special"
|
263 |
+
is platform-dependent: slash on Unix; colon, slash, and backslash on
|
264 |
+
DOS/Windows; and colon on Mac OS.
|
265 |
+
|
266 |
+
If 'anchor' is true (the default), then the pattern match is more
|
267 |
+
stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
|
268 |
+
'anchor' is false, both of these will match.
|
269 |
+
|
270 |
+
If 'prefix' is supplied, then only filenames starting with 'prefix'
|
271 |
+
(itself a pattern) and ending with 'pattern', with anything in between
|
272 |
+
them, will match. 'anchor' is ignored in this case.
|
273 |
+
|
274 |
+
If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
|
275 |
+
'pattern' is assumed to be either a string containing a regex or a
|
276 |
+
regex object -- no translation is done, the regex is just compiled
|
277 |
+
and used as-is.
|
278 |
+
|
279 |
+
Selected strings will be added to self.files.
|
280 |
+
|
281 |
+
Return True if files are found.
|
282 |
+
"""
|
283 |
+
# XXX docstring lying about what the special chars are?
|
284 |
+
found = False
|
285 |
+
pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
|
286 |
+
|
287 |
+
# delayed loading of allfiles list
|
288 |
+
if self.allfiles is None:
|
289 |
+
self.findall()
|
290 |
+
|
291 |
+
for name in self.allfiles:
|
292 |
+
if pattern_re.search(name):
|
293 |
+
self.files.add(name)
|
294 |
+
found = True
|
295 |
+
return found
|
296 |
+
|
297 |
+
def _exclude_pattern(self, pattern, anchor=True, prefix=None,
|
298 |
+
is_regex=False):
|
299 |
+
"""Remove strings (presumably filenames) from 'files' that match
|
300 |
+
'pattern'.
|
301 |
+
|
302 |
+
Other parameters are the same as for 'include_pattern()', above.
|
303 |
+
The list 'self.files' is modified in place. Return True if files are
|
304 |
+
found.
|
305 |
+
|
306 |
+
This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
|
307 |
+
packaging source distributions
|
308 |
+
"""
|
309 |
+
found = False
|
310 |
+
pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
|
311 |
+
for f in list(self.files):
|
312 |
+
if pattern_re.search(f):
|
313 |
+
self.files.remove(f)
|
314 |
+
found = True
|
315 |
+
return found
|
316 |
+
|
317 |
+
def _translate_pattern(self, pattern, anchor=True, prefix=None,
|
318 |
+
is_regex=False):
|
319 |
+
"""Translate a shell-like wildcard pattern to a compiled regular
|
320 |
+
expression.
|
321 |
+
|
322 |
+
Return the compiled regex. If 'is_regex' true,
|
323 |
+
then 'pattern' is directly compiled to a regex (if it's a string)
|
324 |
+
or just returned as-is (assumes it's a regex object).
|
325 |
+
"""
|
326 |
+
if is_regex:
|
327 |
+
if isinstance(pattern, str):
|
328 |
+
return re.compile(pattern)
|
329 |
+
else:
|
330 |
+
return pattern
|
331 |
+
|
332 |
+
if _PYTHON_VERSION > (3, 2):
|
333 |
+
# ditch start and end characters
|
334 |
+
start, _, end = self._glob_to_re('_').partition('_')
|
335 |
+
|
336 |
+
if pattern:
|
337 |
+
pattern_re = self._glob_to_re(pattern)
|
338 |
+
if _PYTHON_VERSION > (3, 2):
|
339 |
+
assert pattern_re.startswith(start) and pattern_re.endswith(end)
|
340 |
+
else:
|
341 |
+
pattern_re = ''
|
342 |
+
|
343 |
+
base = re.escape(os.path.join(self.base, ''))
|
344 |
+
if prefix is not None:
|
345 |
+
# ditch end of pattern character
|
346 |
+
if _PYTHON_VERSION <= (3, 2):
|
347 |
+
empty_pattern = self._glob_to_re('')
|
348 |
+
prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
|
349 |
+
else:
|
350 |
+
prefix_re = self._glob_to_re(prefix)
|
351 |
+
assert prefix_re.startswith(start) and prefix_re.endswith(end)
|
352 |
+
prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
|
353 |
+
sep = os.sep
|
354 |
+
if os.sep == '\\':
|
355 |
+
sep = r'\\'
|
356 |
+
if _PYTHON_VERSION <= (3, 2):
|
357 |
+
pattern_re = '^' + base + sep.join((prefix_re,
|
358 |
+
'.*' + pattern_re))
|
359 |
+
else:
|
360 |
+
pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
|
361 |
+
pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
|
362 |
+
pattern_re, end)
|
363 |
+
else: # no prefix -- respect anchor flag
|
364 |
+
if anchor:
|
365 |
+
if _PYTHON_VERSION <= (3, 2):
|
366 |
+
pattern_re = '^' + base + pattern_re
|
367 |
+
else:
|
368 |
+
pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
|
369 |
+
|
370 |
+
return re.compile(pattern_re)
|
371 |
+
|
372 |
+
def _glob_to_re(self, pattern):
|
373 |
+
"""Translate a shell-like glob pattern to a regular expression.
|
374 |
+
|
375 |
+
Return a string containing the regex. Differs from
|
376 |
+
'fnmatch.translate()' in that '*' does not match "special characters"
|
377 |
+
(which are platform-specific).
|
378 |
+
"""
|
379 |
+
pattern_re = fnmatch.translate(pattern)
|
380 |
+
|
381 |
+
# '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
|
382 |
+
# IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
|
383 |
+
# and by extension they shouldn't match such "special characters" under
|
384 |
+
# any OS. So change all non-escaped dots in the RE to match any
|
385 |
+
# character except the special characters (currently: just os.sep).
|
386 |
+
sep = os.sep
|
387 |
+
if os.sep == '\\':
|
388 |
+
# we're using a regex to manipulate a regex, so we need
|
389 |
+
# to escape the backslash twice
|
390 |
+
sep = r'\\\\'
|
391 |
+
escaped = r'\1[^%s]' % sep
|
392 |
+
pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
|
393 |
+
return pattern_re
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/markers.py
ADDED
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2012-2017 Vinay Sajip.
|
4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
6 |
+
#
|
7 |
+
"""
|
8 |
+
Parser for the environment markers micro-language defined in PEP 508.
|
9 |
+
"""
|
10 |
+
|
11 |
+
# Note: In PEP 345, the micro-language was Python compatible, so the ast
|
12 |
+
# module could be used to parse it. However, PEP 508 introduced operators such
|
13 |
+
# as ~= and === which aren't in Python, necessitating a different approach.
|
14 |
+
|
15 |
+
import os
|
16 |
+
import re
|
17 |
+
import sys
|
18 |
+
import platform
|
19 |
+
|
20 |
+
from .compat import string_types
|
21 |
+
from .util import in_venv, parse_marker
|
22 |
+
from .version import NormalizedVersion as NV
|
23 |
+
|
24 |
+
__all__ = ['interpret']
|
25 |
+
|
26 |
+
_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")')
|
27 |
+
|
28 |
+
def _is_literal(o):
|
29 |
+
if not isinstance(o, string_types) or not o:
|
30 |
+
return False
|
31 |
+
return o[0] in '\'"'
|
32 |
+
|
33 |
+
def _get_versions(s):
|
34 |
+
result = []
|
35 |
+
for m in _VERSION_PATTERN.finditer(s):
|
36 |
+
result.append(NV(m.groups()[0]))
|
37 |
+
return set(result)
|
38 |
+
|
39 |
+
class Evaluator(object):
|
40 |
+
"""
|
41 |
+
This class is used to evaluate marker expessions.
|
42 |
+
"""
|
43 |
+
|
44 |
+
operations = {
|
45 |
+
'==': lambda x, y: x == y,
|
46 |
+
'===': lambda x, y: x == y,
|
47 |
+
'~=': lambda x, y: x == y or x > y,
|
48 |
+
'!=': lambda x, y: x != y,
|
49 |
+
'<': lambda x, y: x < y,
|
50 |
+
'<=': lambda x, y: x == y or x < y,
|
51 |
+
'>': lambda x, y: x > y,
|
52 |
+
'>=': lambda x, y: x == y or x > y,
|
53 |
+
'and': lambda x, y: x and y,
|
54 |
+
'or': lambda x, y: x or y,
|
55 |
+
'in': lambda x, y: x in y,
|
56 |
+
'not in': lambda x, y: x not in y,
|
57 |
+
}
|
58 |
+
|
59 |
+
def evaluate(self, expr, context):
|
60 |
+
"""
|
61 |
+
Evaluate a marker expression returned by the :func:`parse_requirement`
|
62 |
+
function in the specified context.
|
63 |
+
"""
|
64 |
+
if isinstance(expr, string_types):
|
65 |
+
if expr[0] in '\'"':
|
66 |
+
result = expr[1:-1]
|
67 |
+
else:
|
68 |
+
if expr not in context:
|
69 |
+
raise SyntaxError('unknown variable: %s' % expr)
|
70 |
+
result = context[expr]
|
71 |
+
else:
|
72 |
+
assert isinstance(expr, dict)
|
73 |
+
op = expr['op']
|
74 |
+
if op not in self.operations:
|
75 |
+
raise NotImplementedError('op not implemented: %s' % op)
|
76 |
+
elhs = expr['lhs']
|
77 |
+
erhs = expr['rhs']
|
78 |
+
if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
|
79 |
+
raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))
|
80 |
+
|
81 |
+
lhs = self.evaluate(elhs, context)
|
82 |
+
rhs = self.evaluate(erhs, context)
|
83 |
+
if ((elhs == 'python_version' or erhs == 'python_version') and
|
84 |
+
op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
|
85 |
+
lhs = NV(lhs)
|
86 |
+
rhs = NV(rhs)
|
87 |
+
elif elhs == 'python_version' and op in ('in', 'not in'):
|
88 |
+
lhs = NV(lhs)
|
89 |
+
rhs = _get_versions(rhs)
|
90 |
+
result = self.operations[op](lhs, rhs)
|
91 |
+
return result
|
92 |
+
|
93 |
+
_DIGITS = re.compile(r'\d+\.\d+')
|
94 |
+
|
95 |
+
def default_context():
|
96 |
+
def format_full_version(info):
|
97 |
+
version = '%s.%s.%s' % (info.major, info.minor, info.micro)
|
98 |
+
kind = info.releaselevel
|
99 |
+
if kind != 'final':
|
100 |
+
version += kind[0] + str(info.serial)
|
101 |
+
return version
|
102 |
+
|
103 |
+
if hasattr(sys, 'implementation'):
|
104 |
+
implementation_version = format_full_version(sys.implementation.version)
|
105 |
+
implementation_name = sys.implementation.name
|
106 |
+
else:
|
107 |
+
implementation_version = '0'
|
108 |
+
implementation_name = ''
|
109 |
+
|
110 |
+
ppv = platform.python_version()
|
111 |
+
m = _DIGITS.match(ppv)
|
112 |
+
pv = m.group(0)
|
113 |
+
result = {
|
114 |
+
'implementation_name': implementation_name,
|
115 |
+
'implementation_version': implementation_version,
|
116 |
+
'os_name': os.name,
|
117 |
+
'platform_machine': platform.machine(),
|
118 |
+
'platform_python_implementation': platform.python_implementation(),
|
119 |
+
'platform_release': platform.release(),
|
120 |
+
'platform_system': platform.system(),
|
121 |
+
'platform_version': platform.version(),
|
122 |
+
'platform_in_venv': str(in_venv()),
|
123 |
+
'python_full_version': ppv,
|
124 |
+
'python_version': pv,
|
125 |
+
'sys_platform': sys.platform,
|
126 |
+
}
|
127 |
+
return result
|
128 |
+
|
129 |
+
DEFAULT_CONTEXT = default_context()
|
130 |
+
del default_context
|
131 |
+
|
132 |
+
evaluator = Evaluator()
|
133 |
+
|
134 |
+
def interpret(marker, execution_context=None):
|
135 |
+
"""
|
136 |
+
Interpret a marker and return a result depending on environment.
|
137 |
+
|
138 |
+
:param marker: The marker to interpret.
|
139 |
+
:type marker: str
|
140 |
+
:param execution_context: The context used for name lookup.
|
141 |
+
:type execution_context: mapping
|
142 |
+
"""
|
143 |
+
try:
|
144 |
+
expr, rest = parse_marker(marker)
|
145 |
+
except Exception as e:
|
146 |
+
raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
|
147 |
+
if rest and rest[0] != '#':
|
148 |
+
raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
|
149 |
+
context = dict(DEFAULT_CONTEXT)
|
150 |
+
if execution_context:
|
151 |
+
context.update(execution_context)
|
152 |
+
return evaluator.evaluate(expr, context)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/metadata.py
ADDED
@@ -0,0 +1,1058 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2012 The Python Software Foundation.
|
4 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
5 |
+
#
|
6 |
+
"""Implementation of the Metadata for Python packages PEPs.
|
7 |
+
|
8 |
+
Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0).
|
9 |
+
"""
|
10 |
+
from __future__ import unicode_literals
|
11 |
+
|
12 |
+
import codecs
|
13 |
+
from email import message_from_file
|
14 |
+
import json
|
15 |
+
import logging
|
16 |
+
import re
|
17 |
+
|
18 |
+
|
19 |
+
from . import DistlibException, __version__
|
20 |
+
from .compat import StringIO, string_types, text_type
|
21 |
+
from .markers import interpret
|
22 |
+
from .util import extract_by_key, get_extras
|
23 |
+
from .version import get_scheme, PEP440_VERSION_RE
|
24 |
+
|
25 |
+
logger = logging.getLogger(__name__)
|
26 |
+
|
27 |
+
|
28 |
+
class MetadataMissingError(DistlibException):
    """Raised when a required metadata field is missing."""
|
30 |
+
|
31 |
+
|
32 |
+
class MetadataConflictError(DistlibException):
    """Attempt to read or write metadata fields that are conflictual."""
|
34 |
+
|
35 |
+
|
36 |
+
class MetadataUnrecognizedVersionError(DistlibException):
    """Raised for an unknown metadata version number."""
|
38 |
+
|
39 |
+
|
40 |
+
class MetadataInvalidError(DistlibException):
    """Raised when a metadata value is invalid."""
|
42 |
+
|
43 |
+
# public API of this module
__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']

# Encoding used for the PKG-INFO files
PKG_INFO_ENCODING = 'utf-8'

# preferred version. Hopefully will be changed
# to 1.2 once PEP 345 is supported everywhere
PKG_INFO_PREFERRED_VERSION = '1.1'

# Continuation-line prefixes used when the Description field is folded
# into the RFC 822 headers: 1.2+ marks continuation lines with '|',
# pre-1.2 uses plain indentation.  These must stay in sync with the
# replacements performed in LegacyMetadata.write_file.
_LINE_PREFIX_1_2 = re.compile('\n       \\|')
_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')

# Field names legal in each metadata version (PEP 241, 314, 345, 426, 566).
_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License')

_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License', 'Classifier', 'Download-URL', 'Obsoletes',
               'Provides', 'Requires')

# Fields whose presence marks a 1.1 (PEP 314) document.
_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
                'Download-URL')

_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External')

# Fields whose presence marks a 1.2 (PEP 345) document.
_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
                'Maintainer-email', 'Project-URL')

_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External', 'Private-Version',
               'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
               'Provides-Extra')

# Fields whose presence marks a 2.0 (withdrawn PEP 426) document.
_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
                'Setup-Requires-Dist', 'Extension')

# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
# the metadata. Include them in the tuple literal below to allow them
# (for now).
# Ditto for Obsoletes - see issue #140.
_566_FIELDS = _426_FIELDS + ('Description-Content-Type',
                             'Requires', 'Provides', 'Obsoletes')

# Field whose presence marks a 2.1 (PEP 566) document.
_566_MARKERS = ('Description-Content-Type',)

# Union of every field name legal in any supported metadata version.
_ALL_FIELDS = set()
_ALL_FIELDS.update(_241_FIELDS)
_ALL_FIELDS.update(_314_FIELDS)
_ALL_FIELDS.update(_345_FIELDS)
_ALL_FIELDS.update(_426_FIELDS)
_ALL_FIELDS.update(_566_FIELDS)

# Matches the extra == "name" clause in an environment marker.
EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
|
111 |
+
|
112 |
+
|
113 |
+
def _version2fieldlist(version):
    """Return the tuple of field names legal for metadata *version*.

    Raises MetadataUnrecognizedVersionError for any version string other
    than '1.0', '1.1', '1.2', '1.3', '2.0' or '2.1'.
    """
    if version in ('1.3', '2.1'):
        # 1.3/2.1 extend the 1.2 field set; filter to avoid adding a
        # field name that is already present in the 1.2 list.
        extra = tuple(f for f in _566_FIELDS if f not in _345_FIELDS)
        return _345_FIELDS + extra
    by_version = {
        '1.0': _241_FIELDS,
        '1.1': _314_FIELDS,
        '1.2': _345_FIELDS,
        '2.0': _426_FIELDS,
    }
    try:
        return by_version[version]
    except KeyError:
        raise MetadataUnrecognizedVersionError(version)
|
126 |
+
|
127 |
+
|
128 |
+
def _best_version(fields):
    """Detect the best version depending on the fields used.

    *fields* is a mapping of field name -> value; empty/UNKNOWN/None
    values are ignored.  Returns a version string, or raises
    MetadataConflictError when no single version can accommodate the
    fields, or when markers from incompatible versions are mixed.
    """
    def _has_marker(keys, markers):
        # True if any version-specific marker field appears in keys.
        for marker in markers:
            if marker in keys:
                return True
        return False

    # Consider only fields that actually carry a value.
    keys = []
    for key, value in fields.items():
        if value in ([], 'UNKNOWN', None):
            continue
        keys.append(key)

    possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1']

    # first let's try to see if a field is not part of one of the version
    for key in keys:
        if key not in _241_FIELDS and '1.0' in possible_versions:
            possible_versions.remove('1.0')
            logger.debug('Removed 1.0 due to %s', key)
        if key not in _314_FIELDS and '1.1' in possible_versions:
            possible_versions.remove('1.1')
            logger.debug('Removed 1.1 due to %s', key)
        if key not in _345_FIELDS and '1.2' in possible_versions:
            possible_versions.remove('1.2')
            logger.debug('Removed 1.2 due to %s', key)
        if key not in _566_FIELDS and '1.3' in possible_versions:
            possible_versions.remove('1.3')
            logger.debug('Removed 1.3 due to %s', key)
        if key not in _566_FIELDS and '2.1' in possible_versions:
            if key != 'Description':  # In 2.1, description allowed after headers
                possible_versions.remove('2.1')
                logger.debug('Removed 2.1 due to %s', key)
        if key not in _426_FIELDS and '2.0' in possible_versions:
            possible_versions.remove('2.0')
            logger.debug('Removed 2.0 due to %s', key)

    # possible_version contains qualified versions
    if len(possible_versions) == 1:
        return possible_versions[0]   # found !
    elif len(possible_versions) == 0:
        logger.debug('Out of options - unknown metadata set: %s', fields)
        raise MetadataConflictError('Unknown metadata set')

    # let's see if one unique marker is found
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
    is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
    if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1:
        raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields')

    # we have the choice, 1.0, or 1.2, or 2.0
    #   - 1.0 has a broken Summary field but works with all tools
    #   - 1.1 is to avoid
    #   - 1.2 fixes Summary but has little adoption
    #   - 2.0 adds more features and is very new
    if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0:
        # we couldn't find any specific marker
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'
    if is_1_2:
        return '1.2'
    if is_2_1:
        return '2.1'

    # NOTE(review): fall-through default when no marker matched and the
    # preferred version was eliminated — returns '2.0' even if '2.0' has
    # been removed from possible_versions above; kept as-is for
    # compatibility with upstream behaviour.
    return '2.0'
|
198 |
+
|
199 |
+
# This follows the rules about transforming keys as described in
# https://www.python.org/dev/peps/pep-0566/#id17
_ATTR2FIELD = {
    name.lower().replace("-", "_"): name for name in _ALL_FIELDS
}
# Reverse mapping: canonical field name -> attribute-style name.
_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()}

# Fields whose values are version predicates (validated with a matcher).
_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
# Fields holding a version-constraint list.
_VERSIONS_FIELDS = ('Requires-Python',)
# Fields holding a single version.
_VERSION_FIELDS = ('Version',)
# Fields that may occur multiple times and are stored as lists.
_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
               'Requires', 'Provides', 'Obsoletes-Dist',
               'Provides-Dist', 'Requires-Dist', 'Requires-External',
               'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
               'Provides-Extra', 'Extension')
# List fields whose items are comma-separated tuples (label, url).
_LISTTUPLEFIELDS = ('Project-URL',)

# Fields serialized as a single comma-joined line.
_ELEMENTSFIELD = ('Keywords',)

_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')

# Sentinel distinguishing "no default supplied" from a None default.
_MISSING = object()

# Runs of characters not safe in a filename (anything but alnum and '.').
_FILESAFE = re.compile('[^A-Za-z0-9.]+')
|
223 |
+
|
224 |
+
|
225 |
+
def _get_name_and_version(name, version, for_filename=False):
|
226 |
+
"""Return the distribution name with version.
|
227 |
+
|
228 |
+
If for_filename is true, return a filename-escaped form."""
|
229 |
+
if for_filename:
|
230 |
+
# For both name and version any runs of non-alphanumeric or '.'
|
231 |
+
# characters are replaced with a single '-'. Additionally any
|
232 |
+
# spaces in the version string become '.'
|
233 |
+
name = _FILESAFE.sub('-', name)
|
234 |
+
version = _FILESAFE.sub('-', version.replace(' ', '.'))
|
235 |
+
return '%s-%s' % (name, version)
|
236 |
+
|
237 |
+
|
238 |
+
class LegacyMetadata(object):
    """The legacy metadata of a release.

    Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a metadata file
    - *fileobj* give a file-like object with metadata as content
    - *mapping* is a dict-like object
    - *scheme* is a version scheme name
    """
    # TODO document the mapping API and UNKNOWN default key

    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        # path / fileobj / mapping are mutually exclusive input sources.
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._fields = {}          # canonical field name -> value
        self.requires_files = []
        self._dependencies = None
        self.scheme = scheme       # version-scheme name used for validation
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)
            self.set_metadata_version()

    def set_metadata_version(self):
        """Recompute Metadata-Version from the fields currently set."""
        self._fields['Metadata-Version'] = _best_version(self._fields)

    def _write_field(self, fileobj, name, value):
        # Emit one 'Name: value' header line.
        fileobj.write('%s: %s\n' % (name, value))

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __delitem__(self, name):
        field_name = self._convert_name(name)
        try:
            del self._fields[field_name]
        except KeyError:
            # Re-raise with the caller's original spelling of the name.
            raise KeyError(name)

    def __contains__(self, name):
        # Accept both canonical field names and attribute-style names.
        return (name in self._fields or
                self._convert_name(name) in self._fields)

    def _convert_name(self, name):
        """Normalize *name* to a canonical field name where possible."""
        if name in _ALL_FIELDS:
            return name
        name = name.replace('-', '_').lower()
        return _ATTR2FIELD.get(name, name)

    def _default_value(self, name):
        # List-valued fields default to [], everything else to 'UNKNOWN'.
        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
            return []
        return 'UNKNOWN'

    def _remove_line_prefix(self, value):
        # Undo the continuation-line folding applied by write_file.
        if self.metadata_version in ('1.0', '1.1'):
            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
        else:
            return _LINE_PREFIX_1_2.sub('\n', value)

    def __getattr__(self, name):
        # Expose fields as attributes (e.g. md.home_page).
        if name in _ATTR2FIELD:
            return self[name]
        raise AttributeError(name)

    #
    # Public API
    #

#    dependencies = property(_get_dependencies, _set_dependencies)

    def get_fullname(self, filesafe=False):
        """Return the distribution name with version.

        If filesafe is true, return a filename-escaped form."""
        return _get_name_and_version(self['Name'], self['Version'], filesafe)

    def is_field(self, name):
        """return True if name is a valid metadata key"""
        name = self._convert_name(name)
        return name in _ALL_FIELDS

    def is_multi_field(self, name):
        """Return True if *name* is a list-valued (repeatable) field."""
        name = self._convert_name(name)
        return name in _LISTFIELDS

    def read(self, filepath):
        """Read the metadata values from a file path."""
        fp = codecs.open(filepath, 'r', encoding='utf-8')
        try:
            self.read_file(fp)
        finally:
            fp.close()

    def read_file(self, fileob):
        """Read the metadata values from a file object."""
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        # When reading, get all the fields we can
        for field in _ALL_FIELDS:
            if field not in msg:
                continue
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                if field in _LISTTUPLEFIELDS and values is not None:
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)

        # PEP 566 specifies that the body be used for the description, if
        # available
        body = msg.get_payload()
        self["Description"] = body if body else self["Description"]
        # logger.debug('Attempting to set metadata for %s', self)
        # self.set_metadata_version()

    def write(self, filepath, skip_unknown=False):
        """Write the metadata fields to filepath."""
        fp = codecs.open(filepath, 'w', encoding='utf-8')
        try:
            self.write_file(fp, skip_unknown)
        finally:
            fp.close()

    def write_file(self, fileobject, skip_unknown=False):
        """Write the PKG-INFO format data to a file object.

        If *skip_unknown* is true, fields with no meaningful value are
        omitted from the output.
        """
        self.set_metadata_version()

        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
                continue
            if field in _ELEMENTSFIELD:
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    # Fold multi-line descriptions with the per-version
                    # continuation prefix (see _LINE_PREFIX_* regexes).
                    if self.metadata_version in ('1.0', '1.1'):
                        values = values.replace('\n', '\n        ')
                    else:
                        values = values.replace('\n', '\n       |')
                values = [values]

            if field in _LISTTUPLEFIELDS:
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)

    def update(self, other=None, **kwargs):
        """Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        """
        def _set(key, value):
            if key in _ATTR2FIELD and value:
                self.set(self._convert_name(key), value)

        if not other:
            # other is None or empty container
            pass
        elif hasattr(other, 'keys'):
            for k in other.keys():
                _set(k, other[k])
        else:
            for k, v in other:
                _set(k, v)

        if kwargs:
            for k, v in kwargs.items():
                _set(k, v)

    def set(self, name, value):
        """Control then set a metadata field."""
        name = self._convert_name(name)

        # Coerce scalar input into the list form expected by list fields.
        if ((name in _ELEMENTSFIELD or name == 'Platform') and
            not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [v.strip() for v in value.split(',')]
            else:
                value = []
        elif (name in _LISTFIELDS and
              not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [value]
            else:
                value = []

        # Validation is only advisory: emit warnings, never reject.
        if logger.isEnabledFor(logging.WARNING):
            project_name = self['Name']

            scheme = get_scheme(self.scheme)
            if name in _PREDICATE_FIELDS and value is not None:
                for v in value:
                    # check that the values are valid
                    if not scheme.is_valid_matcher(v.split(';')[0]):
                        logger.warning(
                            "'%s': '%s' is not valid (field '%s')",
                            project_name, v, name)
            # FIXME this rejects UNKNOWN, is that right?
            elif name in _VERSIONS_FIELDS and value is not None:
                if not scheme.is_valid_constraint_list(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)
            elif name in _VERSION_FIELDS and value is not None:
                if not scheme.is_valid_version(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)

        if name in _UNICODEFIELDS:
            if name == 'Description':
                value = self._remove_line_prefix(value)

        self._fields[name] = value

    def get(self, name, default=_MISSING):
        """Get a metadata field."""
        name = self._convert_name(name)
        if name not in self._fields:
            if default is _MISSING:
                default = self._default_value(name)
            return default
        if name in _UNICODEFIELDS:
            value = self._fields[name]
            return value
        elif name in _LISTFIELDS:
            value = self._fields[name]
            if value is None:
                return []
            res = []
            for val in value:
                if name not in _LISTTUPLEFIELDS:
                    res.append(val)
                else:
                    # That's for Project-URL
                    res.append((val[0], val[1]))
            return res

        elif name in _ELEMENTSFIELD:
            value = self._fields[name]
            if isinstance(value, string_types):
                return value.split(',')
        return self._fields[name]

    def check(self, strict=False):
        """Check if the metadata is compliant. If strict is True then raise if
        no Name or Version are provided"""
        self.set_metadata_version()

        # XXX should check the versions (if the file was loaded)
        missing, warnings = [], []

        for attr in ('Name', 'Version'):  # required by PEP 345
            if attr not in self:
                missing.append(attr)

        if strict and missing != []:
            msg = 'missing required metadata: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)

        for attr in ('Home-page', 'Author'):
            if attr not in self:
                missing.append(attr)

        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
        if self['Metadata-Version'] != '1.2':
            return missing, warnings

        scheme = get_scheme(self.scheme)

        def are_valid_constraints(value):
            for v in value:
                if not scheme.is_valid_matcher(v.split(';')[0]):
                    return False
            return True

        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
                                   (_VERSIONS_FIELDS,
                                    scheme.is_valid_constraint_list),
                                   (_VERSION_FIELDS,
                                    scheme.is_valid_version)):
            for field in fields:
                value = self.get(field, None)
                if value is not None and not controller(value):
                    warnings.append("Wrong value for '%s': %s" % (field, value))

        return missing, warnings

    def todict(self, skip_missing=False):
        """Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
        This is as per https://www.python.org/dev/peps/pep-0566/#id17.
        """
        self.set_metadata_version()

        fields = _version2fieldlist(self['Metadata-Version'])

        data = {}

        for field_name in fields:
            if not skip_missing or field_name in self._fields:
                key = _FIELD2ATTR[field_name]
                if key != 'project_url':
                    data[key] = self[field_name]
                else:
                    # Project-URL entries are (label, url) tuples; flatten
                    # them back to 'label,url' strings.
                    data[key] = [','.join(u) for u in self[field_name]]

        return data

    def add_requirements(self, requirements):
        """Append *requirements* to Requires-Dist (dropping pre-1.2
        dependency fields that conflict with it)."""
        if self['Metadata-Version'] == '1.1':
            # we can't have 1.1 metadata *and* Setuptools requires
            for field in ('Obsoletes', 'Requires', 'Provides'):
                if field in self:
                    del self[field]
        self['Requires-Dist'] += requirements

    # Mapping API
    # TODO could add iter* variants

    def keys(self):
        return list(_version2fieldlist(self['Metadata-Version']))

    def __iter__(self):
        for key in self.keys():
            yield key

    def values(self):
        return [self[key] for key in self.keys()]

    def items(self):
        return [(key, self[key]) for key in self.keys()]

    def __repr__(self):
        return '<%s %s %s>' % (self.__class__.__name__, self.name,
                               self.version)
|
596 |
+
|
597 |
+
|
598 |
+
# Standard filenames for the different on-disk metadata representations.
METADATA_FILENAME = 'pydist.json'
WHEEL_METADATA_FILENAME = 'metadata.json'
LEGACY_METADATA_FILENAME = 'METADATA'
|
601 |
+
|
602 |
+
|
603 |
+
class Metadata(object):
|
604 |
+
"""
|
605 |
+
The metadata of a release. This implementation uses 2.0 (JSON)
|
606 |
+
metadata where possible. If not possible, it wraps a LegacyMetadata
|
607 |
+
instance which handles the key-value metadata format.
|
608 |
+
"""
|
609 |
+
|
610 |
+
METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
|
611 |
+
|
612 |
+
NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
|
613 |
+
|
614 |
+
VERSION_MATCHER = PEP440_VERSION_RE
|
615 |
+
|
616 |
+
SUMMARY_MATCHER = re.compile('.{1,2047}')
|
617 |
+
|
618 |
+
METADATA_VERSION = '2.0'
|
619 |
+
|
620 |
+
GENERATOR = 'distlib (%s)' % __version__
|
621 |
+
|
622 |
+
MANDATORY_KEYS = {
|
623 |
+
'name': (),
|
624 |
+
'version': (),
|
625 |
+
'summary': ('legacy',),
|
626 |
+
}
|
627 |
+
|
628 |
+
INDEX_KEYS = ('name version license summary description author '
|
629 |
+
'author_email keywords platform home_page classifiers '
|
630 |
+
'download_url')
|
631 |
+
|
632 |
+
DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
|
633 |
+
'dev_requires provides meta_requires obsoleted_by '
|
634 |
+
'supports_environments')
|
635 |
+
|
636 |
+
SYNTAX_VALIDATORS = {
|
637 |
+
'metadata_version': (METADATA_VERSION_MATCHER, ()),
|
638 |
+
'name': (NAME_MATCHER, ('legacy',)),
|
639 |
+
'version': (VERSION_MATCHER, ('legacy',)),
|
640 |
+
'summary': (SUMMARY_MATCHER, ('legacy',)),
|
641 |
+
}
|
642 |
+
|
643 |
+
__slots__ = ('_legacy', '_data', 'scheme')
|
644 |
+
|
645 |
+
def __init__(self, path=None, fileobj=None, mapping=None,
|
646 |
+
scheme='default'):
|
647 |
+
if [path, fileobj, mapping].count(None) < 2:
|
648 |
+
raise TypeError('path, fileobj and mapping are exclusive')
|
649 |
+
self._legacy = None
|
650 |
+
self._data = None
|
651 |
+
self.scheme = scheme
|
652 |
+
#import pdb; pdb.set_trace()
|
653 |
+
if mapping is not None:
|
654 |
+
try:
|
655 |
+
self._validate_mapping(mapping, scheme)
|
656 |
+
self._data = mapping
|
657 |
+
except MetadataUnrecognizedVersionError:
|
658 |
+
self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
|
659 |
+
self.validate()
|
660 |
+
else:
|
661 |
+
data = None
|
662 |
+
if path:
|
663 |
+
with open(path, 'rb') as f:
|
664 |
+
data = f.read()
|
665 |
+
elif fileobj:
|
666 |
+
data = fileobj.read()
|
667 |
+
if data is None:
|
668 |
+
# Initialised with no args - to be added
|
669 |
+
self._data = {
|
670 |
+
'metadata_version': self.METADATA_VERSION,
|
671 |
+
'generator': self.GENERATOR,
|
672 |
+
}
|
673 |
+
else:
|
674 |
+
if not isinstance(data, text_type):
|
675 |
+
data = data.decode('utf-8')
|
676 |
+
try:
|
677 |
+
self._data = json.loads(data)
|
678 |
+
self._validate_mapping(self._data, scheme)
|
679 |
+
except ValueError:
|
680 |
+
# Note: MetadataUnrecognizedVersionError does not
|
681 |
+
# inherit from ValueError (it's a DistlibException,
|
682 |
+
# which should not inherit from ValueError).
|
683 |
+
# The ValueError comes from the json.load - if that
|
684 |
+
# succeeds and we get a validation error, we want
|
685 |
+
# that to propagate
|
686 |
+
self._legacy = LegacyMetadata(fileobj=StringIO(data),
|
687 |
+
scheme=scheme)
|
688 |
+
self.validate()
|
689 |
+
|
690 |
+
common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))
|
691 |
+
|
692 |
+
none_list = (None, list)
|
693 |
+
none_dict = (None, dict)
|
694 |
+
|
695 |
+
mapped_keys = {
|
696 |
+
'run_requires': ('Requires-Dist', list),
|
697 |
+
'build_requires': ('Setup-Requires-Dist', list),
|
698 |
+
'dev_requires': none_list,
|
699 |
+
'test_requires': none_list,
|
700 |
+
'meta_requires': none_list,
|
701 |
+
'extras': ('Provides-Extra', list),
|
702 |
+
'modules': none_list,
|
703 |
+
'namespaces': none_list,
|
704 |
+
'exports': none_dict,
|
705 |
+
'commands': none_dict,
|
706 |
+
'classifiers': ('Classifier', list),
|
707 |
+
'source_url': ('Download-URL', None),
|
708 |
+
'metadata_version': ('Metadata-Version', None),
|
709 |
+
}
|
710 |
+
|
711 |
+
del none_list, none_dict
|
712 |
+
|
713 |
+
def __getattribute__(self, key):
|
714 |
+
common = object.__getattribute__(self, 'common_keys')
|
715 |
+
mapped = object.__getattribute__(self, 'mapped_keys')
|
716 |
+
if key in mapped:
|
717 |
+
lk, maker = mapped[key]
|
718 |
+
if self._legacy:
|
719 |
+
if lk is None:
|
720 |
+
result = None if maker is None else maker()
|
721 |
+
else:
|
722 |
+
result = self._legacy.get(lk)
|
723 |
+
else:
|
724 |
+
value = None if maker is None else maker()
|
725 |
+
if key not in ('commands', 'exports', 'modules', 'namespaces',
|
726 |
+
'classifiers'):
|
727 |
+
result = self._data.get(key, value)
|
728 |
+
else:
|
729 |
+
# special cases for PEP 459
|
730 |
+
sentinel = object()
|
731 |
+
result = sentinel
|
732 |
+
d = self._data.get('extensions')
|
733 |
+
if d:
|
734 |
+
if key == 'commands':
|
735 |
+
result = d.get('python.commands', value)
|
736 |
+
elif key == 'classifiers':
|
737 |
+
d = d.get('python.details')
|
738 |
+
if d:
|
739 |
+
result = d.get(key, value)
|
740 |
+
else:
|
741 |
+
d = d.get('python.exports')
|
742 |
+
if not d:
|
743 |
+
d = self._data.get('python.exports')
|
744 |
+
if d:
|
745 |
+
result = d.get(key, value)
|
746 |
+
if result is sentinel:
|
747 |
+
result = value
|
748 |
+
elif key not in common:
|
749 |
+
result = object.__getattribute__(self, key)
|
750 |
+
elif self._legacy:
|
751 |
+
result = self._legacy.get(key)
|
752 |
+
else:
|
753 |
+
result = self._data.get(key)
|
754 |
+
return result
|
755 |
+
|
756 |
+
def _validate_value(self, key, value, scheme=None):
|
757 |
+
if key in self.SYNTAX_VALIDATORS:
|
758 |
+
pattern, exclusions = self.SYNTAX_VALIDATORS[key]
|
759 |
+
if (scheme or self.scheme) not in exclusions:
|
760 |
+
m = pattern.match(value)
|
761 |
+
if not m:
|
762 |
+
raise MetadataInvalidError("'%s' is an invalid value for "
|
763 |
+
"the '%s' property" % (value,
|
764 |
+
key))
|
765 |
+
|
766 |
+
def __setattr__(self, key, value):
|
767 |
+
self._validate_value(key, value)
|
768 |
+
common = object.__getattribute__(self, 'common_keys')
|
769 |
+
mapped = object.__getattribute__(self, 'mapped_keys')
|
770 |
+
if key in mapped:
|
771 |
+
lk, _ = mapped[key]
|
772 |
+
if self._legacy:
|
773 |
+
if lk is None:
|
774 |
+
raise NotImplementedError
|
775 |
+
self._legacy[lk] = value
|
776 |
+
elif key not in ('commands', 'exports', 'modules', 'namespaces',
|
777 |
+
'classifiers'):
|
778 |
+
self._data[key] = value
|
779 |
+
else:
|
780 |
+
# special cases for PEP 459
|
781 |
+
d = self._data.setdefault('extensions', {})
|
782 |
+
if key == 'commands':
|
783 |
+
d['python.commands'] = value
|
784 |
+
elif key == 'classifiers':
|
785 |
+
d = d.setdefault('python.details', {})
|
786 |
+
d[key] = value
|
787 |
+
else:
|
788 |
+
d = d.setdefault('python.exports', {})
|
789 |
+
d[key] = value
|
790 |
+
elif key not in common:
|
791 |
+
object.__setattr__(self, key, value)
|
792 |
+
else:
|
793 |
+
if key == 'keywords':
|
794 |
+
if isinstance(value, string_types):
|
795 |
+
value = value.strip()
|
796 |
+
if value:
|
797 |
+
value = value.split()
|
798 |
+
else:
|
799 |
+
value = []
|
800 |
+
if self._legacy:
|
801 |
+
self._legacy[key] = value
|
802 |
+
else:
|
803 |
+
self._data[key] = value
|
804 |
+
|
805 |
+
@property
|
806 |
+
def name_and_version(self):
|
807 |
+
return _get_name_and_version(self.name, self.version, True)
|
808 |
+
|
809 |
+
@property
|
810 |
+
def provides(self):
|
811 |
+
if self._legacy:
|
812 |
+
result = self._legacy['Provides-Dist']
|
813 |
+
else:
|
814 |
+
result = self._data.setdefault('provides', [])
|
815 |
+
s = '%s (%s)' % (self.name, self.version)
|
816 |
+
if s not in result:
|
817 |
+
result.append(s)
|
818 |
+
return result
|
819 |
+
|
820 |
+
@provides.setter
|
821 |
+
def provides(self, value):
|
822 |
+
if self._legacy:
|
823 |
+
self._legacy['Provides-Dist'] = value
|
824 |
+
else:
|
825 |
+
self._data['provides'] = value
|
826 |
+
|
827 |
+
def get_requirements(self, reqts, extras=None, env=None):
|
828 |
+
"""
|
829 |
+
Base method to get dependencies, given a set of extras
|
830 |
+
to satisfy and an optional environment context.
|
831 |
+
:param reqts: A list of sometimes-wanted dependencies,
|
832 |
+
perhaps dependent on extras and environment.
|
833 |
+
:param extras: A list of optional components being requested.
|
834 |
+
:param env: An optional environment for marker evaluation.
|
835 |
+
"""
|
836 |
+
if self._legacy:
|
837 |
+
result = reqts
|
838 |
+
else:
|
839 |
+
result = []
|
840 |
+
extras = get_extras(extras or [], self.extras)
|
841 |
+
for d in reqts:
|
842 |
+
if 'extra' not in d and 'environment' not in d:
|
843 |
+
# unconditional
|
844 |
+
include = True
|
845 |
+
else:
|
846 |
+
if 'extra' not in d:
|
847 |
+
# Not extra-dependent - only environment-dependent
|
848 |
+
include = True
|
849 |
+
else:
|
850 |
+
include = d.get('extra') in extras
|
851 |
+
if include:
|
852 |
+
# Not excluded because of extras, check environment
|
853 |
+
marker = d.get('environment')
|
854 |
+
if marker:
|
855 |
+
include = interpret(marker, env)
|
856 |
+
if include:
|
857 |
+
result.extend(d['requires'])
|
858 |
+
for key in ('build', 'dev', 'test'):
|
859 |
+
e = ':%s:' % key
|
860 |
+
if e in extras:
|
861 |
+
extras.remove(e)
|
862 |
+
# A recursive call, but it should terminate since 'test'
|
863 |
+
# has been removed from the extras
|
864 |
+
reqts = self._data.get('%s_requires' % key, [])
|
865 |
+
result.extend(self.get_requirements(reqts, extras=extras,
|
866 |
+
env=env))
|
867 |
+
return result
|
868 |
+
|
869 |
+
@property
def dictionary(self):
    """Return the metadata as a dict, converting from legacy form if needed."""
    if not self._legacy:
        return self._data
    return self._from_legacy()
|
874 |
+
|
875 |
+
@property
def dependencies(self):
    """Return the dependency-related subset of the metadata.

    Not supported for legacy metadata.
    """
    if self._legacy:
        raise NotImplementedError
    return extract_by_key(self._data, self.DEPENDENCY_KEYS)
|
881 |
+
|
882 |
+
@dependencies.setter
def dependencies(self, value):
    """Merge *value* (a mapping of dependency keys) into the metadata.

    Not supported for legacy metadata.
    """
    if self._legacy:
        raise NotImplementedError
    self._data.update(value)
|
888 |
+
|
889 |
+
def _validate_mapping(self, mapping, scheme):
    """Validate *mapping* as new-style metadata under *scheme*.

    :raises MetadataUnrecognizedVersionError: wrong metadata_version.
    :raises MetadataMissingError: mandatory keys absent for this scheme.
    """
    if mapping.get('metadata_version') != self.METADATA_VERSION:
        raise MetadataUnrecognizedVersionError()
    # Mandatory keys may be excluded for specific version schemes.
    missing = [key for key, exclusions in self.MANDATORY_KEYS.items()
               if key not in mapping and scheme not in exclusions]
    if missing:
        msg = 'Missing metadata items: %s' % ', '.join(missing)
        raise MetadataMissingError(msg)
    for k, v in mapping.items():
        self._validate_value(k, v, scheme)
|
902 |
+
|
903 |
+
def validate(self):
    """Validate the metadata, logging (legacy) or raising (new-style) on problems."""
    if not self._legacy:
        self._validate_mapping(self._data, self.scheme)
        return
    # Legacy metadata only warns; it does not raise.
    missing, warnings = self._legacy.check(True)
    if missing or warnings:
        logger.warning('Metadata: missing: %s, warnings: %s',
                       missing, warnings)
|
911 |
+
|
912 |
+
def todict(self):
    """Return a plain-dict view of the metadata suitable for indexing."""
    if not self._legacy:
        return extract_by_key(self._data, self.INDEX_KEYS)
    return self._legacy.todict(True)
|
918 |
+
|
919 |
+
def _from_legacy(self):
    """Convert the legacy (key/value) metadata in ``self._legacy`` into a
    new-style (JSON-shaped) dict and return it.

    Only a subset of fields is converted: identity fields, classifiers,
    keywords, provides, and the run/build requirement lists.
    """
    assert self._legacy and not self._data
    result = {
        'metadata_version': self.METADATA_VERSION,
        'generator': self.GENERATOR,
    }
    lmd = self._legacy.todict(True)     # skip missing ones
    for k in ('name', 'version', 'license', 'summary', 'description',
              'classifier'):
        if k in lmd:
            # 'classifier' is the only key whose name differs between the
            # legacy and new-style forms.
            if k == 'classifier':
                nk = 'classifiers'
            else:
                nk = k
            result[nk] = lmd[k]
    kw = lmd.get('Keywords', [])
    # A single empty string means "no keywords" in the legacy form.
    if kw == ['']:
        kw = []
    result['keywords'] = kw
    # Requirement lists become single unconditional requirement entries.
    keys = (('requires_dist', 'run_requires'),
            ('setup_requires_dist', 'build_requires'))
    for ok, nk in keys:
        if ok in lmd and lmd[ok]:
            result[nk] = [{'requires': lmd[ok]}]
    result['provides'] = self.provides
    # NOTE: removed two dead local assignments (author/maintainer dicts)
    # which were created but never used or returned.
    return result
|
947 |
+
|
948 |
+
# Maps new-style (JSON) metadata keys to legacy field names. Tuple keys
# describe a path into nested dicts/lists of the new-style data (integer
# elements index into lists); see _to_legacy for how paths are resolved.
LEGACY_MAPPING = {
    'name': 'Name',
    'version': 'Version',
    ('extensions', 'python.details', 'license'): 'License',
    'summary': 'Summary',
    'description': 'Description',
    ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page',
    ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author',
    ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email',
    'source_url': 'Download-URL',
    ('extensions', 'python.details', 'classifiers'): 'Classifier',
}
|
960 |
+
|
961 |
+
def _to_legacy(self):
    """Convert the new-style metadata in ``self._data`` into a
    LegacyMetadata (key/value) instance and return it.
    """
    def process_entries(entries):
        # Flatten structured requirement entries into "requirement;marker"
        # strings, folding any extra and environment conditions into a
        # single marker expression.
        reqts = set()
        for e in entries:
            extra = e.get('extra')
            env = e.get('environment')
            rlist = e['requires']
            for r in rlist:
                if not env and not extra:
                    reqts.add(r)
                else:
                    marker = ''
                    if extra:
                        marker = 'extra == "%s"' % extra
                    if env:
                        if marker:
                            marker = '(%s) and %s' % (env, marker)
                        else:
                            marker = env
                    reqts.add(';'.join((r, marker)))
        return reqts

    assert self._data and not self._legacy
    result = LegacyMetadata()
    nmd = self._data
    for nk, ok in self.LEGACY_MAPPING.items():
        if not isinstance(nk, tuple):
            # Simple one-to-one key rename.
            if nk in nmd:
                result[ok] = nmd[nk]
        else:
            # Tuple keys are a path into nested containers; copy the value
            # only if every step of the path resolves.
            d = nmd
            found = True
            for k in nk:
                try:
                    d = d[k]
                except (KeyError, IndexError):
                    found = False
                    break
            if found:
                result[ok] = d
    r1 = process_entries(self.run_requires + self.meta_requires)
    r2 = process_entries(self.build_requires + self.dev_requires)
    if self.extras:
        result['Provides-Extra'] = sorted(self.extras)
    result['Requires-Dist'] = sorted(r1)
    result['Setup-Requires-Dist'] = sorted(r2)
    # TODO: any other fields wanted
    return result
|
1010 |
+
|
1011 |
+
def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
    """Write the metadata to *path* or *fileobj* (exactly one must be given).

    :param legacy: If true, write in the legacy key/value format; otherwise
                   write JSON.
    :param skip_unknown: Passed through to the legacy writer to skip fields
                         it does not recognise.
    :raises ValueError: If neither or both of *path* and *fileobj* are given.
    """
    if [path, fileobj].count(None) != 1:
        raise ValueError('Exactly one of path and fileobj is needed')
    self.validate()
    if legacy:
        # Convert to the legacy form first if the data is new-style.
        if self._legacy:
            legacy_md = self._legacy
        else:
            legacy_md = self._to_legacy()
        if path:
            legacy_md.write(path, skip_unknown=skip_unknown)
        else:
            legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
    else:
        # JSON output; convert from the legacy form if necessary.
        if self._legacy:
            d = self._from_legacy()
        else:
            d = self._data
        if fileobj:
            json.dump(d, fileobj, ensure_ascii=True, indent=2,
                      sort_keys=True)
        else:
            with codecs.open(path, 'w', 'utf-8') as f:
                json.dump(d, f, ensure_ascii=True, indent=2,
                          sort_keys=True)
|
1036 |
+
|
1037 |
+
def add_requirements(self, requirements):
    """Merge *requirements* into the unconditional run-time requirements."""
    if self._legacy:
        self._legacy.add_requirements(requirements)
        return
    run_requires = self._data.setdefault('run_requires', [])
    # Find the first entry that applies unconditionally (no marker, no extra).
    unconditional = next(
        (entry for entry in run_requires
         if 'environment' not in entry and 'extra' not in entry),
        None)
    if unconditional is None:
        run_requires.insert(0, {'requires': requirements})
    else:
        merged = set(unconditional['requires']) | set(requirements)
        unconditional['requires'] = sorted(merged)
|
1053 |
+
|
1054 |
+
def __repr__(self):
    """Return a debugging representation including name and version."""
    shown_name = self.name or '(no name)'
    shown_version = self.version or 'no version'
    parts = (self.__class__.__name__, self.metadata_version,
             shown_name, shown_version)
    return '<%s %s %s (%s)>' % parts
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/resources.py
ADDED
@@ -0,0 +1,358 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2013-2017 Vinay Sajip.
|
4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
6 |
+
#
|
7 |
+
from __future__ import unicode_literals
|
8 |
+
|
9 |
+
import bisect
|
10 |
+
import io
|
11 |
+
import logging
|
12 |
+
import os
|
13 |
+
import pkgutil
|
14 |
+
import sys
|
15 |
+
import types
|
16 |
+
import zipimport
|
17 |
+
|
18 |
+
from . import DistlibException
|
19 |
+
from .util import cached_property, get_cache_base, Cache
|
20 |
+
|
21 |
+
logger = logging.getLogger(__name__)
|
22 |
+
|
23 |
+
|
24 |
+
cache = None # created when needed
|
25 |
+
|
26 |
+
|
27 |
+
class ResourceCache(Cache):
    """A file cache which materialises package resources on the local
    filesystem so they can be accessed by path."""

    def __init__(self, base=None):
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Get a resource into the cache.

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            # The resource is already a real file; no caching needed.
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result
|
69 |
+
|
70 |
+
|
71 |
+
class ResourceBase(object):
    """Common state shared by resources and resource containers."""

    def __init__(self, finder, name):
        # The finder that located this resource, and its name within it.
        self.name = name
        self.finder = finder
|
75 |
+
|
76 |
+
|
77 |
+
class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False        # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        # Lazily materialise the resource into the module-level cache and
        # return its on-disk pathname (created on first use).
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        # Raw content of the resource, read once and memoised.
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        # Size of the resource in bytes, computed once and memoised.
        return self.finder.get_size(self)
|
108 |
+
|
109 |
+
|
110 |
+
class ResourceContainer(ResourceBase):
    """A resource which contains other resources (e.g. a package directory)."""
    is_container = True     # Backwards compatibility

    @cached_property
    def resources(self):
        # Names of the immediate children of this container, computed once.
        return self.finder.get_resources(self)
|
116 |
+
|
117 |
+
|
118 |
+
class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    # Files with these extensions are never reported as resources.
    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        # The finder is anchored at the directory of the given module.
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        # Resolve symlinks for filesystem resources.
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):    # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        # Existence check; overridden by zip-based finders.
        return os.path.exists(path)

    def get_cache_info(self, resource):
        # A None prefix tells the cache the resource is already a real file.
        return None, resource.path

    def find(self, resource_name):
        """Return a Resource/ResourceContainer for *resource_name*, or None."""
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        # Immediate children of a container, minus compiled artefacts.
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        return set([f for f in os.listdir(resource.path) if allowed(f)])

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        """Yield *resource_name* and, breadth-first, everything under it."""
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            new_name = name
                        else:
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            # Containers are queued so their children are
                            # yielded after the current level.
                            todo.append(child)
                        else:
                            yield child
|
206 |
+
|
207 |
+
|
208 |
+
class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """
    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        # Length of the archive path plus its trailing separator; used to
        # strip the archive prefix from full resource paths.
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        # Sorted name index enables bisect-based prefix searches below.
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        # No symlink resolution inside an archive.
        return path

    def _find(self, path):
        # A name exists if it is a file entry, or a prefix of some entry
        # (i.e. a directory).
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        path = resource.path[self.prefix_len:]
        # Index 3 of a zipimport directory entry holds the uncompressed size.
        return self._files[path][3]

    def get_resources(self, resource):
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        # Walk the contiguous run of index entries sharing this prefix.
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])   # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        # A directory exists iff some entry sorts immediately after the
        # separator-terminated prefix.
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result
|
284 |
+
|
285 |
+
|
286 |
+
# Registry mapping loader types to finder factories. The type(None) key
# handles modules whose __loader__ attribute is missing/None.
_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
}

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    # See issue #146
    _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    # Older interpreters without the frozen importlib modules/attributes.
    pass
|
304 |
+
|
305 |
+
|
306 |
+
def register_finder(loader, finder_maker):
    """Register *finder_maker* as the finder factory for *loader*'s type."""
    _finder_registry[type(loader)] = finder_maker
|
308 |
+
|
309 |
+
|
310 |
+
_finder_cache = {}
|
311 |
+
|
312 |
+
|
313 |
+
def finder(package):
    """
    Return a resource finder for a package.

    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    # Fast path: one finder is built and cached per package.
    try:
        return _finder_cache[package]
    except KeyError:
        pass
    if package not in sys.modules:
        __import__(package)
    module = sys.modules[package]
    if getattr(module, '__path__', None) is None:
        raise DistlibException('You cannot get a finder for a module, '
                               'only for a package')
    loader = getattr(module, '__loader__', None)
    finder_maker = _finder_registry.get(type(loader))
    if finder_maker is None:
        raise DistlibException('Unable to locate finder for %r' % package)
    result = finder_maker(module)
    _finder_cache[package] = result
    return result
|
336 |
+
|
337 |
+
|
338 |
+
_dummy_module = types.ModuleType(str('__dummy__'))
|
339 |
+
|
340 |
+
|
341 |
+
def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    result = None
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder = _finder_registry.get(type(loader))
    if finder:
        # NOTE(review): _dummy_module is a single shared module object that
        # is mutated on every call — concurrent callers would race on it;
        # confirm single-threaded use before relying on this.
        module = _dummy_module
        module.__file__ = os.path.join(path, '')
        module.__loader__ = loader
        result = finder(module)
    return result
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/scripts.py
ADDED
@@ -0,0 +1,429 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2013-2015 Vinay Sajip.
|
4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
6 |
+
#
|
7 |
+
from io import BytesIO
|
8 |
+
import logging
|
9 |
+
import os
|
10 |
+
import re
|
11 |
+
import struct
|
12 |
+
import sys
|
13 |
+
|
14 |
+
from .compat import sysconfig, detect_encoding, ZipFile
|
15 |
+
from .resources import finder
|
16 |
+
from .util import (FileOperator, get_export_entry, convert_path,
|
17 |
+
get_executable, get_platform, in_venv)
|
18 |
+
|
19 |
+
logger = logging.getLogger(__name__)
|
20 |
+
|
21 |
+
_DEFAULT_MANIFEST = '''
|
22 |
+
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
23 |
+
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
24 |
+
<assemblyIdentity version="1.0.0.0"
|
25 |
+
processorArchitecture="X86"
|
26 |
+
name="%s"
|
27 |
+
type="win32"/>
|
28 |
+
|
29 |
+
<!-- Identify the application security requirements. -->
|
30 |
+
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
31 |
+
<security>
|
32 |
+
<requestedPrivileges>
|
33 |
+
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
|
34 |
+
</requestedPrivileges>
|
35 |
+
</security>
|
36 |
+
</trustInfo>
|
37 |
+
</assembly>'''.strip()
|
38 |
+
|
39 |
+
# check if Python is called on the first line with this expression
|
40 |
+
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
|
41 |
+
SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
|
42 |
+
import re
|
43 |
+
import sys
|
44 |
+
from %(module)s import %(import_name)s
|
45 |
+
if __name__ == '__main__':
|
46 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
47 |
+
sys.exit(%(func)s())
|
48 |
+
'''
|
49 |
+
|
50 |
+
|
51 |
+
def enquote_executable(executable):
    """Quote *executable* for use in a shebang line if it contains spaces.

    For a ``/usr/bin/env <interpreter>`` form only the interpreter part is
    quoted so the env lookup still works; otherwise the whole path is quoted.
    Already-quoted values are returned unchanged.
    """
    if ' ' not in executable:
        return executable
    if executable.startswith('/usr/bin/env '):
        env_part, interp = executable.split(' ', 1)
        if ' ' in interp and not interp.startswith('"'):
            return '%s "%s"' % (env_part, interp)
        return executable
    if executable.startswith('"'):
        return executable
    return '"%s"' % executable

# Keep the old name around (for now), as there is at least one project using it!
_enquote_executable = enquote_executable
|
68 |
+
|
69 |
+
class ScriptMaker(object):
|
70 |
+
"""
|
71 |
+
A class to copy or create scripts from source scripts or callable
|
72 |
+
specifications.
|
73 |
+
"""
|
74 |
+
script_template = SCRIPT_TEMPLATE
|
75 |
+
|
76 |
+
executable = None # for shebangs
|
77 |
+
|
78 |
+
def __init__(self, source_dir, target_dir, add_launchers=True,
             dry_run=False, fileop=None):
    """Create a maker copying/creating scripts from *source_dir* into
    *target_dir*; *fileop* defaults to a FileOperator honouring *dry_run*.
    """
    self.source_dir = source_dir
    self.target_dir = target_dir
    self.add_launchers = add_launchers
    self.force = False
    self.clobber = False
    self.variants = set(('', 'X.Y'))
    self.version_info = sys.version_info
    # It only makes sense to set mode bits on POSIX (including Jython
    # running on a POSIX host).
    self.set_mode = (os.name == 'posix') or (os.name == 'java' and
                                             os._name == 'posix')
    self._is_nt = os.name == 'nt' or (os.name == 'java' and
                                      os._name == 'nt')
    self._fileop = fileop or FileOperator(dry_run)
|
94 |
+
|
95 |
+
def _get_alternate_executable(self, executable, options):
    """Return the GUI interpreter (pythonw) on Windows when *options* request a GUI."""
    if not (options.get('gui', False) and self._is_nt):  # pragma: no cover
        return executable
    directory, filename = os.path.split(executable)
    return os.path.join(directory, filename.replace('python', 'pythonw'))
|
101 |
+
|
102 |
+
if sys.platform.startswith('java'):  # pragma: no cover
    # These helpers only exist on Jython, where shebang handling differs.
    def _is_shell(self, executable):
        """
        Determine if the specified executable is a script
        (contains a #! line)
        """
        try:
            with open(executable) as fp:
                return fp.read(2) == '#!'
        except (OSError, IOError):
            logger.warning('Failed to open %s', executable)
            return False

    def _fix_jython_executable(self, executable):
        # Return an executable usable in a shebang on Jython; falls back to
        # launching via /usr/bin/env.
        if self._is_shell(executable):
            # Workaround for Jython is not needed on Linux systems.
            import java

            if java.lang.System.getProperty('os.name') == 'Linux':
                return executable
        elif executable.lower().endswith('jython.exe'):
            # Use wrapper exe for Jython on Windows
            return executable
        return '/usr/bin/env %s' % executable
|
126 |
+
|
127 |
+
def _build_shebang(self, executable, post_interp):
    """
    Build a shebang line. In the simple case (on Windows, or a shebang line
    which is not too long or contains spaces) use a simple formulation for
    the shebang. Otherwise, use /bin/sh as the executable, with a contrived
    shebang which allows the script to run either under Python or sh, using
    suitable quoting. Thanks to Harald Nordgren for his input.

    See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
              https://hg.mozilla.org/mozilla-central/file/tip/mach
    """
    if os.name != 'posix':
        use_simple = True
    else:
        # Add 3 for '#!' prefix and newline suffix.
        total_len = len(executable) + len(post_interp) + 3
        limit = 512 if sys.platform == 'darwin' else 127
        use_simple = (b' ' not in executable) and (total_len <= limit)
    if use_simple:
        return b'#!' + executable + post_interp + b'\n'
    # /bin/sh trampoline: the quoted string is executed by sh but reads as
    # a harmless string literal when the file is parsed as Python.
    pieces = [b'#!/bin/sh\n',
              b"'''exec' " + executable + post_interp + b' "$0" "$@"\n',
              b"' '''"]
    return b''.join(pieces)
|
157 |
+
|
158 |
+
def _get_shebang(self, encoding, post_interp=b'', options=None):
    """Return the shebang line (as bytes) for generated scripts, choosing
    an interpreter and verifying the result is decodable in *encoding*.
    """
    enquote = True
    if self.executable:
        executable = self.executable
        enquote = False     # assume this will be taken care of
    elif not sysconfig.is_python_build():
        executable = get_executable()
    elif in_venv():  # pragma: no cover
        executable = os.path.join(sysconfig.get_path('scripts'),
                        'python%s' % sysconfig.get_config_var('EXE'))
    else:  # pragma: no cover
        executable = os.path.join(
            sysconfig.get_config_var('BINDIR'),
           'python%s%s' % (sysconfig.get_config_var('VERSION'),
                           sysconfig.get_config_var('EXE')))
        if not os.path.isfile(executable):
            # for Python builds from source on Windows, no Python executables with
            # a version suffix are created, so we use python.exe
            executable = os.path.join(sysconfig.get_config_var('BINDIR'),
                            'python%s' % (sysconfig.get_config_var('EXE')))
    if options:
        executable = self._get_alternate_executable(executable, options)

    if sys.platform.startswith('java'):  # pragma: no cover
        executable = self._fix_jython_executable(executable)

    # Normalise case for Windows - COMMENTED OUT
    # executable = os.path.normcase(executable)
    # N.B. The normalising operation above has been commented out: See
    # issue #124. Although paths in Windows are generally case-insensitive,
    # they aren't always. For example, a path containing a ẞ (which is a
    # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
    # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by
    # Windows as equivalent in path names.

    # If the user didn't specify an executable, it may be necessary to
    # cater for executable paths with spaces (not uncommon on Windows)
    if enquote:
        executable = enquote_executable(executable)
    # Issue #51: don't use fsencode, since we later try to
    # check that the shebang is decodable using utf-8.
    executable = executable.encode('utf-8')
    # in case of IronPython, play safe and enable frames support
    if (sys.platform == 'cli' and '-X:Frames' not in post_interp
            and '-X:FullFrames' not in post_interp):  # pragma: no cover
        post_interp += b' -X:Frames'
    shebang = self._build_shebang(executable, post_interp)
    # Python parser starts to read a script using UTF-8 until
    # it gets a #coding:xxx cookie. The shebang has to be the
    # first line of a file, the #coding:xxx cookie cannot be
    # written before. So the shebang has to be decodable from
    # UTF-8.
    try:
        shebang.decode('utf-8')
    except UnicodeDecodeError:  # pragma: no cover
        raise ValueError(
            'The shebang (%r) is not decodable from utf-8' % shebang)
    # If the script is encoded to a custom encoding (use a
    # #coding:xxx cookie), the shebang has to be decodable from
    # the script encoding too.
    if encoding != 'utf-8':
        try:
            shebang.decode(encoding)
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError(
                'The shebang (%r) is not decodable '
                'from the script encoding (%r)' % (shebang, encoding))
    return shebang
|
226 |
+
|
227 |
+
def _get_script_text(self, entry):
    """Render the wrapper-script source for an export entry."""
    substitutions = {
        'module': entry.prefix,
        'import_name': entry.suffix.split('.')[0],
        'func': entry.suffix,
    }
    return self.script_template % substitutions
|
231 |
+
|
232 |
+
manifest = _DEFAULT_MANIFEST
|
233 |
+
|
234 |
+
def get_manifest(self, exename):
    """Return the Windows manifest text for executable *exename*.

    The manifest template (self.manifest) is formatted with the base
    name of the executable path.
    """
    return self.manifest % os.path.basename(exename)
|
237 |
+
|
238 |
+
def _write_script(self, names, shebang, script_bytes, filenames, ext):
    """
    Write *script_bytes*, prefixed by *shebang*, to a file for each name
    in *names* under self.target_dir, appending every path written to
    *filenames*.  On Windows with launchers enabled, the script is packed
    into a zip archive and prepended with a native .exe launcher stub.
    """
    use_launcher = self.add_launchers and self._is_nt
    linesep = os.linesep.encode('utf-8')
    # Make sure the shebang line is terminated before the body follows.
    if not shebang.endswith(linesep):
        shebang += linesep
    if not use_launcher:
        script_bytes = shebang + script_bytes
    else:  # pragma: no cover
        # 't' selects the console launcher, 'w' the GUI (windowed) one.
        if ext == 'py':
            launcher = self._get_launcher('t')
        else:
            launcher = self._get_launcher('w')
        # The launcher runs a zipapp appended to it: exe + shebang + zip.
        stream = BytesIO()
        with ZipFile(stream, 'w') as zf:
            zf.writestr('__main__.py', script_bytes)
        zip_data = stream.getvalue()
        script_bytes = launcher + shebang + zip_data
    for name in names:
        outname = os.path.join(self.target_dir, name)
        if use_launcher:  # pragma: no cover
            # Replace a .py/.pyw extension with .exe for the launcher.
            n, e = os.path.splitext(outname)
            if e.startswith('.py'):
                outname = n
            outname = '%s.exe' % outname
            try:
                self._fileop.write_binary_file(outname, script_bytes)
            except Exception:
                # Failed writing an executable - it might be in use.
                logger.warning('Failed to write executable - trying to '
                               'use .deleteme logic')
                dfname = '%s.deleteme' % outname
                if os.path.exists(dfname):
                    os.remove(dfname)  # Not allowed to fail here
                os.rename(outname, dfname)  # nor here
                self._fileop.write_binary_file(outname, script_bytes)
                logger.debug('Able to replace executable using '
                             '.deleteme logic')
                try:
                    os.remove(dfname)
                except Exception:
                    pass  # still in use - ignore error
        else:
            if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                outname = '%s.%s' % (outname, ext)
            # Respect existing files unless clobbering was requested.
            if os.path.exists(outname) and not self.clobber:
                logger.warning('Skipping existing file %s', outname)
                continue
            self._fileop.write_binary_file(outname, script_bytes)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
|
289 |
+
|
290 |
+
variant_separator = '-'
|
291 |
+
|
292 |
+
def get_script_filenames(self, name):
    """Return the set of script file names to generate for *name*.

    The self.variants set controls which forms are produced: '' for the
    bare name, 'X' for name + major version, and 'X.Y' for
    name + separator + major.minor.
    """
    major = self.version_info[0]
    minor = self.version_info[1]
    names = set()
    if '' in self.variants:
        names.add(name)
    if 'X' in self.variants:
        names.add('%s%s' % (name, major))
    if 'X.Y' in self.variants:
        names.add('%s%s%s.%s' % (name, self.variant_separator, major, minor))
    return names
|
302 |
+
|
303 |
+
def _make_script(self, entry, filenames, options=None):
|
304 |
+
post_interp = b''
|
305 |
+
if options:
|
306 |
+
args = options.get('interpreter_args', [])
|
307 |
+
if args:
|
308 |
+
args = ' %s' % ' '.join(args)
|
309 |
+
post_interp = args.encode('utf-8')
|
310 |
+
shebang = self._get_shebang('utf-8', post_interp, options=options)
|
311 |
+
script = self._get_script_text(entry).encode('utf-8')
|
312 |
+
scriptnames = self.get_script_filenames(entry.name)
|
313 |
+
if options and options.get('gui', False):
|
314 |
+
ext = 'pyw'
|
315 |
+
else:
|
316 |
+
ext = 'py'
|
317 |
+
self._write_script(scriptnames, shebang, script, filenames, ext)
|
318 |
+
|
319 |
+
def _copy_script(self, script, filenames):
    """
    Copy the file *script* from self.source_dir into self.target_dir,
    rewriting its shebang line if the first line matches FIRST_LINE_RE.
    Paths written are appended to *filenames*.  Up-to-date targets are
    skipped unless self.force is set.
    """
    adjust = False
    script = os.path.join(self.source_dir, convert_path(script))
    outname = os.path.join(self.target_dir, os.path.basename(script))
    if not self.force and not self._fileop.newer(script, outname):
        logger.debug('not copying %s (up-to-date)', script)
        return

    # Always open the file, but ignore failures in dry-run mode --
    # that way, we'll get accurate feedback if we can read the
    # script.
    try:
        f = open(script, 'rb')
    except IOError:  # pragma: no cover
        if not self.dry_run:
            raise
        f = None
    else:
        first_line = f.readline()
        if not first_line:  # pragma: no cover
            logger.warning('%s is an empty file (skipping)', script)
            return

        # Normalise CRLF so the shebang regex matches on any platform.
        match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
        if match:
            adjust = True
            # Any arguments after the interpreter path are preserved.
            post_interp = match.group(1) or b''

    if not adjust:
        # No recognisable shebang: copy the file verbatim.
        if f:
            f.close()
        self._fileop.copy_file(script, outname)
        if self.set_mode:
            self._fileop.set_executable_mode([outname])
        filenames.append(outname)
    else:
        logger.info('copying and adjusting %s -> %s', script,
                    self.target_dir)
        if not self._fileop.dry_run:
            # The shebang must be decodable in the script's own encoding,
            # so detect it before rebuilding the first line.
            encoding, lines = detect_encoding(f.readline)
            f.seek(0)
            shebang = self._get_shebang(encoding, post_interp)
            if b'pythonw' in first_line:  # pragma: no cover
                ext = 'pyw'
            else:
                ext = 'py'
            n = os.path.basename(outname)
            self._write_script([n], shebang, f.read(), filenames, ext)
        if f:
            f.close()
|
369 |
+
|
370 |
+
@property
def dry_run(self):
    # Dry-run state is owned by the underlying FileOperator; this
    # property simply delegates to it.
    return self._fileop.dry_run

@dry_run.setter
def dry_run(self, value):
    # Propagate the flag to the FileOperator so all file operations
    # honour it.
    self._fileop.dry_run = value
|
377 |
+
|
378 |
+
if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
    # Executable launcher support.
    # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

    def _get_launcher(self, kind):
        """
        Return the bytes of the native launcher stub of the given *kind*
        ('t' for console, 'w' for GUI), matching the current process's
        pointer width and, for win-arm64, the ARM variant.
        """
        if struct.calcsize('P') == 8:   # 64-bit
            bits = '64'
        else:
            bits = '32'
        platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
        name = '%s%s%s.exe' % (kind, bits, platform_suffix)
        # Issue 31: don't hardcode an absolute package name, but
        # determine it relative to the current package
        distlib_package = __name__.rsplit('.', 1)[0]
        resource = finder(distlib_package).find(name)
        if not resource:
            msg = ('Unable to find resource %s in package %s' % (name,
                   distlib_package))
            raise ValueError(msg)
        return resource.bytes
|
398 |
+
|
399 |
+
# Public API follows
|
400 |
+
|
401 |
+
def make(self, specification, options=None):
    """
    Make a script.

    :param specification: The specification, which is either a valid export
                          entry specification (to make a script from a
                          callable) or a filename (to make a script by
                          copying from a source location).
    :param options: A dictionary of options controlling script generation.
    :return: A list of all absolute pathnames written to.
    """
    written = []
    entry = get_export_entry(specification)
    if entry is None:
        # Not an export entry: treat the specification as a filename.
        self._copy_script(specification, written)
    else:
        self._make_script(entry, written, options=options)
    return written
|
419 |
+
|
420 |
+
def make_multiple(self, specifications, options=None):
    """
    Take a list of specifications and make scripts from them.

    :param specifications: A list of specifications.
    :param options: A dictionary of options controlling script generation.
    :return: A list of all absolute pathnames written to.
    """
    written = []
    for spec in specifications:
        written.extend(self.make(spec, options))
    return written
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/util.py
ADDED
@@ -0,0 +1,1932 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#
|
2 |
+
# Copyright (C) 2012-2021 The Python Software Foundation.
|
3 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
4 |
+
#
|
5 |
+
import codecs
|
6 |
+
from collections import deque
|
7 |
+
import contextlib
|
8 |
+
import csv
|
9 |
+
from glob import iglob as std_iglob
|
10 |
+
import io
|
11 |
+
import json
|
12 |
+
import logging
|
13 |
+
import os
|
14 |
+
import py_compile
|
15 |
+
import re
|
16 |
+
import socket
|
17 |
+
try:
|
18 |
+
import ssl
|
19 |
+
except ImportError: # pragma: no cover
|
20 |
+
ssl = None
|
21 |
+
import subprocess
|
22 |
+
import sys
|
23 |
+
import tarfile
|
24 |
+
import tempfile
|
25 |
+
import textwrap
|
26 |
+
|
27 |
+
try:
|
28 |
+
import threading
|
29 |
+
except ImportError: # pragma: no cover
|
30 |
+
import dummy_threading as threading
|
31 |
+
import time
|
32 |
+
|
33 |
+
from . import DistlibException
|
34 |
+
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
|
35 |
+
cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
|
36 |
+
splittype, HTTPHandler, BaseConfigurator, valid_ident,
|
37 |
+
Container, configparser, URLError, ZipFile, fsdecode,
|
38 |
+
unquote, urlparse)
|
39 |
+
|
40 |
+
logger = logging.getLogger(__name__)
|
41 |
+
|
42 |
+
#
|
43 |
+
# Requirement parsing code as per PEP 508
|
44 |
+
#
|
45 |
+
|
46 |
+
IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
|
47 |
+
VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
|
48 |
+
COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
|
49 |
+
MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
|
50 |
+
OR = re.compile(r'^or\b\s*')
|
51 |
+
AND = re.compile(r'^and\b\s*')
|
52 |
+
NON_SPACE = re.compile(r'(\S+)\s*')
|
53 |
+
STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
|
54 |
+
|
55 |
+
|
56 |
+
def parse_marker(marker_string):
    """
    Parse a marker string and return a dictionary containing a marker expression.

    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
    the expression grammar, or strings. A string contained in quotes is to be
    interpreted as a literal string, and a string not contained in quotes is a
    variable (such as os_name).
    """
    # Recursive-descent parser.  Precedence (tightest first):
    #   marker_var < marker_expr (comparisons / parens) < marker_and < marker.
    def marker_var(remaining):
        # either identifier, or literal string
        m = IDENTIFIER.match(remaining)
        if m:
            result = m.groups()[0]
            remaining = remaining[m.end():]
        elif not remaining:
            raise SyntaxError('unexpected end of input')
        else:
            q = remaining[0]
            if q not in '\'"':
                raise SyntaxError('invalid expression: %s' % remaining)
            # oq is the *other* quote character; it may appear inside the
            # literal without terminating it.
            oq = '\'"'.replace(q, '')
            remaining = remaining[1:]
            parts = [q]
            while remaining:
                # either a string chunk, or oq, or q to terminate
                if remaining[0] == q:
                    break
                elif remaining[0] == oq:
                    parts.append(oq)
                    remaining = remaining[1:]
                else:
                    m = STRING_CHUNK.match(remaining)
                    if not m:
                        raise SyntaxError('error in string literal: %s' % remaining)
                    parts.append(m.groups()[0])
                    remaining = remaining[m.end():]
            else:
                # while/else: input exhausted before the closing quote.
                s = ''.join(parts)
                raise SyntaxError('unterminated string: %s' % s)
            parts.append(q)
            result = ''.join(parts)
            remaining = remaining[1:].lstrip()  # skip past closing quote
        return result, remaining

    def marker_expr(remaining):
        # Parenthesised sub-expression, or a chain of comparisons.
        if remaining and remaining[0] == '(':
            result, remaining = marker(remaining[1:].lstrip())
            if remaining[0] != ')':
                raise SyntaxError('unterminated parenthesis: %s' % remaining)
            remaining = remaining[1:].lstrip()
        else:
            lhs, remaining = marker_var(remaining)
            while remaining:
                m = MARKER_OP.match(remaining)
                if not m:
                    break
                op = m.groups()[0]
                remaining = remaining[m.end():]
                rhs, remaining = marker_var(remaining)
                # Left-associative: fold each comparison into the lhs.
                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
            result = lhs
        return result, remaining

    def marker_and(remaining):
        # One or more expressions joined by 'and'.
        lhs, remaining = marker_expr(remaining)
        while remaining:
            m = AND.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_expr(remaining)
            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    def marker(remaining):
        # One or more 'and'-groups joined by 'or' (lowest precedence).
        lhs, remaining = marker_and(remaining)
        while remaining:
            m = OR.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_and(remaining)
            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    return marker(marker_string)
|
143 |
+
|
144 |
+
|
145 |
+
def parse_requirement(req):
    """
    Parse a requirement passed in as a string. Return a Container
    whose attributes contain the various parts of the requirement.
    """
    remaining = req.strip()
    # Blank lines and comment lines are not requirements.
    if not remaining or remaining.startswith('#'):
        return None
    m = IDENTIFIER.match(remaining)
    if not m:
        raise SyntaxError('name expected: %s' % remaining)
    distname = m.groups()[0]
    remaining = remaining[m.end():]
    extras = mark_expr = versions = uri = None
    if remaining and remaining[0] == '[':
        # Comma-separated extras list inside [ ... ].
        i = remaining.find(']', 1)
        if i < 0:
            raise SyntaxError('unterminated extra: %s' % remaining)
        s = remaining[1:i]
        remaining = remaining[i + 1:].lstrip()
        extras = []
        while s:
            m = IDENTIFIER.match(s)
            if not m:
                raise SyntaxError('malformed extra: %s' % s)
            extras.append(m.groups()[0])
            s = s[m.end():]
            if not s:
                break
            if s[0] != ',':
                raise SyntaxError('comma expected in extras: %s' % s)
            s = s[1:].lstrip()
        if not extras:
            extras = None
    if remaining:
        if remaining[0] == '@':
            # it's a URI
            remaining = remaining[1:].lstrip()
            m = NON_SPACE.match(remaining)
            if not m:
                raise SyntaxError('invalid URI: %s' % remaining)
            uri = m.groups()[0]
            t = urlparse(uri)
            # there are issues with Python and URL parsing, so this test
            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
            # always parse invalid URLs correctly - it should raise
            # exceptions for malformed URLs
            if not (t.scheme and t.netloc):
                raise SyntaxError('Invalid URL: %s' % uri)
            remaining = remaining[m.end():].lstrip()
        else:

            def get_versions(ver_remaining):
                """
                Return a list of operator, version tuples if any are
                specified, else None.
                """
                m = COMPARE_OP.match(ver_remaining)
                versions = None
                if m:
                    versions = []
                    while True:
                        op = m.groups()[0]
                        ver_remaining = ver_remaining[m.end():]
                        m = VERSION_IDENTIFIER.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid version: %s' % ver_remaining)
                        v = m.groups()[0]
                        versions.append((op, v))
                        ver_remaining = ver_remaining[m.end():]
                        if not ver_remaining or ver_remaining[0] != ',':
                            break
                        ver_remaining = ver_remaining[1:].lstrip()
                        # Some packages have a trailing comma which would break things
                        # See issue #148
                        if not ver_remaining:
                            break
                        m = COMPARE_OP.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid constraint: %s' % ver_remaining)
                if not versions:
                    versions = None
                return versions, ver_remaining

            if remaining[0] != '(':
                versions, remaining = get_versions(remaining)
            else:
                # Parenthesised version constraints (legacy PEP form).
                i = remaining.find(')', 1)
                if i < 0:
                    raise SyntaxError('unterminated parenthesis: %s' % remaining)
                s = remaining[1:i]
                remaining = remaining[i + 1:].lstrip()
                # As a special diversion from PEP 508, allow a version number
                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
                # is allowed in earlier PEPs)
                if COMPARE_OP.match(s):
                    versions, _ = get_versions(s)
                else:
                    m = VERSION_IDENTIFIER.match(s)
                    if not m:
                        raise SyntaxError('invalid constraint: %s' % s)
                    v = m.groups()[0]
                    s = s[m.end():].lstrip()
                    if s:
                        raise SyntaxError('invalid constraint: %s' % s)
                    versions = [('~=', v)]

    if remaining:
        # Anything after ';' is an environment marker expression.
        if remaining[0] != ';':
            raise SyntaxError('invalid requirement: %s' % remaining)
        remaining = remaining[1:].lstrip()

        mark_expr, remaining = parse_marker(remaining)

    if remaining and remaining[0] != '#':
        raise SyntaxError('unexpected trailing data: %s' % remaining)

    # Canonical "name op ver, op ver" string for the whole requirement.
    if not versions:
        rs = distname
    else:
        rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
    return Container(name=distname, extras=extras, constraints=versions,
                     marker=mark_expr, url=uri, requirement=rs)
|
268 |
+
|
269 |
+
|
270 |
+
def get_resources_dests(resources_root, rules):
    """Find destinations for resources files.

    Each rule is a (base, suffix, dest) triple; base and suffix are glob
    patterns relative to *resources_root*, and dest is the destination
    prefix (or None to remove a previously-matched resource).  Returns a
    dict mapping resource paths (relative to resources_root, '/'-separated)
    to their destinations.
    """
    def to_rel(root, path):
        # Normalise separators to '/' and strip the root prefix.
        root = root.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(root)
        return path[len(root):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        base_pattern = os.path.join(resources_root, base)
        for abs_base in iglob(base_pattern):
            for abs_path in iglob(os.path.join(abs_base, suffix)):
                resource_file = to_rel(resources_root, abs_path)
                if dest is None:
                    # A None destination removes any earlier mapping.
                    destinations.pop(resource_file, None)
                else:
                    rel_path = to_rel(abs_base, abs_path)
                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations
|
294 |
+
|
295 |
+
|
296 |
+
def in_venv():
    """Return True if running inside a virtual environment.

    Legacy virtualenv sets sys.real_prefix; PEP 405 venvs make
    sys.prefix differ from sys.base_prefix.
    """
    if hasattr(sys, 'real_prefix'):
        return True
    return sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
|
304 |
+
|
305 |
+
|
306 |
+
def get_executable():
    """Return the path of the running interpreter as text.

    Historical notes: the __PYVENV_LAUNCHER__ workaround for the OS X
    stub launcher is no longer needed, and os.path.normcase was dropped
    because it broke case-sensitive lookups (issue #143).
    """
    executable = sys.executable
    if not isinstance(executable, text_type):
        executable = fsdecode(executable)
    return executable
|
322 |
+
|
323 |
+
|
324 |
+
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    """
    Prompt the user until a reply starting with one of *allowed_chars*
    is given, and return that (lower-cased) first character.  An empty
    reply is replaced by *default* when one is provided; otherwise the
    prompt repeats.
    """
    p = prompt
    while True:
        s = raw_input(p)
        p = prompt
        if not s and default:
            s = default
        if s:
            c = s[0].lower()
            if c in allowed_chars:
                break
        if error_prompt:
            # NOTE(review): if the very first reply is empty and no default
            # is set, 'c' is unbound here when error_prompt is given —
            # confirm against upstream distlib.
            p = '%c: %s\n%s' % (c, error_prompt, prompt)
    return c
|
338 |
+
|
339 |
+
|
340 |
+
def extract_by_key(d, keys):
    """Return a new dict containing the entries of *d* whose keys are in
    *keys*.

    *keys* may be given as a whitespace-separated string or an iterable;
    keys absent from *d* are silently ignored.
    """
    if isinstance(keys, string_types):
        keys = keys.split()
    return {k: d[k] for k in keys if k in d}
|
348 |
+
|
349 |
+
def read_exports(stream):
    """
    Read exports data from a (binary) *stream* and return a dict mapping
    group names to dicts of {name: export entry}.  The stream may contain
    either the JSON metadata format (tried first) or the legacy INI
    entry-points format.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        # Replace each "name = value" string with a parsed ExportEntry.
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        # Not JSON (or not the expected schema): rewind and parse as INI.
        stream.seek(0, 0)

    def read_stream(cp, stream):
        # ConfigParser API differs across versions: read_file vs readfp.
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        # Some files are indented as a whole; dedent and retry.
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            #entry.dist = self
            entries[name] = entry
    return result
|
394 |
+
|
395 |
+
|
396 |
+
def write_exports(exports, stream):
    """Write an *exports* dict (group -> {name: entry}) to binary *stream*
    in the legacy INI entry-points format.

    Each entry is rendered as ``prefix`` or ``prefix:suffix``, with any
    flags appended as ``[flag1, flag2]``.
    """
    if sys.version_info[0] >= 3:
        # ConfigParser.write needs a text stream; wrap the binary one.
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for section, entries in exports.items():
        # TODO check section/entries for valid values
        cp.add_section(section)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(section, entry.name, value)
    cp.write(stream)
|
413 |
+
|
414 |
+
|
415 |
+
@contextlib.contextmanager
def tempdir():
    """Context manager yielding a fresh temporary directory, which is
    removed (with all its contents) on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
|
422 |
+
|
423 |
+
@contextlib.contextmanager
def chdir(d):
    """Context manager that temporarily changes the working directory to
    *d*, restoring the previous directory on exit."""
    saved = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        os.chdir(saved)
|
431 |
+
|
432 |
+
|
433 |
+
@contextlib.contextmanager
def socket_timeout(seconds=15):
    """Context manager that temporarily sets the global default socket
    timeout to *seconds*, restoring the previous value on exit."""
    previous = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(previous)
|
441 |
+
|
442 |
+
|
443 |
+
class cached_property(object):
    """Non-data descriptor that caches the decorated method's result.

    The first attribute access computes the value and stores it on the
    instance under the same name, so subsequent accesses bypass the
    descriptor entirely.
    """
    def __init__(self, func):
        # func: the zero-argument (besides self) method to cache.
        self.func = func
        #for attr in ('__name__', '__module__', '__doc__'):
        #    setattr(self, attr, getattr(func, attr, None))

    def __get__(self, obj, cls=None):
        if obj is None:
            # Class-level access: return the descriptor itself.
            return self
        value = self.func(obj)
        # Shadow the descriptor on the instance; object.__setattr__
        # bypasses any overridden __setattr__ on obj.
        object.__setattr__(obj, self.func.__name__, value)
        #obj.__dict__[self.func.__name__] = value = self.func(obj)
        return value
|
456 |
+
|
457 |
+
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    # Fast path: the native separator already matches Unix convention.
    if os.sep == '/' or not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)
    # Drop '.' components before rejoining with the native separator.
    parts = [p for p in pathname.split('/') if p != os.curdir]
    if not parts:
        return os.curdir
    return os.path.join(*parts)
|
482 |
+
|
483 |
+
|
484 |
+
class FileOperator(object):
    """Performs filesystem operations with dry-run and change-recording
    support, so installations can be previewed, committed or rolled back.
    """

    def __init__(self, dry_run=False):
        # When dry_run is True, operations are logged but not performed.
        self.dry_run = dry_run
        # Directories already known to exist (or already created).
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        """Reset recording state: recording off, no files/dirs noted."""
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        """Note *path* as written, but only while recording is enabled."""
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" %
                                   os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.

        With check=True, refuses to overwrite a symlink or a non-regular
        file at *outfile*.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
            if msg:
                raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        """Copy the contents of *instream* to the file *outfile*.

        With encoding=None the stream is written as binary; otherwise it
        is written through a text codec.
        """
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        """Write bytes *data* to *path*, replacing any existing file."""
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            # Remove first so the write is a fresh file, not an in-place
            # update of a possibly-linked target.
            if os.path.exists(path):
                os.remove(path)
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        """Write string *data* to *path* encoded with *encoding*."""
        self.write_binary_file(path, data.encode(encoding))

    def set_mode(self, bits, mask, files):
        """OR *bits* into each file's mode, then AND with *mask* (POSIX only)."""
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    # Convenience: mark files r-x for everyone (preserving setuid etc. bits).
    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        """Create *path* (and parents) if needed, honouring dry-run."""
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            # Recurse so parents are created (and recorded) first.
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
                if self.record:
                    self.dirs_created.add(path)

    def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
        """Byte-compile *path* to its cache location and return that path.

        :param optimize: compile with optimization (affects cache name).
        :param force: compile even if the cached file is up to date.
        :param prefix: if given, stripped from *path* for the diagnostics
            path embedded in the compiled file.
        :param hashed_invalidation: use PEP 552 checked-hash pycs where
            the running Python supports them.
        """
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
            compile_kwargs = {}
            if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
                compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
            py_compile.compile(path, dpath, diagpath, True, **compile_kwargs)     # raise error
        self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        """Remove *path* (file, link or directory tree) if it exists."""
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        """Return True if *path* (or its nearest existing ancestor) is writable."""
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                # Reached the filesystem root without finding anything.
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        """Undo recorded changes: delete written files and created dirs."""
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)     # should fail if non-empty
        self._init_record()
|
666 |
+
|
667 |
+
def resolve(module_name, dotted_path):
    """Import *module_name* and resolve *dotted_path* within it.

    :param module_name: A module name, possibly dotted (e.g. 'os.path').
    :param dotted_path: An attribute path within the module (e.g.
        'Class.method'), or None to return the module itself.
    :return: The resolved object.
    :raises ImportError: if the module cannot be imported.
    :raises AttributeError: if an attribute in the path is missing.
    """
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        # Bug fix: __import__('a.b') returns the *top-level* package 'a',
        # not the submodule 'a.b'. Import, then fetch the actual (possibly
        # dotted) module from sys.modules.
        __import__(module_name)
        mod = sys.modules[module_name]
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result
|
680 |
+
|
681 |
+
|
682 |
+
class ExportEntry(object):
    """A single export (entry-point) specification.

    ``prefix`` is the module path, ``suffix`` the attribute path within
    it (or None), and ``flags`` a list of flag strings.
    """

    def __init__(self, name, prefix, suffix, flags):
        self.name = name
        self.prefix = prefix
        self.suffix = suffix
        self.flags = flags

    @cached_property
    def value(self):
        # Resolved lazily on first access; cached_property memoises it.
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
                                                self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            return False
        return (self.name == other.name and
                self.prefix == other.prefix and
                self.suffix == other.suffix and
                self.flags == other.flags)

    # Identity-based hashing, consistent with the original definition.
    __hash__ = object.__hash__
|
708 |
+
|
709 |
+
|
710 |
+
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
|
711 |
+
\s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
|
712 |
+
\s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
|
713 |
+
''', re.VERBOSE)
|
714 |
+
|
715 |
+
def get_export_entry(specification):
    """Parse an export specification string into an ExportEntry.

    Returns None when *specification* simply doesn't look like an export
    entry; raises DistlibException when it is malformed (stray brackets,
    multiple colons).
    """
    m = ENTRY_RE.search(specification)
    if not m:
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        return None
    groups = m.groupdict()
    name = groups['name']
    path = groups['callable']
    colons = path.count(':')
    if colons == 0:
        prefix, suffix = path, None
    elif colons == 1:
        prefix, suffix = path.split(':')
    else:
        raise DistlibException("Invalid specification "
                               "'%s'" % specification)
    flags = groups['flags']
    if flags is None:
        # Brackets present but not matched by the regex => malformed.
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        flags = []
    else:
        flags = [f.strip() for f in flags.split(',')]
    return ExportEntry(name, prefix, suffix, flags)
|
744 |
+
|
745 |
+
|
746 |
+
def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        result = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        result = os.path.expanduser('~')
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(result):
        usable = os.access(result, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', result)
    else:
        try:
            os.makedirs(result)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', result, exc_info=True)
            usable = False
    if not usable:
        # Fall back to a throwaway temp dir rather than failing outright.
        result = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', result)
    return os.path.join(result, suffix)
|
785 |
+
|
786 |
+
|
787 |
+
def path_to_cache_dir(path):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    drive, tail = os.path.splitdrive(os.path.abspath(path))
    if drive:
        # Strip the ':' (e.g. 'C:') so the result is a legal file name.
        drive = drive.replace(':', '---')
    return drive + tail.replace(os.sep, '--') + '.cache'
|
802 |
+
|
803 |
+
|
804 |
+
def ensure_slash(s):
    """Return *s* guaranteed to end with a trailing '/'."""
    return s if s.endswith('/') else s + '/'
|
808 |
+
|
809 |
+
|
810 |
+
def parse_credentials(netloc):
    """Split a URL netloc into (username, password, host).

    Credentials, if present, are percent-decoded; username and password
    are None when absent.
    """
    username = password = None
    if '@' in netloc:
        # rsplit: the host part cannot contain '@', credentials may.
        creds, netloc = netloc.rsplit('@', 1)
        if ':' in creds:
            username, password = creds.split(':', 1)
        else:
            username = creds
        if username:
            username = unquote(username)
        if password:
            password = unquote(password)
    return username, password, netloc
|
823 |
+
|
824 |
+
|
825 |
+
def get_process_umask():
    """Return the current process umask without changing it.

    os.umask both sets and returns, so set a dummy value and restore.
    """
    current = os.umask(0o22)
    os.umask(current)
    return current
|
829 |
+
|
830 |
+
def is_string_sequence(seq):
    """Return True if every element of *seq* is a string.

    NOTE(review): an empty *seq* leaves ``i`` as None and trips the
    assert below — callers are expected to pass non-empty sequences.
    """
    result = True
    i = None
    for i, s in enumerate(seq):
        if not isinstance(s, string_types):
            result = False
            break
    assert i is not None
    return result
|
839 |
+
|
840 |
+
# name-version pattern for distribution filenames: a dotted/hyphenated
# name, a '-', then a version string.
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
# trailing '-pyX[.Y]' marker naming the target Python version.
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')


def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    m = PYTHON_VERSION.search(filename)
    if m:
        # Peel off the '-pyX.Y' suffix before splitting name/version.
        pyver = m.group(1)
        filename = filename[:m.start()]
    if project_name and len(filename) > len(project_name) + 1:
        # Prefer an exact match on the known project name.
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            n = m.end()
            return filename[:n], filename[n + 1:], pyver
    m = PROJECT_NAME_AND_VERSION.match(filename)
    if m:
        return m.group(1), m.group(3), pyver
    return None
|
868 |
+
|
869 |
+
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')


def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    """
    m = NAME_VERSION_RE.match(p)
    if not m:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = m.groupdict()
    return groups['name'].strip().lower(), groups['ver']
|
887 |
+
|
888 |
+
def get_extras(requested, available):
    """Resolve requested extras against the declared (available) ones.

    Supports '*' (everything available), '-name' (exclude name) and the
    literal '-'. Undeclared extras are kept but logged as warnings.
    """
    result = set()
    wanted = set(requested or [])
    known = set(available or [])
    if '*' in wanted:
        # '*' means "all declared extras".
        wanted.discard('*')
        result.update(known)
    for extra in wanted:
        if extra == '-':
            result.add(extra)
            continue
        if extra.startswith('-'):
            name = extra[1:]
            if name not in known:
                logger.warning('undeclared extra: %s' % name)
            result.discard(name)
        else:
            if extra not in known:
                logger.warning('undeclared extra: %s' % extra)
            result.add(extra)
    return result
|
909 |
+
#
|
910 |
+
# Extended metadata functionality
|
911 |
+
#
|
912 |
+
|
913 |
+
def _get_external_data(url):
    """Fetch *url* and parse the body as JSON.

    Returns the parsed object, or an empty dict on any failure (network
    error, unexpected content type, bad JSON) — failures are logged, not
    raised.
    """
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        if not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            # Decode the byte stream as UTF-8 while json reads it.
            reader = codecs.getreader('utf-8')(resp)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result
|
932 |
+
|
933 |
+
# Base URL of the external (red-dove) metadata mirror.
_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'

def get_project_data(name):
    """Fetch external project metadata for *name* as a dict ({} on failure).

    Projects are sharded by the upper-cased first letter of the name.
    """
    url = '%s/%s/project.json' % (name[0].upper(), name)
    url = urljoin(_external_data_base_url, url)
    result = _get_external_data(url)
    return result
|
940 |
+
|
941 |
+
def get_package_data(name, version):
    """Fetch external metadata for one release of *name* ({} on failure)."""
    url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    url = urljoin(_external_data_base_url, url)
    return _get_external_data(url)
|
945 |
+
|
946 |
+
|
947 |
+
class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        # Warn if group/other have any permission bits on the cache dir.
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix)

    def clear(self):
        """
        Clear the cache.

        :return: A list of entries that could not be removed.
        """
        not_removed = []
        for fn in os.listdir(self.base):
            fn = os.path.join(self.base, fn)
            try:
                if os.path.islink(fn) or os.path.isfile(fn):
                    os.remove(fn)
                elif os.path.isdir(fn):
                    shutil.rmtree(fn)
            except Exception:
                # Best-effort: collect failures rather than aborting.
                not_removed.append(fn)
        return not_removed
|
989 |
+
|
990 |
+
|
991 |
+
class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """

    def __init__(self):
        # event name -> deque of subscriber callables
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        table = self._subscribers
        if event not in table:
            table[event] = deque([subscriber])
        elif append:
            table[event].append(subscriber)
        else:
            table[event].appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        table = self._subscribers
        if event not in table:
            raise ValueError('No subscribers: %r' % event)
        table[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        result = []
        for subscriber in self.get_subscribers(event):
            try:
                value = subscriber(event, *args, **kwargs)
            except Exception:
                # One failing subscriber must not stop the others.
                logger.exception('Exception during event publication')
                value = None
            result.append(value)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
                     event, args, kwargs, result)
        return result
|
1059 |
+
|
1060 |
+
#
|
1061 |
+
# Simple sequencing
|
1062 |
+
#
|
1063 |
+
class Sequencer(object):
    """A small dependency graph used to order steps.

    Edges are stored in both directions (``_preds``: succ -> set of
    preds, ``_succs``: pred -> set of succs); ``_nodes`` holds isolated
    nodes that have no edges at all.
    """

    def __init__(self):
        self._preds = {}
        self._succs = {}
        self._nodes = set()     # nodes with no preds/succs

    def add_node(self, node):
        """Register an isolated node."""
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        """Remove *node*; with edges=True, also drop all its edges."""
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]

    def add(self, pred, succ):
        """Add the edge pred -> succ (self-loops are disallowed)."""
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        """Remove the edge pred -> succ; ValueError if it doesn't exist."""
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        """Return True if *step* is known to this sequencer."""
        return (step in self._preds or step in self._succs or
                step in self._nodes)

    def get_steps(self, final):
        """Return the steps needed to reach *final*, in execution order."""
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)

    @property
    def strong_connections(self):
        """Return the strongly connected components of the edge graph.

        Uses Tarjan's algorithm:
        http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        """
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []

        graph = self._succs

        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)

            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node],lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node],index[successor])

            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []

                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node: break
                component = tuple(connected_component)
                # storing the result
                result.append(component)

        for node in graph:
            if node not in lowlinks:
                strongconnect(node)

        return result

    @property
    def dot(self):
        """Return a Graphviz 'dot' rendering of the graph."""
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append(' %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append(' %s;' % node)
        result.append('}')
        return '\n'.join(result)
|
1197 |
+
|
1198 |
+
#
|
1199 |
+
# Unarchiving functionality for zip, tar, tgz, tbz, whl
|
1200 |
+
#
|
1201 |
+
|
1202 |
+
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
                      '.tgz', '.tbz', '.whl')

def unarchive(archive_filename, dest_dir, format=None, check=True):
    """Extract an archive into *dest_dir*.

    :param archive_filename: Path to a zip/whl/tar/tgz/tbz archive.
    :param format: One of 'zip', 'tgz', 'tbz', 'tar'; inferred from the
        filename when None.
    :param check: When True, reject members whose resolved path would
        escape *dest_dir* (path traversal).
    :raises ValueError: for an unknown format or an unsafe member path.
    """

    def check_path(path):
        # Guard against path traversal ('../..', absolute members).
        if isinstance(path, bytes):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        # Bug fix: the original tested p[plen] != os.sep, which raised
        # IndexError when a member resolved to dest_dir itself.
        if p != dest_dir and not p.startswith(dest_dir + os.sep):
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    archive = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
        else:  # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                for name in archive.namelist():
                    check_path(name)
        else:
            # Bug fix: derive the tarfile mode from *format* itself, so an
            # explicitly-passed format no longer hits an unbound 'mode'.
            mode = {'tgz': 'r:gz', 'tbz': 'r:bz2', 'tar': 'r'}[format]
            archive = tarfile.open(archive_filename, mode)
            if check:
                for name in archive.getnames():
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')
        archive.extractall(dest_dir)
    finally:
        if archive:
            archive.close()
|
1257 |
+
|
1258 |
+
|
1259 |
+
def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    buf = io.BytesIO()
    prefix_len = len(directory)
    with ZipFile(buf, "w") as zf:
        for root, dirs, files in os.walk(directory):
            # Archive names are relative to *directory*.
            relative = root[prefix_len:]
            for name in files:
                zf.write(os.path.join(root, name),
                         os.path.join(relative, name))
    return buf
|
1271 |
+
|
1272 |
+
#
|
1273 |
+
# Simple progress bar
|
1274 |
+
#
|
1275 |
+
|
1276 |
+
# SI-style magnitude prefixes used by Progress.speed.
UNITS = ('', 'K', 'M', 'G', 'T', 'P')


class Progress(object):
    """Tracks progress of a long-running operation and formats
    percentage, ETA and transfer-speed strings.
    """
    # Placeholder shown when the maximum is unknown.
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        # maxval may be None, meaning "total amount not known".
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None
        self.elapsed = 0
        self.done = False

    def update(self, curval):
        """Set the current value; starts the clock on first call."""
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        """Advance the current value by a non-negative amount."""
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        """Initialise to the minimum and return self (fluent)."""
        self.update(self.min)
        return self

    def stop(self):
        """Mark the operation finished (jumps to max if it is known)."""
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        # 'UNKNOWN' stands in when no maximum was supplied.
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        """Completion as a fixed-width string like ' 42 %' or ' ?? %'."""
        if self.done:
            result = '100 %'
        elif self.max is None:
            result = ' ?? %'
        else:
            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
            result = '%3d %%' % v
        return result

    def format_duration(self, duration):
        """Format seconds as HH:MM:SS, or '??:??:??' when indeterminate."""
        # NOTE: 'and' binds tighter than 'or' here, i.e. the condition is
        # ((duration <= 0) and self.max is None) or self.cur == self.min.
        if (duration <= 0) and self.max is None or self.cur == self.min:
            result = '??:??:??'
        else:
            result = time.strftime('%H:%M:%S', time.gmtime(duration))
        return result

    @property
    def ETA(self):
        """Estimated time remaining (or total elapsed, once done)."""
        if self.done:
            prefix = 'Done'
            t = self.elapsed
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                # Linear extrapolation from progress so far:
                # remaining = (total/done - 1) * elapsed
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        """Average rate as a human-readable string, e.g. '3 KB/s'."""
        if self.elapsed == 0:
            result = 0.0
        else:
            result = (self.cur - self.min) / self.elapsed
        # Scale down by 1000 until the value fits a three-digit figure;
        # 'unit' deliberately retains the loop's final prefix.
        for unit in UNITS:
            if result < 1000:
                break
            result /= 1000.0
        return '%d %sB/s' % (result, unit)
|
1367 |
+
|
1368 |
+
#
|
1369 |
+
# Glob functionality
|
1370 |
+
#
|
1371 |
+
|
1372 |
+
# Pattern for one {opt1,opt2,...} alternation group.
RICH_GLOB = re.compile(r'\{([^}]*)\}')
# '**' adjacent to anything other than a separator/brace is invalid.
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
# A '}' with no opening '{' before it, or a '{' never closed.
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')


def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        raise ValueError(
            """invalid glob %r: recursive glob "**" must be used alone""" % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        raise ValueError(
            """invalid glob %r: mismatching set marker '{' or '}'""" % path_glob)
    return _iglob(path_glob)
|
1386 |
+
|
1387 |
+
|
1388 |
+
def _iglob(path_glob):
    """Recursive worker for :func:`iglob`.

    Expands one ``{a,b,c}`` alternation at a time (recursing on each
    expansion), then delegates plain patterns to ``std_iglob`` and handles
    ``**`` by walking the directory tree and re-globbing the remainder in
    each directory.
    """
    rich_path_glob = RICH_GLOB.split(path_glob, 1)
    if len(rich_path_glob) > 1:
        # split with one capture group yields (prefix, body, suffix)
        assert len(rich_path_glob) == 3, rich_path_glob
        # renamed from 'set'/'dir' in the original: both shadowed builtins
        prefix, alternatives, suffix = rich_path_glob
        for item in alternatives.split(','):
            for path in _iglob(''.join((prefix, item, suffix))):
                yield path
    else:
        if '**' not in path_glob:
            for item in std_iglob(path_glob):
                yield item
        else:
            prefix, radical = path_glob.split('**', 1)
            if prefix == '':
                prefix = '.'
            if radical == '':
                radical = '*'
            else:
                # we support both '/' and '\\' as separators after '**'
                radical = radical.lstrip('/')
                radical = radical.lstrip('\\')
            for path, dirs, files in os.walk(prefix):
                path = os.path.normpath(path)
                for fn in _iglob(os.path.join(path, radical)):
                    yield fn
|
1414 |
+
|
1415 |
+
if ssl:
|
1416 |
+
from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
|
1417 |
+
CertificateError)
|
1418 |
+
|
1419 |
+
|
1420 |
+
#
|
1421 |
+
# HTTPSConnection which verifies certificates/matches domains
|
1422 |
+
#
|
1423 |
+
|
1424 |
+
    class HTTPSConnection(httplib.HTTPSConnection):
        """HTTPS connection which can verify server certificates against a
        CA bundle and match the certificate against the target hostname."""
        ca_certs = None  # set this to the path to the certs file (.pem)
        check_domain = True  # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            """Open the TCP connection (tunnelling if needed) and wrap it in
            TLS; raises CertificateError if hostname matching fails."""
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            # NOTE(review): PROTOCOL_SSLv23 negotiates the highest mutually
            # supported protocol; it is deprecated in modern Python in favour
            # of PROTOCOL_TLS_CLIENT — confirm before modernizing.
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            if hasattr(ssl, 'OP_NO_SSLv2'):
                # explicitly disable SSLv2 where the flag exists
                context.options |= ssl.OP_NO_SSLv2
            if self.cert_file:
                context.load_cert_chain(self.cert_file, self.key_file)
            kwargs = {}
            if self.ca_certs:
                context.verify_mode = ssl.CERT_REQUIRED
                context.load_verify_locations(cafile=self.ca_certs)
                if getattr(ssl, 'HAS_SNI', False):
                    # send SNI so virtual-hosted servers present the right cert
                    kwargs['server_hostname'] = self.host

            self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError:  # pragma: no cover
                    # tear the socket down before propagating the failure
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise
|
1456 |
+
|
1457 |
+
    class HTTPSHandler(BaseHTTPSHandler):
        """urllib opener handler that produces certificate-verifying
        :class:`HTTPSConnection` instances."""
        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            """Open *req*; translate a TLS verification failure reported via
            URLError into a CertificateError for callers to catch."""
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError('Unable to verify server certificate '
                                           'for %s' % req.host)
                else:
                    raise
|
1488 |
+
|
1489 |
+
#
|
1490 |
+
# To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
|
1491 |
+
# Middle proxy using HTTP listens on port 443, or an index mistakenly serves
|
1492 |
+
# HTML containing a http://xyz link when it should be https://xyz),
|
1493 |
+
# you can use the following handler class, which does not allow HTTP traffic.
|
1494 |
+
#
|
1495 |
+
# It works by inheriting from HTTPHandler - so build_opener won't add a
|
1496 |
+
# handler for HTTP itself.
|
1497 |
+
#
|
1498 |
+
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
        """Handler that rejects plain-HTTP requests outright. Inheriting
        from HTTPHandler means build_opener won't add a separate (permissive)
        handler for HTTP itself."""
        def http_open(self, req):
            raise URLError('Unexpected HTTP request on what should be a secure '
                           'connection: %s' % req)
|
1502 |
+
|
1503 |
+
#
|
1504 |
+
# XML-RPC with timeouts
|
1505 |
+
#
|
1506 |
+
class Transport(xmlrpclib.Transport):
    """xmlrpclib Transport that applies a per-connection timeout."""
    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        """Return an HTTPConnection for *host* (cached per host) with the
        transport's timeout applied."""
        h, eh, x509 = self.get_host_info(host)
        if not self._connection or host != self._connection[0]:
            self._extra_headers = eh
            # Fix: the timeout was previously stored but never passed to the
            # connection, so plain-HTTP XML-RPC ignored it even though the
            # HTTPS path (SafeTransport) honoured it.
            self._connection = host, httplib.HTTPConnection(
                h, timeout=self.timeout)
        return self._connection[1]
|
1517 |
+
|
1518 |
+
if ssl:
    class SafeTransport(xmlrpclib.SafeTransport):
        """xmlrpclib SafeTransport (HTTPS) that applies a per-connection
        timeout."""
        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            """Return an HTTPSConnection for *host* (cached per host),
            forwarding the timeout through the connection kwargs."""
            h, eh, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            kwargs['timeout'] = self.timeout
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPSConnection(h, None,
                                                                 **kwargs)
            return self._connection[1]
|
1534 |
+
|
1535 |
+
|
1536 |
+
class ServerProxy(xmlrpclib.ServerProxy):
    """ServerProxy accepting an optional ``timeout`` keyword, implemented by
    substituting the timeout-aware Transport/SafeTransport classes above."""
    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            # scheme = splittype(uri)  # deprecated as of Python 3.8
            scheme = urlparse(uri)[0]
            use_datetime = kwargs.get('use_datetime', 0)
            if scheme == 'https':
                tcls = SafeTransport
            else:
                tcls = Transport
            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
|
1552 |
+
|
1553 |
+
#
|
1554 |
+
# CSV functionality. This is provided because on 2.x, the csv module can't
|
1555 |
+
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
|
1556 |
+
#
|
1557 |
+
|
1558 |
+
def _csv_open(fn, mode, **kwargs):
|
1559 |
+
if sys.version_info[0] < 3:
|
1560 |
+
mode += 'b'
|
1561 |
+
else:
|
1562 |
+
kwargs['newline'] = ''
|
1563 |
+
# Python 3 determines encoding from locale. Force 'utf-8'
|
1564 |
+
# file encoding to match other forced utf-8 encoding
|
1565 |
+
kwargs['encoding'] = 'utf-8'
|
1566 |
+
return open(fn, mode, **kwargs)
|
1567 |
+
|
1568 |
+
|
1569 |
+
class CSVBase(object):
    """Shared csv dialect defaults and context-manager plumbing for the
    CSV reader/writer wrappers."""
    # The strs are used because we need native str in the csv API
    # (2.x won't take Unicode)
    defaults = {
        'delimiter': str(','),
        'quotechar': str('"'),
        'lineterminator': str('\n')
    }

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()
|
1581 |
+
|
1582 |
+
|
1583 |
+
class CSVReader(CSVBase):
    """Iterator over CSV rows from an open stream or a file path, yielding
    text cells on both Python 2 and 3."""
    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # needs to be a text stream
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        """Return the next row; on Python 2, decode byte cells to unicode."""
        result = next(self.reader)
        if sys.version_info[0] < 3:
            for i, item in enumerate(result):
                if not isinstance(item, text_type):
                    result[i] = item.decode('utf-8')
        return result

    # Python 3 iterator protocol
    __next__ = next
|
1607 |
+
|
1608 |
+
class CSVWriter(CSVBase):
    """Writer of CSV rows to a file, encoding text cells to UTF-8 bytes on
    Python 2."""
    def __init__(self, fn, **kwargs):
        # NOTE(review): kwargs is accepted but not used here — confirm
        # whether it was meant to be forwarded to csv.writer.
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        """Write one row; on Python 2, encode unicode cells as UTF-8 first."""
        if sys.version_info[0] < 3:
            r = []
            for item in row:
                if isinstance(item, text_type):
                    item = item.encode('utf-8')
                r.append(item)
            row = r
        self.writer.writerow(row)
|
1622 |
+
|
1623 |
+
#
|
1624 |
+
# Configurator functionality
|
1625 |
+
#
|
1626 |
+
|
1627 |
+
class Configurator(BaseConfigurator):
    """BaseConfigurator subclass adding an ``inc://`` value converter and
    lazy construction of '()'-style object specs on item access."""

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        super(Configurator, self).__init__(config)
        # base directory used to resolve relative inc:// paths
        self.base = base or os.getcwd()

    def configure_custom(self, config):
        """Instantiate the object described by *config*: '()' names the
        callable, '[]' gives positional args, '.' gives attributes to set
        afterwards, and remaining valid-identifier keys become kwargs."""
        def convert(o):
            # recursively convert containers; nested '()' dicts become
            # constructed objects, scalars go through self.convert
            if isinstance(o, (list, tuple)):
                result = type(o)([convert(i) for i in o])
            elif isinstance(o, dict):
                if '()' in o:
                    result = self.configure_custom(o)
                else:
                    result = {}
                    for k in o:
                        result[k] = convert(o[k])
            else:
                result = self.convert(o)
            return result

        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(o) for o in args])
        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
        kwargs = dict(items)
        result = c(*args, **kwargs)
        if props:
            # apply '.'-specified attributes after construction
            for n, v in props.items():
                setattr(result, n, convert(v))
        return result

    def __getitem__(self, key):
        result = self.config[key]
        if isinstance(result, dict) and '()' in result:
            # replace the spec with the constructed object (cached in config)
            self.config[key] = result = self.configure_custom(result)
        return result

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        if not os.path.isabs(value):
            value = os.path.join(self.base, value)
        with codecs.open(value, 'r', encoding='utf-8') as f:
            result = json.load(f)
        return result
|
1680 |
+
|
1681 |
+
|
1682 |
+
class SubprocessMixin(object):
    """
    Mixin for running subprocesses and capturing their output
    """
    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            s = stream.readline()
            if not s:
                # EOF: the subprocess closed this end of the pipe
                break
            if progress is not None:
                progress(s, context)
            else:
                if not verbose:
                    sys.stderr.write('.')
                else:
                    sys.stderr.write(s.decode('utf-8'))
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        """Run *cmd* with stdout/stderr drained on two helper threads
        (avoiding pipe-buffer deadlock); returns the finished Popen object."""
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, **kwargs)
        t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
        t1.start()
        t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
        t2.start()
        p.wait()
        t1.join()
        t2.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return p
|
1726 |
+
|
1727 |
+
|
1728 |
+
def normalize_name(name):
    """Normalize a python package name a la PEP 503"""
    # https://www.python.org/dev/peps/pep-0503/#normalized-names
    return re.sub(r'[-_.]+', '-', name.lower())
|
1732 |
+
|
1733 |
+
# def _get_pypirc_command():
|
1734 |
+
# """
|
1735 |
+
# Get the distutils command for interacting with PyPI configurations.
|
1736 |
+
# :return: the command.
|
1737 |
+
# """
|
1738 |
+
# from distutils.core import Distribution
|
1739 |
+
# from distutils.config import PyPIRCCommand
|
1740 |
+
# d = Distribution()
|
1741 |
+
# return PyPIRCCommand(d)
|
1742 |
+
|
1743 |
+
class PyPIRCFile(object):
    """Reader/writer for the ~/.pypirc file used by distutils to store
    PyPI repository credentials."""

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'

    def __init__(self, fn=None, url=None):
        if fn is None:
            # default location: ~/.pypirc
            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
        self.filename = fn
        self.url = url

    def read(self):
        """Return a dict with 'username', 'password', 'repository', 'realm'
        and 'server' for the matching server entry, or {} when no config
        file or matching entry exists."""
        result = {}

        if os.path.exists(self.filename):
            repository = self.url or self.DEFAULT_REPOSITORY

            config = configparser.RawConfigParser()
            config.read(self.filename)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in
                            index_servers.split('\n')
                            if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                else:
                    for server in _servers:
                        result = {'server': server}
                        result['username'] = config.get(server, 'username')

                        # optional params
                        for key, default in (('repository', self.DEFAULT_REPOSITORY),
                                             ('realm', self.DEFAULT_REALM),
                                             ('password', None)):
                            if config.has_option(server, key):
                                result[key] = config.get(server, key)
                            else:
                                result[key] = default

                        # work around people having "repository" for the "pypi"
                        # section of their config set to the HTTP (rather than
                        # HTTPS) URL
                        if (server == 'pypi' and
                                repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                            result['repository'] = self.DEFAULT_REPOSITORY
                        elif (result['server'] != repository and
                              result['repository'] != repository):
                            # this entry doesn't match the requested
                            # repository: discard it and keep scanning
                            result = {}
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                result = {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM
                }
        return result

    def update(self, username, password):
        """Write *username*/*password* into the [pypi] section of the file,
        creating the section if it doesn't exist."""
        # import pdb; pdb.set_trace()
        config = configparser.RawConfigParser()
        fn = self.filename
        config.read(fn)
        if not config.has_section('pypi'):
            config.add_section('pypi')
        config.set('pypi', 'username', username)
        config.set('pypi', 'password', password)
        with open(fn, 'w') as f:
            config.write(f)
|
1823 |
+
|
1824 |
+
def _load_pypirc(index):
    """
    Read the PyPI access configuration as supported by distutils.
    """
    return PyPIRCFile(url=index.url).read()


def _store_pypirc(index):
    """Persist *index*'s username/password to the default ~/.pypirc file."""
    PyPIRCFile().update(index.username, index.password)
|
1832 |
+
|
1833 |
+
#
|
1834 |
+
# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
|
1835 |
+
# tweaks
|
1836 |
+
#
|
1837 |
+
|
1838 |
+
def get_host_platform():
    """Return a string that identifies the current platform. This is used mainly to
    distinguish platform-specific build directories and platform-specific built
    distributions. Typically includes the OS name and version and the
    architecture (as supplied by 'os.uname()'), although the exact information
    included depends on the OS; eg. on Linux, the kernel version isn't
    particularly important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.

    """
    if os.name == 'nt':
        # On Windows the architecture is embedded in sys.version
        if 'amd64' in sys.version.lower():
            return 'win-amd64'
        if '(arm)' in sys.version.lower():
            return 'win-arm32'
        if '(arm64)' in sys.version.lower():
            return 'win-arm64'
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    if os.name != 'posix' or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters, and translate
    # spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_').replace('/', '-')

    if osname[:5] == 'linux':
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)

    elif osname[:5] == 'sunos':
        if release[0] >= '5':  # SunOS 5 == Solaris 2
            osname = 'solaris'
            release = '%d.%s' % (int(release[0]) - 3, release[2:])
            # We can't use 'platform.architecture()[0]' because a
            # bootstrap problem. We use a dict to get an error
            # if some suspicious happens.
            bitness = {2147483647:'32bit', 9223372036854775807:'64bit'}
            machine += '.%s' % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:3] == 'aix':
        from _aix_support import aix_platform
        return aix_platform()
    elif osname[:6] == 'cygwin':
        osname = 'cygwin'
        rel_re = re.compile (r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == 'darwin':
        # macOS naming is delegated to the interpreter's own support module
        import _osx_support, distutils.sysconfig
        osname, release, machine = _osx_support.get_platform_osx(
                                            distutils.sysconfig.get_config_vars(),
                                            osname, release, machine)

    return '%s-%s-%s' % (osname, release, machine)
|
1917 |
+
|
1918 |
+
|
1919 |
+
_TARGET_TO_PLAT = {
|
1920 |
+
'x86' : 'win32',
|
1921 |
+
'x64' : 'win-amd64',
|
1922 |
+
'arm' : 'win-arm32',
|
1923 |
+
}
|
1924 |
+
|
1925 |
+
|
1926 |
+
def get_platform():
|
1927 |
+
if os.name != 'nt':
|
1928 |
+
return get_host_platform()
|
1929 |
+
cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH')
|
1930 |
+
if cross_compilation_target not in _TARGET_TO_PLAT:
|
1931 |
+
return get_host_platform()
|
1932 |
+
return _TARGET_TO_PLAT[cross_compilation_target]
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/distlib/version.py
ADDED
@@ -0,0 +1,739 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# Copyright (C) 2012-2017 The Python Software Foundation.
|
4 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
5 |
+
#
|
6 |
+
"""
|
7 |
+
Implementation of a flexible versioning scheme providing support for PEP-440,
|
8 |
+
setuptools-compatible and semantic versioning.
|
9 |
+
"""
|
10 |
+
|
11 |
+
import logging
|
12 |
+
import re
|
13 |
+
|
14 |
+
from .compat import string_types
|
15 |
+
from .util import parse_requirement
|
16 |
+
|
17 |
+
# Public API of this version-scheme module.
__all__ = ['NormalizedVersion', 'NormalizedMatcher',
           'LegacyVersion', 'LegacyMatcher',
           'SemanticVersion', 'SemanticMatcher',
           'UnsupportedVersionError', 'get_scheme']

# Module-level logger; handlers/levels are configured by the embedding app.
logger = logging.getLogger(__name__)
|
23 |
+
|
24 |
+
|
25 |
+
class UnsupportedVersionError(ValueError):
    """Raised when a version string cannot be parsed by the active scheme."""
|
28 |
+
|
29 |
+
|
30 |
+
class Version(object):
    """Abstract base for version objects: strips the input string, delegates
    parsing to :meth:`parse`, and implements comparison over the resulting
    parts tuple."""
    def __init__(self, s):
        self._string = s = s.strip()
        self._parts = parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0

    def parse(self, s):
        """Parse *s* into a non-empty tuple of comparable parts."""
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        # comparisons are only defined between identical Version subclasses
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        # derived from __lt__/__eq__ so compatibility is checked once
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        """Whether this version is a pre-release; defined by subclasses."""
        raise NotImplementedError('Please implement in subclasses.')
|
77 |
+
|
78 |
+
|
79 |
+
class Matcher(object):
    """Parses a requirement string (e.g. ``name (>=1.0,<2.0)``) and checks
    candidate versions against its constraints."""
    # subclasses must set this to the Version subclass used for comparisons
    version_class = None

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    # this is a method only to support alternative implementations
    # via overriding
    def parse_requirement(self, s):
        return parse_requirement(s)

    def __init__(self, s):
        """Parse requirement string *s* into name/key/constraint parts;
        raises ValueError for a non-concrete class or an invalid string."""
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        r = self.parse_requirement(s)
        if not r:
            raise ValueError('Not valid: %r' % s)
        self.name = r.name
        self.key = self.name.lower()    # for case-insensitive comparisons
        clist = []
        if r.constraints:
            # import pdb; pdb.set_trace()
            for op, s in r.constraints:
                if s.endswith('.*'):
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    # Just to check that vn is a valid version
                    self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            if isinstance(f, string_types):
                # operator registered by method name -> resolve to bound method
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        """The pinned version when the matcher is a single ==/=== constraint,
        otherwise None."""
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string
|
177 |
+
|
178 |
+
|
179 |
+
PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
|
180 |
+
r'(\.(post)(\d+))?(\.(dev)(\d+))?'
|
181 |
+
r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')
|
182 |
+
|
183 |
+
|
184 |
+
def _pep_440_key(s):
    """Compute a PEP 440 sort key for version string *s*.

    Returns ``(epoch, nums, pre, post, dev, local)`` where each component
    is encoded so that plain tuple comparison orders versions per PEP 440.

    :raises UnsupportedVersionError: if *s* does not match
        ``PEP440_VERSION_RE``.
    """
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    # Trailing zeroes are insignificant for ordering: 1.2.0 == 1.2.
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        # groups[0] is e.g. '2!' -- strip the '!' delimiter.
        epoch = int(groups[0][:-1])
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    # Fill in sentinel values for absent segments so that tuple
    # comparison orders dev < pre-release < final < post correctly.
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)  # to sort before a0
        else:
            pre = ('z',)  # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)  # sort before 'a'
    if not dev:
        dev = ('final',)

    #print('%s -> %s' % (s, m.groups()))
    return epoch, nums, pre, post, dev, local
|
243 |
+
|
244 |
+
|
245 |
+
_normalized_key = _pep_440_key
|
246 |
+
|
247 |
+
|
248 |
+
class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """
    def parse(self, s):
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)      # must succeed
        groups = m.groups()
        # Keep the untruncated release tuple for prefix ('X.Y.*') matching.
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    # Sort-key tags whose presence marks a pre-release (see _pep_440_key).
    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
|
282 |
+
|
283 |
+
|
284 |
+
def _match_prefix(x, y):
|
285 |
+
x = str(x)
|
286 |
+
y = str(y)
|
287 |
+
if x == y:
|
288 |
+
return True
|
289 |
+
if not x.startswith(y):
|
290 |
+
return False
|
291 |
+
n = len(y)
|
292 |
+
return x[n] == '.'
|
293 |
+
|
294 |
+
|
295 |
+
class NormalizedMatcher(Matcher):
    """Matcher implementing PEP 440 comparison semantics."""
    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        # A version's local label ('+...') only participates in the
        # comparison when the constraint itself mentions one; otherwise
        # re-parse the version with the local component stripped.
        if prefix:
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        # Exclusive ordered comparison: a version inside the constraint's
        # release series does not satisfy '<'.
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        # Mirror of _match_lt: exclude versions in the same release series.
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        # With prefix (i.e. '== X.Y.*') this is a series match rather
        # than strict equality.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        # '===' is plain string equality -- no version semantics at all.
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        # '~= X.Y.Z' is equivalent to '>= X.Y.Z, == X.Y.*'.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
        # if not prefix:
        #     return True
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)
|
380 |
+
|
381 |
+
# Pattern/replacement pairs applied in order to coerce arbitrary version
# strings closer to a semantic form (see _suggest_semantic_version).
_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
    (re.compile('^[.-]'), ''),                      # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
     'pre.alpha'),                                  # standardise
    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
)
|
394 |
+
|
395 |
+
# Clean-ups applied to the non-numeric suffix of a suggested semantic
# version (see _suggest_semantic_version).
_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),     # remove leading puncts
    (re.compile('[,*")([\\]]'), ''),    # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),      # replace illegal chars
    (re.compile('[.]{2,}'), '.'),       # multiple runs of '.'
    (re.compile(r'\.$'), ''),           # trailing '.'
)

# Leading dotted-numeric run of a version string, e.g. '1.2.3' in '1.2.3rc1'.
_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
|
404 |
+
|
405 |
+
|
406 |
+
def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.

    Returns a semver-style string, or None if no valid form was found.
    """
    result = s.strip().lower()
    for pat, repl in _REPLACEMENTS:
        result = pat.sub(repl, result)
    if not result:
        result = '0.0.0'

    # Now look for numeric prefix, and separate it out from
    # the rest.
    #import pdb; pdb.set_trace()
    m = _NUMERIC_PREFIX.match(result)
    if not m:
        prefix = '0.0.0'
        suffix = result
    else:
        prefix = m.groups()[0].split('.')
        prefix = [int(i) for i in prefix]
        # Pad to the mandatory major.minor.patch triple.
        while len(prefix) < 3:
            prefix.append(0)
        if len(prefix) == 3:
            suffix = result[m.end():]
        else:
            # More than three numeric components: fold the extras into
            # the suffix and truncate the prefix to three.
            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
            prefix = prefix[:3]
        prefix = '.'.join([str(i) for i in prefix])
        suffix = suffix.strip()
    if suffix:
        #import pdb; pdb.set_trace()
        # massage the suffix.
        for pat, repl in _SUFFIX_REPLACEMENTS:
            suffix = pat.sub(repl, suffix)

    if not suffix:
        result = prefix
    else:
        # 'dev' suffixes become pre-release ('-'); anything else becomes
        # build metadata ('+').
        sep = '-' if 'dev' in suffix else '+'
        result = prefix + sep + suffix
    if not is_semver(result):
        result = None
    return result
|
450 |
+
|
451 |
+
|
452 |
+
def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    try:
        _normalized_key(s)
        return s   # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    #TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    #   0.4a1.r10       ->  0.4a1.post10
    #   0.9.33-17222    ->  0.9.33.post17222
    #   0.9.33-r17222   ->  0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    #   0.9.33+r17222   ->  0.9.33.dev17222
    #   1.0dev123       ->  1.0.dev123
    #   1.0.git123      ->  1.0.dev123
    #   1.0.bzr123      ->  1.0.dev123
    #   0.1a0dev.123    ->  0.1a0.dev123
    # PyPI stats:  ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    #   0.2.pre1        ->  0.2c1
    #   0.2-c1         ->  0.2c1
    #   1.0preview123  ->  1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    # Final sanity check: only return the rewrite if it now parses.
    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs
|
561 |
+
|
562 |
+
#
|
563 |
+
# Legacy version processing (distribute-compatible)
|
564 |
+
#
|
565 |
+
|
566 |
+
# Splits a version string into alternating alpha / numeric / separator tokens.
_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
# Token canonicalisation for the legacy scheme: 'pre'/'preview'/'rc' -> 'c';
# 'dev' -> '@' (sorts before any letter); '-' marks a final release;
# '' and '.' map to None so they are dropped.
_VERSION_REPLACE = {
    'pre': 'c',
    'preview': 'c',
    '-': 'final-',
    'rc': 'c',
    'dev': '@',
    '': None,
    '.': None,
}
|
576 |
+
|
577 |
+
|
578 |
+
def _legacy_key(s):
    """Compute a setuptools/distribute-compatible sort key for *s*."""
    def get_parts(s):
        # Tokenise and encode: numeric parts are zero-filled so string
        # comparison behaves numerically; alpha parts get a '*' prefix so
        # they sort before numbers.
        result = []
        for p in _VERSION_PART.split(s.lower()):
            p = _VERSION_REPLACE.get(p, p)
            if p:
                if '0' <= p[:1] <= '9':
                    p = p.zfill(8)
                else:
                    p = '*' + p
                result.append(p)
        result.append('*final')
        return result

    result = []
    for p in get_parts(s):
        if p.startswith('*'):
            if p < '*final':
                # A pre-release tag cancels any pending '-' (final) marker.
                while result and result[-1] == '*final-':
                    result.pop()
            # Trailing zero components are insignificant before a tag.
            while result and result[-1] == '00000000':
                result.pop()
        result.append(p)
    return tuple(result)
|
602 |
+
|
603 |
+
|
604 |
+
class LegacyVersion(Version):
    """Version implementing setuptools/distribute-style ("legacy")
    ordering."""
    def parse(self, s):
        return _legacy_key(s)

    @property
    def is_prerelease(self):
        # Pre-release markers are the '*'-prefixed parts that sort before
        # '*final' (e.g. '*a', '*b', '*c', '*@' for dev).
        return any(isinstance(part, string_types)
                   and part.startswith('*')
                   and part < '*final'
                   for part in self._parts)
|
617 |
+
|
618 |
+
|
619 |
+
class LegacyMatcher(Matcher):
    """Matcher using legacy (setuptools-style) version ordering."""
    version_class = LegacyVersion

    # Inherit the standard operator table, but '~=' needs legacy-aware logic.
    _operators = dict(Matcher._operators)
    _operators['~='] = '_match_compatible'

    # Leading dotted-numeric run of a constraint, used to derive the series.
    numeric_re = re.compile(r'^(\d+(\.\d+)*)')

    def _match_compatible(self, version, constraint, prefix):
        # '~= X.Y.Z' means: at least X.Y.Z, and within the X.Y series.
        if version < constraint:
            return False
        m = self.numeric_re.match(str(constraint))
        if not m:
            # No numeric prefix to derive a series from -- be permissive.
            logger.warning('Cannot compute compatible match for version %s '
                           ' and constraint %s', version, constraint)
            return True
        s = m.groups()[0]
        if '.' in s:
            s = s.rsplit('.', 1)[0]
        return _match_prefix(version, s)
|
639 |
+
|
640 |
+
#
|
641 |
+
# Semantic versioning
|
642 |
+
#
|
643 |
+
|
644 |
+
_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
|
645 |
+
r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
|
646 |
+
r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
|
647 |
+
|
648 |
+
|
649 |
+
def is_semver(s):
    # Deliberately returns the match object (truthy) rather than a bool:
    # _semantic_key reuses the captured groups.
    return _SEMVER_RE.match(s)
|
651 |
+
|
652 |
+
|
653 |
+
def _semantic_key(s):
    """Compute a semantic-versioning sort key for *s*.

    :raises UnsupportedVersionError: if *s* is not valid semver.
    """
    def make_tuple(s, absent):
        if s is None:
            result = (absent,)
        else:
            # Drop the leading '-'/'+' delimiter before splitting.
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values so simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build
|
672 |
+
|
673 |
+
|
674 |
+
class SemanticVersion(Version):
    """Version implementing semantic-versioning (semver.org) ordering."""
    def parse(self, s):
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        # _semantic_key encodes an absent pre-release component as ('|',),
        # chosen so it sorts after any real pre-release tuple.
        return self._parts[1][0] != '|'
|
681 |
+
|
682 |
+
|
683 |
+
class SemanticMatcher(Matcher):
    # Matcher using semantic-versioning ordering; inherits the default
    # operator table from Matcher.
    version_class = SemanticVersion
|
685 |
+
|
686 |
+
|
687 |
+
class VersionScheme(object):
    """Bundle of a sort-key function, a Matcher class and an optional
    version-string suggester describing one versioning convention."""

    def __init__(self, key, matcher, suggester=None):
        self.key = key
        self.matcher = matcher
        self.suggester = suggester

    def is_valid_version(self, s):
        """Return True if *s* parses under this scheme's version class."""
        try:
            self.matcher.version_class(s)
        except UnsupportedVersionError:
            return False
        return True

    def is_valid_matcher(self, s):
        """Return True if *s* parses as a requirement/constraint string."""
        try:
            self.matcher(s)
        except UnsupportedVersionError:
            return False
        return True

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        # See issue #140. Be tolerant of a single trailing comma.
        if s.endswith(','):
            s = s[:-1]
        # A constraint list is valid iff it parses with a dummy name.
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        """Return a suggested valid form of *s*, or None if there is no
        suggester for this scheme."""
        return None if self.suggester is None else self.suggester(s)
|
724 |
+
|
725 |
+
# Registered versioning schemes, keyed by name.
_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    # BUG FIX: VersionScheme.suggest invokes ``self.suggester(s)`` with a
    # single argument (the suggester is a plain attribute, not a bound
    # method), so the identity suggester must take exactly one parameter.
    # The previous ``lambda self, s: s`` raised TypeError whenever
    # suggest() was called on the legacy scheme.
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

# 'default' is an alias for the PEP 440 ("normalized") scheme.
_SCHEMES['default'] = _SCHEMES['normalized']
|
734 |
+
|
735 |
+
|
736 |
+
def get_scheme(name):
    """Return the :class:`VersionScheme` registered as *name*
    ('normalized', 'legacy', 'semantic' or 'default').

    :raises ValueError: if *name* is not a known scheme name.
    """
    if name not in _SCHEMES:
        raise ValueError('unknown scheme name: %r' % name)
    return _SCHEMES[name]
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py
ADDED
The diff for this file is too large to render.
See raw diff
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (99.9 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc
ADDED
Binary file (653 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pkg_resources/py31compat.py
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import errno
|
3 |
+
import sys
|
4 |
+
|
5 |
+
from pip._vendor import six
|
6 |
+
|
7 |
+
|
8 |
+
def _makedirs_31(path, exist_ok=False):
|
9 |
+
try:
|
10 |
+
os.makedirs(path)
|
11 |
+
except OSError as exc:
|
12 |
+
if not exist_ok or exc.errno != errno.EEXIST:
|
13 |
+
raise
|
14 |
+
|
15 |
+
|
16 |
+
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
# On Python 2 (no exist_ok at all) and early 3.4 point releases, substitute
# the shim above; otherwise use the stdlib implementation directly.
needs_makedirs = (
    six.PY2 or
    (3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__init__.py
ADDED
@@ -0,0 +1,331 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and
|
3 |
+
usage.
|
4 |
+
"""
|
5 |
+
from __future__ import annotations
|
6 |
+
|
7 |
+
import importlib
|
8 |
+
import os
|
9 |
+
import sys
|
10 |
+
from pathlib import Path
|
11 |
+
from typing import TYPE_CHECKING
|
12 |
+
|
13 |
+
if TYPE_CHECKING:
|
14 |
+
from pip._vendor.typing_extensions import Literal # pragma: no cover
|
15 |
+
|
16 |
+
from .api import PlatformDirsABC
|
17 |
+
from .version import __version__, __version_info__
|
18 |
+
|
19 |
+
|
20 |
+
def _set_platform_dir_class() -> type[PlatformDirsABC]:
    """Select and import the concrete :class:`PlatformDirsABC` subclass
    for the current operating system."""
    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
        # Android is detected via environment variables rather than
        # sys.platform (which reports plain 'linux' there).
        module, name = "pip._vendor.platformdirs.android", "Android"
    elif sys.platform == "win32":
        module, name = "pip._vendor.platformdirs.windows", "Windows"
    elif sys.platform == "darwin":
        module, name = "pip._vendor.platformdirs.macos", "MacOS"
    else:
        module, name = "pip._vendor.platformdirs.unix", "Unix"
    result: type[PlatformDirsABC] = getattr(importlib.import_module(module), name)
    return result
|
31 |
+
|
32 |
+
|
33 |
+
# Resolve the OS-specific implementation once at import time.
PlatformDirs = _set_platform_dir_class()  #: Currently active platform
AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
|
35 |
+
|
36 |
+
|
37 |
+
def user_data_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :returns: data directory tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir
|
51 |
+
|
52 |
+
|
53 |
+
def site_data_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
    :returns: data directory shared by users
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir
|
67 |
+
|
68 |
+
|
69 |
+
def user_config_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :returns: config directory tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir
|
83 |
+
|
84 |
+
|
85 |
+
def site_config_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
    :returns: config directory shared by the users
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir
|
99 |
+
|
100 |
+
|
101 |
+
def user_cache_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :returns: cache directory tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir
|
115 |
+
|
116 |
+
|
117 |
+
def user_state_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :returns: state directory tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir
|
131 |
+
|
132 |
+
|
133 |
+
def user_log_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :returns: log directory tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir
|
147 |
+
|
148 |
+
|
149 |
+
def user_documents_dir() -> str:
    """
    :returns: documents directory tied to the user
    """
    dirs = PlatformDirs()
    return dirs.user_documents_dir
|
154 |
+
|
155 |
+
|
156 |
+
def user_runtime_dir(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,
) -> str:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :returns: runtime directory tied to the user
    """
    dirs = PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion)
    return dirs.user_runtime_dir
|
170 |
+
|
171 |
+
|
172 |
+
def user_data_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :returns: data path tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path
|
186 |
+
|
187 |
+
|
188 |
+
def site_data_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
    :returns: data path shared by users
    """
    dirs = PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath)
    return dirs.site_data_path
|
202 |
+
|
203 |
+
|
204 |
+
def user_config_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :returns: config path tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path
|
218 |
+
|
219 |
+
|
220 |
+
def site_config_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    multipath: bool = False,
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
    :returns: config path shared by the users
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path
|
234 |
+
|
235 |
+
|
236 |
+
def user_cache_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :returns: cache path tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path
|
250 |
+
|
251 |
+
|
252 |
+
def user_state_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    roaming: bool = False,
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
    :returns: state path tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path
|
266 |
+
|
267 |
+
|
268 |
+
def user_log_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,
) -> Path:
    """
    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :returns: log path tied to the user
    """
    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path
|
282 |
+
|
283 |
+
|
284 |
+
def user_documents_path() -> Path:
    """
    :returns: documents path tied to the user
    """
    # No app-specific parameters apply to the documents folder.
    dirs = PlatformDirs()
    return dirs.user_documents_path
|
289 |
+
|
290 |
+
|
291 |
+
def user_runtime_path(
    appname: str | None = None,
    appauthor: str | None | Literal[False] = None,
    version: str | None = None,
    opinion: bool = True,
) -> Path:
    """
    Return the runtime path tied to the user as a :class:`pathlib.Path`.

    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    :returns: runtime path tied to the user
    """
    dirs = PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion)
    return dirs.user_runtime_path
|
305 |
+
|
306 |
+
|
307 |
+
# Public names re-exported by the platformdirs package.
__all__ = [
    "__version__",
    "__version_info__",
    "PlatformDirs",
    "AppDirs",
    "PlatformDirsABC",
    "user_data_dir",
    "user_config_dir",
    "user_cache_dir",
    "user_state_dir",
    "user_log_dir",
    "user_documents_dir",
    "user_runtime_dir",
    "site_data_dir",
    "site_config_dir",
    "user_data_path",
    "user_config_path",
    "user_cache_path",
    "user_state_path",
    "user_log_path",
    "user_documents_path",
    "user_runtime_path",
    "site_data_path",
    "site_config_path",
]
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__main__.py
ADDED
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
from pip._vendor.platformdirs import PlatformDirs, __version__
|
4 |
+
|
5 |
+
# Directory properties of PlatformDirs printed by main(), in output order.
PROPS = (
    "user_data_dir",
    "user_config_dir",
    "user_cache_dir",
    "user_state_dir",
    "user_log_dir",
    "user_documents_dir",
    "user_runtime_dir",
    "site_data_dir",
    "site_config_dir",
)
|
16 |
+
|
17 |
+
|
18 |
+
def main() -> None:
    """Print the resolved platform directories for a demo app in several configurations.

    Exercises PlatformDirs with and without the optional ``version`` and
    ``appauthor`` parameters so the effect of each can be compared.
    """
    app_name = "MyApp"
    app_author = "MyCompany"

    print(f"-- platformdirs {__version__} --")

    print("-- app dirs (with optional 'version')")
    _print_dirs(PlatformDirs(app_name, app_author, version="1.0"))

    print("\n-- app dirs (without optional 'version')")
    _print_dirs(PlatformDirs(app_name, app_author))

    print("\n-- app dirs (without optional 'appauthor')")
    _print_dirs(PlatformDirs(app_name))

    print("\n-- app dirs (with disabled 'appauthor')")
    _print_dirs(PlatformDirs(app_name, appauthor=False))


def _print_dirs(dirs: PlatformDirs) -> None:
    """Print every directory property listed in PROPS for *dirs*."""
    for prop in PROPS:
        print(f"{prop}: {getattr(dirs, prop)}")


if __name__ == "__main__":
    main()
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (10.5 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-310.pyc
ADDED
Binary file (1.23 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-310.pyc
ADDED
Binary file (4.26 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-310.pyc
ADDED
Binary file (5.2 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-310.pyc
ADDED
Binary file (3.18 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-310.pyc
ADDED
Binary file (6.88 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-310.pyc
ADDED
Binary file (288 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-310.pyc
ADDED
Binary file (6.43 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/android.py
ADDED
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import re
|
5 |
+
import sys
|
6 |
+
from functools import lru_cache
|
7 |
+
|
8 |
+
from .api import PlatformDirsABC
|
9 |
+
|
10 |
+
|
11 |
+
class Android(PlatformDirsABC):
    """
    Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_. Makes use of the
    `appname <platformdirs.api.PlatformDirsABC.appname>` and
    `version <platformdirs.api.PlatformDirsABC.version>`.
    """

    @property
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
        return self._append_app_name_and_version(_android_folder(), "files")

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_config_dir(self) -> str:
        """
        :return: config directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
        """
        return self._append_app_name_and_version(_android_folder(), "shared_prefs")

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `user_config_dir`"""
        return self.user_config_dir

    @property
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>``"""
        return self._append_app_name_and_version(_android_folder(), "cache")

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
        """
        path = self.user_cache_dir
        if self.opinion:
            path = os.path.join(path, "log")
        return path

    @property
    def user_documents_dir(self) -> str:
        """
        :return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``
        """
        return _android_documents_folder()

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
        """
        path = self.user_cache_dir
        if self.opinion:
            path = os.path.join(path, "tmp")
        return path
|
78 |
+
|
79 |
+
|
80 |
+
@lru_cache(maxsize=1)
|
81 |
+
def _android_folder() -> str:
|
82 |
+
""":return: base folder for the Android OS"""
|
83 |
+
try:
|
84 |
+
# First try to get path to android app via pyjnius
|
85 |
+
from jnius import autoclass
|
86 |
+
|
87 |
+
Context = autoclass("android.content.Context") # noqa: N806
|
88 |
+
result: str = Context.getFilesDir().getParentFile().getAbsolutePath()
|
89 |
+
except Exception:
|
90 |
+
# if fails find an android folder looking path on the sys.path
|
91 |
+
pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
|
92 |
+
for path in sys.path:
|
93 |
+
if pattern.match(path):
|
94 |
+
result = path.split("/files")[0]
|
95 |
+
break
|
96 |
+
else:
|
97 |
+
raise OSError("Cannot find path to android app folder")
|
98 |
+
return result
|
99 |
+
|
100 |
+
|
101 |
+
@lru_cache(maxsize=1)
|
102 |
+
def _android_documents_folder() -> str:
|
103 |
+
""":return: documents folder for the Android OS"""
|
104 |
+
# Get directories with pyjnius
|
105 |
+
try:
|
106 |
+
from jnius import autoclass
|
107 |
+
|
108 |
+
Context = autoclass("android.content.Context") # noqa: N806
|
109 |
+
Environment = autoclass("android.os.Environment") # noqa: N806
|
110 |
+
documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
|
111 |
+
except Exception:
|
112 |
+
documents_dir = "/storage/emulated/0/Documents"
|
113 |
+
|
114 |
+
return documents_dir
|
115 |
+
|
116 |
+
|
117 |
+
# Public API of this module.
__all__ = [
    "Android",
]
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/api.py
ADDED
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
from abc import ABC, abstractmethod
|
6 |
+
from pathlib import Path
|
7 |
+
|
8 |
+
if sys.version_info >= (3, 8): # pragma: no branch
|
9 |
+
from typing import Literal # pragma: no cover
|
10 |
+
|
11 |
+
|
12 |
+
class PlatformDirsABC(ABC):
    """
    Abstract base class for platform directories.

    Concrete subclasses implement the per-platform ``*_dir`` properties; the
    ``*_path`` properties below wrap them as :class:`pathlib.Path` objects.
    """

    def __init__(
        self,
        appname: str | None = None,
        appauthor: str | None | Literal[False] = None,
        version: str | None = None,
        roaming: bool = False,
        multipath: bool = False,
        opinion: bool = True,
    ):
        """
        Create a new platform directory.

        :param appname: See `appname`.
        :param appauthor: See `appauthor`.
        :param version: See `version`.
        :param roaming: See `roaming`.
        :param multipath: See `multipath`.
        :param opinion: See `opinion`.
        """
        self.appname = appname  #: The name of application.
        self.appauthor = appauthor
        """
        The name of the app author or distributing body for this application. Typically, it is the owning company name.
        Defaults to `appname`. You may pass ``False`` to disable it.
        """
        self.version = version
        """
        An optional version path element to append to the path. You might want to use this if you want multiple versions
        of your app to be able to run independently. If used, this would typically be ``<major>.<minor>``.
        """
        self.roaming = roaming
        """
        Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
        for roaming profiles, this user data will be synced on login (see
        `here <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
        """
        self.multipath = multipath
        """
        An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be
        returned. By default, the first item would only be returned.
        """
        self.opinion = opinion  #: A flag to indicating to use opinionated values.

    def _append_app_name_and_version(self, *base: str) -> str:
        # Join base[0] with any extra segments, then appname and version,
        # each appended only when set (falsy values are skipped).
        params = list(base[1:])
        if self.appname:
            params.append(self.appname)
        if self.version:
            params.append(self.version)
        return os.path.join(base[0], *params)

    @property
    @abstractmethod
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user"""

    @property
    @abstractmethod
    def site_data_dir(self) -> str:
        """:return: data directory shared by users"""

    @property
    @abstractmethod
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user"""

    @property
    @abstractmethod
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users"""

    @property
    @abstractmethod
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user"""

    @property
    @abstractmethod
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user"""

    @property
    @abstractmethod
    def user_log_dir(self) -> str:
        """:return: log directory tied to the user"""

    @property
    @abstractmethod
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user"""

    @property
    @abstractmethod
    def user_runtime_dir(self) -> str:
        """:return: runtime directory tied to the user"""

    @property
    def user_data_path(self) -> Path:
        """:return: data path tied to the user"""
        return Path(self.user_data_dir)

    @property
    def site_data_path(self) -> Path:
        """:return: data path shared by users"""
        return Path(self.site_data_dir)

    @property
    def user_config_path(self) -> Path:
        """:return: config path tied to the user"""
        return Path(self.user_config_dir)

    @property
    def site_config_path(self) -> Path:
        """:return: config path shared by the users"""
        return Path(self.site_config_dir)

    @property
    def user_cache_path(self) -> Path:
        """:return: cache path tied to the user"""
        return Path(self.user_cache_dir)

    @property
    def user_state_path(self) -> Path:
        """:return: state path tied to the user"""
        return Path(self.user_state_dir)

    @property
    def user_log_path(self) -> Path:
        """:return: log path tied to the user"""
        return Path(self.user_log_dir)

    @property
    def user_documents_path(self) -> Path:
        """:return: documents path tied to the user"""
        return Path(self.user_documents_dir)

    @property
    def user_runtime_path(self) -> Path:
        """:return: runtime path tied to the user"""
        return Path(self.user_runtime_dir)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/macos.py
ADDED
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
|
5 |
+
from .api import PlatformDirsABC
|
6 |
+
|
7 |
+
|
8 |
+
class MacOS(PlatformDirsABC):
    """
    Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
    <https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>` and
    `version <platformdirs.api.PlatformDirsABC.version>`.
    """

    @property
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/"))

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``"""
        return self._append_app_name_and_version("/Library/Application Support")

    @property
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``"""
        return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/"))

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``"""
        # NOTE(review): _append_app_name_and_version also appends $version when set,
        # so the docstring example omits a possible trailing version segment.
        return self._append_app_name_and_version("/Library/Preferences")

    @property
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))

    @property
    def user_documents_dir(self) -> str:
        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
        return os.path.expanduser("~/Documents")

    @property
    def user_runtime_dir(self) -> str:
        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))
|
60 |
+
|
61 |
+
|
62 |
+
# Public API of this module.
__all__ = [
    "MacOS",
]
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/unix.py
ADDED
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
from configparser import ConfigParser
|
6 |
+
from pathlib import Path
|
7 |
+
|
8 |
+
from .api import PlatformDirsABC
|
9 |
+
|
10 |
+
if sys.platform.startswith("linux"):  # pragma: no branch # no op check, only to please the type checker
    from os import getuid
else:

    def getuid() -> int:
        # Stub for non-Linux platforms; user_runtime_dir only calls getuid() on Linux,
        # so reaching this is a programming error.
        raise RuntimeError("should only be used on Linux")
|
16 |
+
|
17 |
+
|
18 |
+
class Unix(PlatformDirsABC):
    """
    On Unix/Linux, we follow the
    `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. The spec allows
    overriding directories with environment variables. The examples shown are the default values, alongside the name of
    the environment variable that overrides them. Makes use of the
    `appname <platformdirs.api.PlatformDirsABC.appname>`,
    `version <platformdirs.api.PlatformDirsABC.version>`,
    `multipath <platformdirs.api.PlatformDirsABC.multipath>`,
    `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
    """

    @property
    def user_data_dir(self) -> str:
        """
        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
         ``$XDG_DATA_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_DATA_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.local/share")
        return self._append_app_name_and_version(path)

    @property
    def site_data_dir(self) -> str:
        """
        :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
         enabled and ``XDG_DATA_DIRS`` is set and a multi path the response is also a multi path separated by the OS
         path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
        """
        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
        path = os.environ.get("XDG_DATA_DIRS", "")
        if not path.strip():
            path = f"/usr/local/share{os.pathsep}/usr/share"
        return self._with_multi_path(path)

    def _with_multi_path(self, path: str) -> str:
        # Split an os.pathsep-joined list, keep only the first entry unless
        # multipath is set, then append appname/version to every entry kept.
        path_list = path.split(os.pathsep)
        if not self.multipath:
            path_list = path_list[0:1]
        path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list]
        return os.pathsep.join(path_list)

    @property
    def user_config_dir(self) -> str:
        """
        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
         ``$XDG_CONFIG_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_CONFIG_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.config")
        return self._append_app_name_and_version(path)

    @property
    def site_config_dir(self) -> str:
        """
        :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
         is enabled and ``XDG_CONFIG_DIRS`` is set and a multi path the response is also a multi path separated by the
         OS path separator), e.g. ``/etc/xdg/$appname/$version``
        """
        # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
        path = os.environ.get("XDG_CONFIG_DIRS", "")
        if not path.strip():
            path = "/etc/xdg"
        return self._with_multi_path(path)

    @property
    def user_cache_dir(self) -> str:
        """
        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
         ``$XDG_CACHE_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_CACHE_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.cache")
        return self._append_app_name_and_version(path)

    @property
    def user_state_dir(self) -> str:
        """
        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
         ``$XDG_STATE_HOME/$appname/$version``
        """
        path = os.environ.get("XDG_STATE_HOME", "")
        if not path.strip():
            path = os.path.expanduser("~/.local/state")
        return self._append_app_name_and_version(path)

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it
        """
        path = self.user_cache_dir
        if self.opinion:
            path = os.path.join(path, "log")
        return path

    @property
    def user_documents_dir(self) -> str:
        """
        :return: documents directory tied to the user, e.g. ``~/Documents``
        """
        documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR")
        if documents_dir is None:
            documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip()
            if not documents_dir:
                documents_dir = os.path.expanduser("~/Documents")

        return documents_dir

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
         ``$XDG_RUNTIME_DIR/$appname/$version``
        """
        path = os.environ.get("XDG_RUNTIME_DIR", "")
        if not path.strip():
            path = f"/run/user/{getuid()}"
        return self._append_app_name_and_version(path)

    @property
    def site_data_path(self) -> Path:
        """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_data_dir)

    @property
    def site_config_path(self) -> Path:
        """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
        return self._first_item_as_path_if_multipath(self.site_config_dir)

    def _first_item_as_path_if_multipath(self, directory: str) -> Path:
        if self.multipath:
            # If multipath is True, the first path is returned.
            directory = directory.split(os.pathsep)[0]
        return Path(directory)
|
156 |
+
|
157 |
+
|
158 |
+
def _get_user_dirs_folder(key: str) -> str | None:
    """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/

    :param key: config key to look up, e.g. ``XDG_DOCUMENTS_DIR``
    :return: the configured path with ``$HOME`` expanded, or ``None`` if the
     config file does not exist or does not contain the key
    """
    user_dirs_config_path = os.path.join(Unix().user_config_dir, "user-dirs.dirs")
    if os.path.exists(user_dirs_config_path):
        parser = ConfigParser()

        with open(user_dirs_config_path) as stream:
            # Add fake section header, so ConfigParser doesn't complain
            parser.read_string(f"[top]\n{stream.read()}")

        if key not in parser["top"]:
            return None

        path = parser["top"][key].strip('"')
        # Handle relative home paths
        path = path.replace("$HOME", os.path.expanduser("~"))
        return path

    return None
|
177 |
+
|
178 |
+
|
179 |
+
# Public API of this module.
__all__ = [
    "Unix",
]
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/version.py
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
""" Version information """
|
2 |
+
|
3 |
+
__version__ = "2.4.1"
|
4 |
+
__version_info__ = (2, 4, 1)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/platformdirs/windows.py
ADDED
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import ctypes
|
4 |
+
import os
|
5 |
+
from functools import lru_cache
|
6 |
+
from typing import Callable
|
7 |
+
|
8 |
+
from .api import PlatformDirsABC
|
9 |
+
|
10 |
+
|
11 |
+
class Windows(PlatformDirsABC):
    """Windows directory resolution per `MSDN on where to store app data files
    <http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120>`_.
    Honours the
    `appname <platformdirs.api.PlatformDirsABC.appname>`,
    `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
    `version <platformdirs.api.PlatformDirsABC.version>`,
    `roaming <platformdirs.api.PlatformDirsABC.roaming>` and
    `opinion <platformdirs.api.PlatformDirsABC.opinion>` settings."""

    @property
    def user_data_dir(self) -> str:
        """
        :return: data directory tied to the user, e.g.
         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
        """
        folder = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
        return self._append_parts(os.path.normpath(get_win_folder(folder)))

    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
        # Suffix order: [author,] appname, then (optionally) the opinionated
        # sub-folder and the version component.
        tail: list[str] = []
        if self.appname:
            if self.appauthor is not False:
                tail.append(self.appauthor or self.appname)
            tail.append(self.appname)
        if opinion_value is not None and self.opinion:
            tail.append(opinion_value)
        if self.version:
            tail.append(self.version)
        return os.path.join(path, *tail)

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
        base = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
        return self._append_parts(base)

    @property
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `site_data_dir`"""
        return self.site_data_dir

    @property
    def user_cache_dir(self) -> str:
        """
        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
        """
        base = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
        return self._append_parts(base, opinion_value="Cache")

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it
        """
        base = self.user_data_dir
        return os.path.join(base, "Logs") if self.opinion else base

    @property
    def user_documents_dir(self) -> str:
        """
        :return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``
        """
        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, e.g.
         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
        """
        base = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))
        return self._append_parts(base)
|
100 |
+
|
101 |
+
|
102 |
+
def get_win_folder_from_env_vars(csidl_name: str) -> str:
    """Get folder from environment variables.

    :param csidl_name: one of the supported ``CSIDL_*`` constant names.
    :return: the folder path read from the matching environment variable.
    :raises ValueError: if the CSIDL name is unknown, or the matching
        environment variable is not set.
    """
    if csidl_name == "CSIDL_PERSONAL":  # does not have an environment name
        # Raise ValueError (not a bare KeyError from os.environ[...]) so a
        # missing USERPROFILE fails the same way as the unset-variable
        # checks below.
        profile = os.environ.get("USERPROFILE")
        if profile is None:
            raise ValueError("Unset environment variable: USERPROFILE")
        return os.path.join(os.path.normpath(profile), "Documents")

    env_var_name = {
        "CSIDL_APPDATA": "APPDATA",
        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
    }.get(csidl_name)
    if env_var_name is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
    result = os.environ.get(env_var_name)
    if result is None:
        raise ValueError(f"Unset environment variable: {env_var_name}")
    return result
|
118 |
+
|
119 |
+
|
120 |
+
def get_win_folder_from_registry(csidl_name: str) -> str:
    """Look the folder up in the per-user "Shell Folders" registry key.

    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    registry_value_names = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
        "CSIDL_PERSONAL": "Personal",
    }
    value_name = registry_value_names.get(csidl_name)
    if value_name is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")

    # Imported lazily: winreg only exists on Windows.
    import winreg

    shell_folders_key = winreg.OpenKey(
        winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders",
    )
    value, _ = winreg.QueryValueEx(shell_folders_key, value_name)
    return str(value)
|
141 |
+
|
142 |
+
|
143 |
+
def get_win_folder_via_ctypes(csidl_name: str) -> str:
    """Resolve a CSIDL folder by calling the Win32 shell API through ctypes."""
    known_constants = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
        "CSIDL_PERSONAL": 5,
    }
    code = known_constants.get(csidl_name)
    if code is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")

    path_buf = ctypes.create_unicode_buffer(1024)
    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
    windll.shell32.SHGetFolderPathW(None, code, None, 0, path_buf)

    # If the path contains characters outside Latin-1, fall back to the
    # legacy 8.3 "short" form of the path, which is pure ASCII.
    if any(ord(ch) > 255 for ch in path_buf):
        short_buf = ctypes.create_unicode_buffer(1024)
        if windll.kernel32.GetShortPathNameW(path_buf.value, short_buf, 1024):
            path_buf = short_buf

    return path_buf.value
|
165 |
+
|
166 |
+
|
167 |
+
def _pick_get_win_folder() -> Callable[[str], str]:
    """Choose the best folder-resolution strategy available on this system.

    Preference order: native shell API via ctypes, then the registry,
    then environment variables as a last resort.
    """
    if hasattr(ctypes, "windll"):
        return get_win_folder_via_ctypes
    try:
        import winreg  # noqa: F401
    except ImportError:
        return get_win_folder_from_env_vars
    return get_win_folder_from_registry
|
176 |
+
|
177 |
+
|
178 |
+
# Pick the resolution strategy once at import time and memoise its results;
# the set of distinct CSIDL names is tiny, so an unbounded cache is safe.
_get_win_folder_impl = _pick_get_win_folder()
get_win_folder = lru_cache(maxsize=None)(_get_win_folder_impl)

__all__ = ["Windows"]
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc
ADDED
Binary file (548 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc
ADDED
Binary file (1.3 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc
ADDED
Binary file (17 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc
ADDED
Binary file (6.65 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc
ADDED
Binary file (8.09 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc
ADDED
Binary file (632 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (1.67 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc
ADDED
Binary file (5.24 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc
ADDED
Binary file (987 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/models.cpython-310.pyc
ADDED
Binary file (24.3 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-310.pyc
ADDED
Binary file (501 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc
ADDED
Binary file (19.6 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc
ADDED
Binary file (4.66 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-310.pyc
ADDED
Binary file (4.45 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/__version__.py
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Package metadata for the vendored requests distribution.

__title__ = "requests"
__description__ = "Python HTTP for Humans."
__url__ = "https://requests.readthedocs.io"
__version__ = "2.27.1"
__build__ = 0x022701
__author__ = "Kenneth Reitz"
__author_email__ = "[email protected]"
__license__ = "Apache 2.0"
__copyright__ = "Copyright 2022 Kenneth Reitz"
__cake__ = "\u2728 \U0001f370 \u2728"
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/adapters.py
ADDED
@@ -0,0 +1,538 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.adapters
|
5 |
+
~~~~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module contains the transport adapters that Requests uses to define
|
8 |
+
and maintain connections.
|
9 |
+
"""
|
10 |
+
|
11 |
+
import os.path
|
12 |
+
import socket
|
13 |
+
|
14 |
+
from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
|
15 |
+
from pip._vendor.urllib3.response import HTTPResponse
|
16 |
+
from pip._vendor.urllib3.util import parse_url
|
17 |
+
from pip._vendor.urllib3.util import Timeout as TimeoutSauce
|
18 |
+
from pip._vendor.urllib3.util.retry import Retry
|
19 |
+
from pip._vendor.urllib3.exceptions import ClosedPoolError
|
20 |
+
from pip._vendor.urllib3.exceptions import ConnectTimeoutError
|
21 |
+
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
|
22 |
+
from pip._vendor.urllib3.exceptions import InvalidHeader as _InvalidHeader
|
23 |
+
from pip._vendor.urllib3.exceptions import MaxRetryError
|
24 |
+
from pip._vendor.urllib3.exceptions import NewConnectionError
|
25 |
+
from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
|
26 |
+
from pip._vendor.urllib3.exceptions import ProtocolError
|
27 |
+
from pip._vendor.urllib3.exceptions import ReadTimeoutError
|
28 |
+
from pip._vendor.urllib3.exceptions import SSLError as _SSLError
|
29 |
+
from pip._vendor.urllib3.exceptions import ResponseError
|
30 |
+
from pip._vendor.urllib3.exceptions import LocationValueError
|
31 |
+
|
32 |
+
from .models import Response
|
33 |
+
from .compat import urlparse, basestring
|
34 |
+
from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
|
35 |
+
get_encoding_from_headers, prepend_scheme_if_needed,
|
36 |
+
get_auth_from_url, urldefragauth, select_proxy)
|
37 |
+
from .structures import CaseInsensitiveDict
|
38 |
+
from .cookies import extract_cookies_to_jar
|
39 |
+
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
|
40 |
+
ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
|
41 |
+
InvalidURL, InvalidHeader)
|
42 |
+
from .auth import _basic_auth_str
|
43 |
+
|
44 |
+
try:
    from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    # SOCKS support is optional; install a stub that fails loudly only
    # when a SOCKS proxy is actually requested.
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")

# Defaults handed to urllib3's connection pooling.
DEFAULT_POOLBLOCK = False  # don't block when the pool has no free connections
DEFAULT_POOLSIZE = 10      # pools cached / connections saved per pool
DEFAULT_RETRIES = 0        # no retries by default
DEFAULT_POOL_TIMEOUT = None  # timeout used when checking out a low-level conn in send()
|
54 |
+
|
55 |
+
|
56 |
+
class BaseAdapter(object):
    """The Base Transport Adapter.

    Defines the interface every transport adapter must implement:
    :meth:`send` to dispatch a prepared request and :meth:`close` to
    release any held resources. Both raise ``NotImplementedError`` here.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Send a PreparedRequest and return the resulting Response.

        Subclasses must override this.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Release adapter-specific resources. Subclasses must override this."""
        raise NotImplementedError
|
83 |
+
|
84 |
+
|
85 |
+
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # Attributes preserved across pickling; see __getstate__/__setstate__.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            # Default: zero retries, and never retry once a read has started.
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        # Lazily-populated cache: proxy URL -> urllib3 ProxyManager.
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        # Pickle only the attributes in __attrs__; pool/proxy managers hold
        # live connections and are rebuilt on unpickle.
        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            # Reuse the cached manager for this proxy URL.
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager

    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                # Fall back to the CA bundle shipped with requests/certifi.
                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {}".format(cert_loc))

            conn.cert_reqs = 'CERT_REQUIRED'

            # A directory of certs vs a single bundle file.
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            # Verification disabled (or plain http): clear any cert settings.
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # cert may be a (cert, key) pair or a single combined file path.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {}".format(conn.cert_file))
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {}".format(conn.key_file))

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL("Please check proxy URL. It is malformed"
                                      " and could be missing the host.")
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        """
        self.poolmanager.clear()
        for proxy in self.proxy_manager.values():
            proxy.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        # https through a proxy is tunnelled (CONNECT), so only plain-http
        # proxied requests need the absolute URL in the request line.
        is_proxied_http_request = (proxy and scheme != 'https')
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')

        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        :rtype: dict
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)

        # Use chunked transfer encoding when there is a body but no
        # Content-Length header to announce its size.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    skip_host = 'Host' in request.headers
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True,
                                        skip_host=skip_host)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Emit each piece of the body as an HTTP/1.1 chunk:
                    # hex length, CRLF, data, CRLF; then the terminating
                    # zero-length chunk.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7, use buffering of HTTP responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 3.3+
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            elif isinstance(e, _InvalidHeader):
                raise InvalidHeader(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/api.py
ADDED
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.api
|
5 |
+
~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module implements the Requests API.
|
8 |
+
|
9 |
+
:copyright: (c) 2012 by Kenneth Reitz.
|
10 |
+
:license: Apache2, see LICENSE for more details.
|
11 |
+
"""
|
12 |
+
|
13 |
+
from . import sessions
|
14 |
+
|
15 |
+
|
16 |
+
def request(method, url, **kwargs):
    """Construct a :class:`Request <Request>`, send it, and return the
    :class:`Response <Response>`.

    :param method: HTTP method for the new :class:`Request` object:
        ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) query-string payload — dict, list of tuples,
        or bytes.
    :param data: (optional) request body — dict, list of tuples, bytes, or
        a file-like object.
    :param json: (optional) a JSON-serializable Python object to send in the
        body of the :class:`Request`.
    :param headers: (optional) dict of HTTP headers to send.
    :param cookies: (optional) dict or CookieJar object to send.
    :param files: (optional) dict of ``'name': file-like-objects`` (or
        ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, a 3-tuple
        ``('filename', fileobj, 'content_type')`` or a 4-tuple
        ``('filename', fileobj, 'content_type', custom_headers)``, where
        ``'content-type'`` is a string defining the content type of the given
        file and ``custom_headers`` a dict-like object of extra headers to
        add for the file.
    :param auth: (optional) auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) seconds to wait for the server to send data
        before giving up — a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable
        GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) dict mapping protocol to the URL of the proxy.
    :param verify: (optional) either a boolean controlling TLS certificate
        verification, or a string path to a CA bundle to use. Defaults to
        ``True``.
    :param stream: (optional) if ``False``, the response content is downloaded
        immediately.
    :param cert: (optional) if a string, path to an ssl client cert file
        (.pem); if a tuple, a ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'https://httpbin.org/get')
      >>> req
      <Response [200]>
    """

    # A throwaway Session scoped by ``with`` guarantees its sockets are
    # released on exit, avoiding ResourceWarnings and apparent memory leaks.
    with sessions.Session() as session:
        return session.request(method=method, url=url, **kwargs)
|
62 |
+
|
63 |
+
|
64 |
+
def get(url, params=None, **kwargs):
    r"""Send a GET request and return the :class:`Response <Response>`.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) query-string payload — dict, list of tuples,
        or bytes.
    :param \*\*kwargs: any additional keyword arguments accepted by
        ``request``.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('get', url, params=params, **kwargs)
|
76 |
+
|
77 |
+
|
78 |
+
def options(url, **kwargs):
    r"""Send an OPTIONS request and return the :class:`Response <Response>`.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: any additional keyword arguments accepted by
        ``request``.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('options', url, **kwargs)
|
88 |
+
|
89 |
+
|
90 |
+
def head(url, **kwargs):
    r"""Send a HEAD request and return the :class:`Response <Response>`.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: any additional keyword arguments accepted by
        ``request``. If ``allow_redirects`` is not provided, it is set to
        ``False`` (as opposed to the default :meth:`request` behavior).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # HEAD defaults to NOT following redirects, unlike every other verb.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)
|
103 |
+
|
104 |
+
|
105 |
+
def post(url, data=None, json=None, **kwargs):
    r"""Send a POST request and return the :class:`Response <Response>`.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) request body — dict, list of tuples, bytes, or
        a file-like object.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: any additional keyword arguments accepted by
        ``request``.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('post', url, data=data, json=json, **kwargs)
|
118 |
+
|
119 |
+
|
120 |
+
def put(url, data=None, json=None, **kwargs):
    r"""Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # ``json`` was already documented and accepted via **kwargs; promoting it
    # to an explicit keyword (default None) keeps the signature consistent
    # with ``post`` and is backward compatible for all existing callers.
    return request('put', url, data=data, json=json, **kwargs)
|
133 |
+
|
134 |
+
|
135 |
+
def patch(url, data=None, json=None, **kwargs):
    r"""Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # ``json`` was already documented and accepted via **kwargs; promoting it
    # to an explicit keyword (default None) keeps the signature consistent
    # with ``post`` and is backward compatible for all existing callers.
    return request('patch', url, data=data, json=json, **kwargs)
|
148 |
+
|
149 |
+
|
150 |
+
def delete(url, **kwargs):
    r"""Send a DELETE request and return the :class:`Response <Response>`.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: any additional keyword arguments accepted by
        ``request``.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('delete', url, **kwargs)
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/certs.py
ADDED
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
|
4 |
+
"""
|
5 |
+
requests.certs
|
6 |
+
~~~~~~~~~~~~~~
|
7 |
+
|
8 |
+
This module returns the preferred default CA certificate bundle. There is
|
9 |
+
only one — the one from the certifi package.
|
10 |
+
|
11 |
+
If you are packaging Requests, e.g., for a Linux distribution or a managed
|
12 |
+
environment, you can change the definition of where() to return a separately
|
13 |
+
packaged CA bundle.
|
14 |
+
"""
|
15 |
+
from pip._vendor.certifi import where
|
16 |
+
|
17 |
+
# When executed as a script, print the path of the CA bundle that
# ``where()`` (from the vendored certifi package) resolves to.
if __name__ == '__main__':
    print(where())
|
env-llmeval/lib/python3.10/site-packages/pip/_vendor/requests/hooks.py
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
"""
|
4 |
+
requests.hooks
|
5 |
+
~~~~~~~~~~~~~~
|
6 |
+
|
7 |
+
This module provides the capabilities for the Requests hooks system.
|
8 |
+
|
9 |
+
Available hooks:
|
10 |
+
|
11 |
+
``response``:
|
12 |
+
The response generated from a Request.
|
13 |
+
"""
|
14 |
+
# Names of the hook events Requests dispatches; currently only 'response'.
HOOKS = ['response']
|
15 |
+
|
16 |
+
|
17 |
+
def default_hooks():
    """Return a fresh hooks registry: one empty callback list per event in HOOKS."""
    registry = {}
    for event in HOOKS:
        registry[event] = []
    return registry
|
19 |
+
|
20 |
+
# TODO: response is the only one
|
21 |
+
|
22 |
+
|
23 |
+
def dispatch_hook(key, hooks, hook_data, **kwargs):
    """Dispatches a hook dictionary on a given piece of data.

    Each hook registered under ``key`` is called with ``hook_data`` (and any
    extra keyword arguments); a non-None return value replaces ``hook_data``
    for the next hook. The final value is returned.
    """
    registered = (hooks or {}).get(key)
    if registered:
        # A single bare callable is accepted as shorthand for a one-item list.
        if callable(registered):
            registered = [registered]
        for hook in registered:
            result = hook(hook_data, **kwargs)
            if result is not None:
                hook_data = result
    return hook_data
|