Add files using upload-large-folder tool
Browse files
This view is limited to 50 files because it contains too many changes.
See raw diff
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/core.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dist.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/errors.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/extension.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/log.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/util.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/version.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/bdist_msi.py +749 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/clean.py +76 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/install.py +721 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/install_egg_info.py +84 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/py37compat.py +30 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/register.py +304 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/upload.py +214 -0
- llmeval-env/lib/python3.10/site-packages/transformers/activations.py +239 -0
- llmeval-env/lib/python3.10/site-packages/transformers/activations_tf.py +147 -0
- llmeval-env/lib/python3.10/site-packages/transformers/audio_utils.py +825 -0
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-310.pyc  ADDED  Binary file (563 Bytes)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-310.pyc  ADDED  Binary file (14 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/core.cpython-310.pyc  ADDED  Binary file (7.1 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-310.pyc  ADDED  Binary file (2.78 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-310.pyc  ADDED  Binary file (5.89 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/dist.cpython-310.pyc  ADDED  Binary file (34.1 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/errors.cpython-310.pyc  ADDED  Binary file (5 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/extension.cpython-310.pyc  ADDED  Binary file (7.01 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-310.pyc  ADDED  Binary file (10.6 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-310.pyc  ADDED  Binary file (5.98 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-310.pyc  ADDED  Binary file (10.8 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/log.cpython-310.pyc  ADDED  Binary file (2.31 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-310.pyc  ADDED  Binary file (14.8 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/py35compat.cpython-310.pyc  ADDED  Binary file (633 Bytes)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-310.pyc  ADDED  Binary file (430 Bytes)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-310.pyc  ADDED  Binary file (2.9 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-310.pyc  ADDED  Binary file (8.48 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-310.pyc  ADDED  Binary file (6.81 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/util.cpython-310.pyc  ADDED  Binary file (14.7 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/__pycache__/version.cpython-310.pyc  ADDED  Binary file (7.85 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-310.pyc  ADDED  Binary file (3.67 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-310.pyc  ADDED  Binary file (3.65 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_msi.cpython-310.pyc  ADDED  Binary file (19.7 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-310.pyc  ADDED  Binary file (12.3 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/bdist_wininst.cpython-310.pyc  ADDED  Binary file (8.64 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-310.pyc  ADDED  Binary file (3.9 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-310.pyc  ADDED  Binary file (16.2 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-310.pyc  ADDED  Binary file (9.89 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-310.pyc  ADDED  Binary file (4.02 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-310.pyc  ADDED  Binary file (5.01 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-310.pyc  ADDED  Binary file (10.3 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-310.pyc  ADDED  Binary file (15.3 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-310.pyc  ADDED  Binary file (2.35 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-310.pyc  ADDED  Binary file (3.32 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-310.pyc  ADDED  Binary file (5.18 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-310.pyc  ADDED  Binary file (2.2 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-310.pyc  ADDED  Binary file (1.05 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-310.pyc  ADDED  Binary file (8.69 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-310.pyc  ADDED  Binary file (14.5 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-310.pyc  ADDED  Binary file (5.38 kB)
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/bdist_msi.py
ADDED
@@ -0,0 +1,749 @@
# Copyright (C) 2005, 2006 Martin von Löwis
# Licensed to PSF under a Contributor Agreement.
# The bdist_wininst command proper
# based on bdist_wininst
"""
Implements the bdist_msi command.
"""

import os
import sys
import warnings
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.sysconfig import get_python_version
from distutils.version import StrictVersion
from distutils.errors import DistutilsOptionError
from distutils.util import get_platform
from distutils import log
import msilib
from msilib import schema, sequence, text
from msilib import Directory, Feature, Dialog, add_data

class PyDialog(Dialog):
    """Dialog class with a fixed layout: controls at the top, then a ruler,
    then a list of buttons: back, next, cancel. Optionally a bitmap at the
    left."""
    def __init__(self, *args, **kw):
        """Dialog(database, name, x, y, w, h, attributes, title, first,
        default, cancel, bitmap=true)"""
        Dialog.__init__(self, *args)
        ruler = self.h - 36
        bmwidth = 152*ruler/328
        #if kw.get("bitmap", True):
        #    self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
        self.line("BottomLine", 0, ruler, self.w, 0)

    def title(self, title):
        "Set the title text of the dialog at the top."
        # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
        # text, in VerdanaBold10
        self.text("Title", 15, 10, 320, 60, 0x30003,
                  r"{\VerdanaBold10}%s" % title)

    def back(self, title, next, name = "Back", active = 1):
        """Add a back button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        if active:
            flags = 3 # Visible|Enabled
        else:
            flags = 1 # Visible
        return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)

    def cancel(self, title, next, name = "Cancel", active = 1):
        """Add a cancel button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        if active:
            flags = 3 # Visible|Enabled
        else:
            flags = 1 # Visible
        return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)

    def next(self, title, next, name = "Next", active = 1):
        """Add a Next button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        if active:
            flags = 3 # Visible|Enabled
        else:
            flags = 1 # Visible
        return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)

    def xbutton(self, name, title, next, xpos):
        """Add a button with a given title, the tab-next button,
        its name in the Control table, giving its x position; the
        y-position is aligned with the other buttons.

        Return the button, so that events can be associated"""
        return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)

class bdist_msi(Command):

    description = "create a Microsoft Installer (.msi) binary distribution"

    user_options = [('bdist-dir=', None,
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('target-version=', None,
                     "require a specific python version" +
                     " on the target system"),
                    ('no-target-compile', 'c',
                     "do not compile .py to .pyc on the target system"),
                    ('no-target-optimize', 'o',
                     "do not compile .py to .pyo (optimized) "
                     "on the target system"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('install-script=', None,
                     "basename of installation script to be run after "
                     "installation or before deinstallation"),
                    ('pre-install-script=', None,
                     "Fully qualified filename of a script to be run before "
                     "any files are installed. This script need not be in the "
                     "distribution"),
                   ]

    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                       'skip-build']

    all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
                    '2.5', '2.6', '2.7', '2.8', '2.9',
                    '3.0', '3.1', '3.2', '3.3', '3.4',
                    '3.5', '3.6', '3.7', '3.8', '3.9']
    other_version = 'X'

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        warnings.warn("bdist_msi command is deprecated since Python 3.9, "
                      "use bdist_wheel (wheel packages) instead",
                      DeprecationWarning, 2)

    def initialize_options(self):
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.no_target_compile = 0
        self.no_target_optimize = 0
        self.target_version = None
        self.dist_dir = None
        self.skip_build = None
        self.install_script = None
        self.pre_install_script = None
        self.versions = None

    def finalize_options(self):
        self.set_undefined_options('bdist', ('skip_build', 'skip_build'))

        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'msi')

        short_version = get_python_version()
        if (not self.target_version) and self.distribution.has_ext_modules():
            self.target_version = short_version

        if self.target_version:
            self.versions = [self.target_version]
            if not self.skip_build and self.distribution.has_ext_modules()\
               and self.target_version != short_version:
                raise DistutilsOptionError(
                      "target version can only be %s, or the '--skip-build'"
                      " option must be specified" % (short_version,))
        else:
            self.versions = list(self.all_versions)

        self.set_undefined_options('bdist',
                                   ('dist_dir', 'dist_dir'),
                                   ('plat_name', 'plat_name'),
                                   )

        if self.pre_install_script:
            raise DistutilsOptionError(
                  "the pre-install-script feature is not yet implemented")

        if self.install_script:
            for script in self.distribution.scripts:
                if self.install_script == os.path.basename(script):
                    break
            else:
                raise DistutilsOptionError(
                      "install_script '%s' not found in scripts"
                      % self.install_script)
        self.install_script_key = None

    def run(self):
        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.prefix = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        install_lib = self.reinitialize_command('install_lib')
        # we do not want to include pyc or pyo files
        install_lib.compile = 0
        install_lib.optimize = 0

        if self.distribution.has_ext_modules():
            # If we are building an installer for a Python version other
            # than the one we are currently running, then we need to ensure
            # our build_lib reflects the other Python version rather than ours.
            # Note that for target_version!=sys.version, we must have skipped the
            # build step, so there is no issue with enforcing the build of this
            # version.
            target_version = self.target_version
            if not target_version:
                assert self.skip_build, "Should have already checked this"
                target_version = '%d.%d' % sys.version_info[:2]
            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
            build = self.get_finalized_command('build')
            build.build_lib = os.path.join(build.build_base,
                                           'lib' + plat_specifier)

        log.info("installing to %s", self.bdist_dir)
        install.ensure_finalized()

        # avoid warning of 'install_lib' about installing
        # into a directory not in sys.path
        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))

        install.run()

        del sys.path[0]

        self.mkpath(self.dist_dir)
        fullname = self.distribution.get_fullname()
        installer_name = self.get_installer_filename(fullname)
        installer_name = os.path.abspath(installer_name)
        if os.path.exists(installer_name): os.unlink(installer_name)

        metadata = self.distribution.metadata
        author = metadata.author
        if not author:
            author = metadata.maintainer
        if not author:
            author = "UNKNOWN"
        version = metadata.get_version()
        # ProductVersion must be strictly numeric
        # XXX need to deal with prerelease versions
        sversion = "%d.%d.%d" % StrictVersion(version).version
        # Prefix ProductName with Python x.y, so that
        # it sorts together with the other Python packages
        # in Add-Remove-Programs (APR)
        fullname = self.distribution.get_fullname()
        if self.target_version:
            product_name = "Python %s %s" % (self.target_version, fullname)
        else:
            product_name = "Python %s" % (fullname)
        self.db = msilib.init_database(installer_name, schema,
                                       product_name, msilib.gen_uuid(),
                                       sversion, author)
        msilib.add_tables(self.db, sequence)
        props = [('DistVersion', version)]
        email = metadata.author_email or metadata.maintainer_email
        if email:
            props.append(("ARPCONTACT", email))
        if metadata.url:
            props.append(("ARPURLINFOABOUT", metadata.url))
        if props:
            add_data(self.db, 'Property', props)

        self.add_find_python()
        self.add_files()
        self.add_scripts()
        self.add_ui()
        self.db.Commit()

        if hasattr(self.distribution, 'dist_files'):
            tup = 'bdist_msi', self.target_version or 'any', fullname
            self.distribution.dist_files.append(tup)

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

    def add_files(self):
        db = self.db
        cab = msilib.CAB("distfiles")
        rootdir = os.path.abspath(self.bdist_dir)

        root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
        f = Feature(db, "Python", "Python", "Everything",
                    0, 1, directory="TARGETDIR")

        items = [(f, root, '')]
        for version in self.versions + [self.other_version]:
            target = "TARGETDIR" + version
            name = default = "Python" + version
            desc = "Everything"
            if version is self.other_version:
                title = "Python from another location"
                level = 2
            else:
                title = "Python %s from registry" % version
                level = 1
            f = Feature(db, name, title, desc, 1, level, directory=target)
            dir = Directory(db, cab, root, rootdir, target, default)
            items.append((f, dir, version))
        db.Commit()

        seen = {}
        for feature, dir, version in items:
            todo = [dir]
            while todo:
                dir = todo.pop()
                for file in os.listdir(dir.absolute):
                    afile = os.path.join(dir.absolute, file)
                    if os.path.isdir(afile):
                        short = "%s|%s" % (dir.make_short(file), file)
                        default = file + version
                        newdir = Directory(db, cab, dir, file, default, short)
                        todo.append(newdir)
                    else:
                        if not dir.component:
                            dir.start_component(dir.logical, feature, 0)
                        if afile not in seen:
                            key = seen[afile] = dir.add_file(file)
                            if file==self.install_script:
                                if self.install_script_key:
                                    raise DistutilsOptionError(
                                          "Multiple files with name %s" % file)
                                self.install_script_key = '[#%s]' % key
                        else:
                            key = seen[afile]
                            add_data(self.db, "DuplicateFile",
                                     [(key + version, dir.component, key, None, dir.logical)])
            db.Commit()
        cab.commit(db)

    def add_find_python(self):
        """Adds code to the installer to compute the location of Python.

        Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
        registry for each version of Python.

        Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
        else from PYTHON.MACHINE.X.Y.

        Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""

        start = 402
        for ver in self.versions:
            install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
            machine_reg = "python.machine." + ver
            user_reg = "python.user." + ver
            machine_prop = "PYTHON.MACHINE." + ver
            user_prop = "PYTHON.USER." + ver
            machine_action = "PythonFromMachine" + ver
            user_action = "PythonFromUser" + ver
            exe_action = "PythonExe" + ver
            target_dir_prop = "TARGETDIR" + ver
            exe_prop = "PYTHON" + ver
            if msilib.Win64:
                # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
                Type = 2+16
            else:
                Type = 2
            add_data(self.db, "RegLocator",
                     [(machine_reg, 2, install_path, None, Type),
                      (user_reg, 1, install_path, None, Type)])
            add_data(self.db, "AppSearch",
                     [(machine_prop, machine_reg),
                      (user_prop, user_reg)])
            add_data(self.db, "CustomAction",
                     [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
                      (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
                      (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
                     ])
            add_data(self.db, "InstallExecuteSequence",
                     [(machine_action, machine_prop, start),
                      (user_action, user_prop, start + 1),
                      (exe_action, None, start + 2),
                     ])
            add_data(self.db, "InstallUISequence",
                     [(machine_action, machine_prop, start),
                      (user_action, user_prop, start + 1),
                      (exe_action, None, start + 2),
                     ])
            add_data(self.db, "Condition",
                     [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
            start += 4
            assert start < 500

    def add_scripts(self):
        if self.install_script:
            start = 6800
            for ver in self.versions + [self.other_version]:
                install_action = "install_script." + ver
                exe_prop = "PYTHON" + ver
                add_data(self.db, "CustomAction",
                         [(install_action, 50, exe_prop, self.install_script_key)])
                add_data(self.db, "InstallExecuteSequence",
                         [(install_action, "&Python%s=3" % ver, start)])
                start += 1
        # XXX pre-install scripts are currently refused in finalize_options()
        # but if this feature is completed, it will also need to add
        # entries for each version as the above code does
        if self.pre_install_script:
            scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
            with open(scriptfn, "w") as f:
                # The batch file will be executed with [PYTHON], so that %1
                # is the path to the Python interpreter; %0 will be the path
                # of the batch file.
                # rem ="""
                # %1 %0
                # exit
                # """
                # <actual script>
                f.write('rem ="""\n%1 %0\nexit\n"""\n')
                with open(self.pre_install_script) as fin:
                    f.write(fin.read())
            add_data(self.db, "Binary",
                     [("PreInstall", msilib.Binary(scriptfn))
                     ])
            add_data(self.db, "CustomAction",
                     [("PreInstall", 2, "PreInstall", None)
                     ])
            add_data(self.db, "InstallExecuteSequence",
                     [("PreInstall", "NOT Installed", 450)])


    def add_ui(self):
        db = self.db
        x = y = 50
        w = 370
        h = 300
        title = "[ProductName] Setup"

        # see "Dialog Style Bits"
        modal = 3 # visible | modal
        modeless = 1 # visible
        track_disk_space = 32

        # UI customization properties
        add_data(db, "Property",
                 # See "DefaultUIFont Property"
                 [("DefaultUIFont", "DlgFont8"),
                  # See "ErrorDialog Style Bit"
                  ("ErrorDialog", "ErrorDlg"),
                  ("Progress1", "Install"), # modified in maintenance type dlg
                  ("Progress2", "installs"),
                  ("MaintenanceForm_Action", "Repair"),
                  # possible values: ALL, JUSTME
                  ("WhichUsers", "ALL")
                 ])

        # Fonts, see "TextStyle Table"
        add_data(db, "TextStyle",
                 [("DlgFont8", "Tahoma", 9, None, 0),
                  ("DlgFontBold8", "Tahoma", 8, None, 1), #bold
                  ("VerdanaBold10", "Verdana", 10, None, 1),
                  ("VerdanaRed9", "Verdana", 9, 255, 0),
                 ])

        # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
        # Numbers indicate sequence; see sequence.py for how these action integrate
        add_data(db, "InstallUISequence",
                 [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
                  ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
                  # In the user interface, assume all-users installation if privileged.
                  ("SelectFeaturesDlg", "Not Installed", 1230),
                  # XXX no support for resume installations yet
                  #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
                  ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
                  ("ProgressDlg", None, 1280)])

        add_data(db, 'ActionText', text.ActionText)
        add_data(db, 'UIText', text.UIText)
        #####################################################################
        # Standard dialogs: FatalError, UserExit, ExitDialog
        fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
                       "Finish", "Finish", "Finish")
        fatal.title("[ProductName] Installer ended prematurely")
        fatal.back("< Back", "Finish", active = 0)
        fatal.cancel("Cancel", "Back", active = 0)
        fatal.text("Description1", 15, 70, 320, 80, 0x30003,
                   "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
        fatal.text("Description2", 15, 155, 320, 20, 0x30003,
                   "Click the Finish button to exit the Installer.")
        c=fatal.next("Finish", "Cancel", name="Finish")
        c.event("EndDialog", "Exit")

        user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
                           "Finish", "Finish", "Finish")
        user_exit.title("[ProductName] Installer was interrupted")
        user_exit.back("< Back", "Finish", active = 0)
        user_exit.cancel("Cancel", "Back", active = 0)
        user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
                       "[ProductName] setup was interrupted. Your system has not been modified. "
                       "To install this program at a later time, please run the installation again.")
        user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
                       "Click the Finish button to exit the Installer.")
        c = user_exit.next("Finish", "Cancel", name="Finish")
        c.event("EndDialog", "Exit")

        exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
                               "Finish", "Finish", "Finish")
        exit_dialog.title("Completing the [ProductName] Installer")
        exit_dialog.back("< Back", "Finish", active = 0)
        exit_dialog.cancel("Cancel", "Back", active = 0)
        exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
                         "Click the Finish button to exit the Installer.")
        c = exit_dialog.next("Finish", "Cancel", name="Finish")
        c.event("EndDialog", "Return")

        #####################################################################
        # Required dialog: FilesInUse, ErrorDlg
        inuse = PyDialog(db, "FilesInUse",
                         x, y, w, h,
                         19, # KeepModeless|Modal|Visible
                         title,
                         "Retry", "Retry", "Retry", bitmap=False)
        inuse.text("Title", 15, 6, 200, 15, 0x30003,
                   r"{\DlgFontBold8}Files in Use")
        inuse.text("Description", 20, 23, 280, 20, 0x30003,
                   "Some files that need to be updated are currently in use.")
        inuse.text("Text", 20, 55, 330, 50, 3,
                   "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
        inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
                      None, None, None)
        c=inuse.back("Exit", "Ignore", name="Exit")
        c.event("EndDialog", "Exit")
        c=inuse.next("Ignore", "Retry", name="Ignore")
        c.event("EndDialog", "Ignore")
        c=inuse.cancel("Retry", "Exit", name="Retry")
        c.event("EndDialog","Retry")

        # See "Error Dialog". See "ICE20" for the required names of the controls.
        error = Dialog(db, "ErrorDlg",
                       50, 10, 330, 101,
                       65543, # Error|Minimize|Modal|Visible
                       title,
                       "ErrorText", None, None)
        error.text("ErrorText", 50,9,280,48,3, "")
        #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
        error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
        error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
        error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
        error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
        error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
        error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
        error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")

        #####################################################################
        # Global "Query Cancel" dialog
        cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
                        "No", "No", "No")
        cancel.text("Text", 48, 15, 194, 30, 3,
                    "Are you sure you want to cancel [ProductName] installation?")
        #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
        #               "py.ico", None, None)
        c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
        c.event("EndDialog", "Exit")

        c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
        c.event("EndDialog", "Return")

        #####################################################################
        # Global "Wait for costing" dialog
        costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
                         "Return", "Return", "Return")
        costing.text("Text", 48, 15, 194, 30, 3,
                     "Please wait while the installer finishes determining your disk space requirements.")
        c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
        c.event("EndDialog", "Exit")

        #####################################################################
        # Preparation dialog: no user input except cancellation
        prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
                        "Cancel", "Cancel", "Cancel")
        prep.text("Description", 15, 70, 320, 40, 0x30003,
                  "Please wait while the Installer prepares to guide you through the installation.")
        prep.title("Welcome to the [ProductName] Installer")
        c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
        c.mapping("ActionText", "Text")
        c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
        c.mapping("ActionData", "Text")
        prep.back("Back", None, active=0)
        prep.next("Next", None, active=0)
        c=prep.cancel("Cancel", None)
        c.event("SpawnDialog", "CancelDlg")

        #####################################################################
        # Feature (Python directory) selection
        seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
                          "Next", "Next", "Cancel")
        seldlg.title("Select Python Installations")

        seldlg.text("Hint", 15, 30, 300, 20, 3,
                    "Select the Python locations where %s should be installed."
                    % self.distribution.get_fullname())

        seldlg.back("< Back", None, active=0)
        c = seldlg.next("Next >", "Cancel")
        order = 1
        c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
        for version in self.versions + [self.other_version]:
            order += 1
            c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
                    "FEATURE_SELECTED AND &Python%s=3" % version,
                    ordering=order)
        c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
        c.event("EndDialog", "Return", ordering=order + 2)
        c = seldlg.cancel("Cancel", "Features")
        c.event("SpawnDialog", "CancelDlg")

        c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
                           "FEATURE", None, "PathEdit", None)
        c.event("[FEATURE_SELECTED]", "1")
        ver = self.other_version
        install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
        dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver

        c = seldlg.text("Other", 15, 200, 300, 15, 3,
                        "Provide an alternate Python location")
        c.condition("Enable", install_other_cond)
        c.condition("Show", install_other_cond)
        c.condition("Disable", dont_install_other_cond)
        c.condition("Hide", dont_install_other_cond)

        c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
                           "TARGETDIR" + ver, None, "Next", None)
        c.condition("Enable", install_other_cond)
        c.condition("Show", install_other_cond)
        c.condition("Disable", dont_install_other_cond)
        c.condition("Hide", dont_install_other_cond)

        #####################################################################
        # Disk cost
        cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
                        "OK", "OK", "OK", bitmap=False)
        cost.text("Title", 15, 6, 200, 15, 0x30003,
                  r"{\DlgFontBold8}Disk Space Requirements")
        cost.text("Description", 20, 20, 280, 20, 0x30003,
                  "The disk space required for the installation of the selected features.")
        cost.text("Text", 20, 53, 330, 60, 3,
                  "The highlighted volumes (if any) do not have enough disk space "
                  "available for the currently selected features. You can either "
                  "remove some files from the highlighted volumes, or choose to "
                  "install less features onto local drive(s), or select different "
                  "destination drive(s).")
        cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
                     None, "{120}{70}{70}{70}{70}", None, None)
        cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")

        #####################################################################
        # WhichUsers Dialog. Only available on NT, and for privileged users.
        # This must be run before FindRelatedProducts, because that will
        # take into account whether the previous installation was per-user
        # or per-machine. We currently don't support going back to this
        # dialog after "Next" was selected; to support this, we would need to
        # find how to reset the ALLUSERS property, and how to re-run
        # FindRelatedProducts.
        # On Windows9x, the ALLUSERS property is ignored on the command line
        # and in the Property table, but installer fails according to the documentation
        # if a dialog attempts to set ALLUSERS.
        whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
                              "AdminInstall", "Next", "Cancel")
        whichusers.title("Select whether to install [ProductName] for all users of this computer.")
        # A radio group with two options: allusers, justme
        g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
                                  "WhichUsers", "", "Next")
        g.add("ALL", 0, 5, 150, 20, "Install for all users")
        g.add("JUSTME", 0, 25, 150, 20, "Install just for me")

        whichusers.back("Back", None, active=0)

        c = whichusers.next("Next >", "Cancel")
        c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
        c.event("EndDialog", "Return", ordering = 2)

        c = whichusers.cancel("Cancel", "AdminInstall")
        c.event("SpawnDialog", "CancelDlg")

        #####################################################################
        # Installation Progress dialog (modeless)
        progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
                            "Cancel", "Cancel", "Cancel", bitmap=False)
        progress.text("Title", 20, 15, 200, 15, 0x30003,
                      r"{\DlgFontBold8}[Progress1] [ProductName]")
        progress.text("Text", 35, 65, 300, 30, 3,
                      "Please wait while the Installer [Progress2] [ProductName]. "
                      "This may take several minutes.")
        progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")

        c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
        c.mapping("ActionText", "Text")

        #c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
        #c.mapping("ActionData", "Text")

        c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
                           None, "Progress done", None, None)
        c.mapping("SetProgress", "Progress")

        progress.back("< Back", "Next", active=False)
        progress.next("Next >", "Cancel", active=False)
        progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")

        ###################################################################
        # Maintenance type: repair/uninstall
        maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
                         "Next", "Next", "Cancel")
        maint.title("Welcome to the [ProductName] Setup Wizard")
        maint.text("BodyText", 15, 63, 330, 42, 3,
                   "Select whether you want to repair or remove [ProductName].")
        g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
                           "MaintenanceForm_Action", "", "Next")
        #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
        g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
        g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")

        maint.back("< Back", None, active=False)
        c=maint.next("Finish", "Cancel")
        # Change installation: Change progress dialog to "Change", then ask
        # for feature selection
        #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
        #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)

        # Reinstall: Change progress dialog to "Repair", then invoke reinstall
        # Also set list of reinstalled features to "ALL"
        c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
        c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
        c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
        c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)

        # Uninstall: Change progress to "Remove", then invoke uninstall
        # Also set list of removed features to "ALL"
        c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
        c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
        c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
        c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)

        # Close dialog when maintenance action scheduled
        c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
        #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)

        maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")

    def get_installer_filename(self, fullname):
        # Factored out to allow overriding in subclasses
        if self.target_version:
            base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
                                            self.target_version)
        else:
            base_name = "%s.%s.msi" % (fullname, self.plat_name)
        installer_name = os.path.join(self.dist_dir, base_name)
        return installer_name
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/clean.py
ADDED
@@ -0,0 +1,76 @@
"""distutils.command.clean

Implements the Distutils 'clean' command."""

# contributed by Bastian Kleineidam <[email protected]>, added 2000-03-18

import os
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils import log

class clean(Command):

    description = "clean up temporary files from 'build' command"
    user_options = [
        ('build-base=', 'b',
         "base build directory (default: 'build.build-base')"),
        ('build-lib=', None,
         "build directory for all modules (default: 'build.build-lib')"),
        ('build-temp=', 't',
         "temporary build directory (default: 'build.build-temp')"),
        ('build-scripts=', None,
         "build directory for scripts (default: 'build.build-scripts')"),
        ('bdist-base=', None,
         "temporary directory for built distributions"),
        ('all', 'a',
         "remove all build output, not just temporary by-products")
    ]

    boolean_options = ['all']

    def initialize_options(self):
        self.build_base = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.bdist_base = None
        self.all = None

    def finalize_options(self):
        self.set_undefined_options('build',
                                   ('build_base', 'build_base'),
                                   ('build_lib', 'build_lib'),
                                   ('build_scripts', 'build_scripts'),
                                   ('build_temp', 'build_temp'))
        self.set_undefined_options('bdist',
                                   ('bdist_base', 'bdist_base'))

    def run(self):
        # remove the build/temp.<plat> directory (unless it's already
        # gone)
        if os.path.exists(self.build_temp):
            remove_tree(self.build_temp, dry_run=self.dry_run)
        else:
            log.debug("'%s' does not exist -- can't clean it",
                      self.build_temp)

        if self.all:
            # remove build directories
            for directory in (self.build_lib,
                              self.bdist_base,
                              self.build_scripts):
                if os.path.exists(directory):
                    remove_tree(directory, dry_run=self.dry_run)
                else:
                    log.warn("'%s' does not exist -- can't clean it",
                             directory)

        # just for the heck of it, try to remove the base build directory:
        # we might have emptied it right now, but if not we don't care
        if not self.dry_run:
            try:
                os.rmdir(self.build_base)
                log.info("removing '%s'", self.build_base)
            except OSError:
                pass
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/install.py
ADDED
@@ -0,0 +1,721 @@
"""distutils.command.install

Implements the Distutils 'install' command."""

import sys
import os
import contextlib
import sysconfig
import itertools

from distutils import log
from distutils.core import Command
from distutils.debug import DEBUG
from distutils.sysconfig import get_config_vars
from distutils.errors import DistutilsPlatformError
from distutils.file_util import write_file
from distutils.util import convert_path, subst_vars, change_root
from distutils.util import get_platform
from distutils.errors import DistutilsOptionError

from site import USER_BASE
from site import USER_SITE
HAS_USER_SITE = True

WINDOWS_SCHEME = {
    'purelib': '{base}/Lib/site-packages',
    'platlib': '{base}/Lib/site-packages',
    'headers': '{base}/Include/{dist_name}',
    'scripts': '{base}/Scripts',
    'data'   : '{base}',
}

INSTALL_SCHEMES = {
    'posix_prefix': {
        'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
        'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages',
        'headers': '{base}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}',
        'scripts': '{base}/bin',
        'data'   : '{base}',
        },
    'posix_home': {
        'purelib': '{base}/lib/{implementation_lower}',
        'platlib': '{base}/{platlibdir}/{implementation_lower}',
        'headers': '{base}/include/{implementation_lower}/{dist_name}',
        'scripts': '{base}/bin',
        'data'   : '{base}',
        },
    'nt': WINDOWS_SCHEME,
    'pypy': {
        'purelib': '{base}/site-packages',
        'platlib': '{base}/site-packages',
        'headers': '{base}/include/{dist_name}',
        'scripts': '{base}/bin',
        'data'   : '{base}',
        },
    'pypy_nt': {
        'purelib': '{base}/site-packages',
        'platlib': '{base}/site-packages',
        'headers': '{base}/include/{dist_name}',
        'scripts': '{base}/Scripts',
        'data'   : '{base}',
        },
    }

# user site schemes
if HAS_USER_SITE:
    INSTALL_SCHEMES['nt_user'] = {
        'purelib': '{usersite}',
        'platlib': '{usersite}',
        'headers': '{userbase}/{implementation}{py_version_nodot}/Include/{dist_name}',
        'scripts': '{userbase}/{implementation}{py_version_nodot}/Scripts',
        'data'   : '{userbase}',
        }

    INSTALL_SCHEMES['posix_user'] = {
        'purelib': '{usersite}',
        'platlib': '{usersite}',
        'headers':
            '{userbase}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}',
        'scripts': '{userbase}/bin',
        'data'   : '{userbase}',
        }

# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every installation scheme above,
# and to SCHEME_KEYS here.
SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')


def _load_sysconfig_schemes():
    with contextlib.suppress(AttributeError):
        return {
            scheme: sysconfig.get_paths(scheme, expand=False)
            for scheme in sysconfig.get_scheme_names()
        }


def _load_schemes():
    """
    Extend default schemes with schemes from sysconfig.
    """

    sysconfig_schemes = _load_sysconfig_schemes() or {}

    return {
        scheme: {
            **INSTALL_SCHEMES.get(scheme, {}),
            **sysconfig_schemes.get(scheme, {}),
        }
        for scheme in set(itertools.chain(INSTALL_SCHEMES, sysconfig_schemes))
    }


def _get_implementation():
    if hasattr(sys, 'pypy_version_info'):
        return 'PyPy'
    else:
        return 'Python'


class install(Command):

    description = "install everything from build directory"

    user_options = [
        # Select installation scheme and set base director(y|ies)
        ('prefix=', None,
         "installation prefix"),
        ('exec-prefix=', None,
         "(Unix only) prefix for platform-specific files"),
        ('home=', None,
         "(Unix only) home directory to install under"),

        # Or, just set the base director(y|ies)
        ('install-base=', None,
         "base installation directory (instead of --prefix or --home)"),
        ('install-platbase=', None,
         "base installation directory for platform-specific files " +
         "(instead of --exec-prefix or --home)"),
|
140 |
+
('root=', None,
|
141 |
+
"install everything relative to this alternate root directory"),
|
142 |
+
|
143 |
+
# Or, explicitly set the installation scheme
|
144 |
+
('install-purelib=', None,
|
145 |
+
"installation directory for pure Python module distributions"),
|
146 |
+
('install-platlib=', None,
|
147 |
+
"installation directory for non-pure module distributions"),
|
148 |
+
('install-lib=', None,
|
149 |
+
"installation directory for all module distributions " +
|
150 |
+
"(overrides --install-purelib and --install-platlib)"),
|
151 |
+
|
152 |
+
('install-headers=', None,
|
153 |
+
"installation directory for C/C++ headers"),
|
154 |
+
('install-scripts=', None,
|
155 |
+
"installation directory for Python scripts"),
|
156 |
+
('install-data=', None,
|
157 |
+
"installation directory for data files"),
|
158 |
+
|
159 |
+
# Byte-compilation options -- see install_lib.py for details, as
|
160 |
+
# these are duplicated from there (but only install_lib does
|
161 |
+
# anything with them).
|
162 |
+
('compile', 'c', "compile .py to .pyc [default]"),
|
163 |
+
('no-compile', None, "don't compile .py files"),
|
164 |
+
('optimize=', 'O',
|
165 |
+
"also compile with optimization: -O1 for \"python -O\", "
|
166 |
+
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
|
167 |
+
|
168 |
+
# Miscellaneous control options
|
169 |
+
('force', 'f',
|
170 |
+
"force installation (overwrite any existing files)"),
|
171 |
+
('skip-build', None,
|
172 |
+
"skip rebuilding everything (for testing/debugging)"),
|
173 |
+
|
174 |
+
# Where to install documentation (eventually!)
|
175 |
+
#('doc-format=', None, "format of documentation to generate"),
|
176 |
+
#('install-man=', None, "directory for Unix man pages"),
|
177 |
+
#('install-html=', None, "directory for HTML documentation"),
|
178 |
+
#('install-info=', None, "directory for GNU info files"),
|
179 |
+
|
180 |
+
('record=', None,
|
181 |
+
"filename in which to record list of installed files"),
|
182 |
+
]
|
183 |
+
|
184 |
+
boolean_options = ['compile', 'force', 'skip-build']
|
185 |
+
|
186 |
+
if HAS_USER_SITE:
|
187 |
+
user_options.append(('user', None,
|
188 |
+
"install in user site-package '%s'" % USER_SITE))
|
189 |
+
boolean_options.append('user')
|
190 |
+
|
191 |
+
negative_opt = {'no-compile' : 'compile'}
|
192 |
+
|
193 |
+
|
194 |
+
def initialize_options(self):
|
195 |
+
"""Initializes options."""
|
196 |
+
# High-level options: these select both an installation base
|
197 |
+
# and scheme.
|
198 |
+
self.prefix = None
|
199 |
+
self.exec_prefix = None
|
200 |
+
self.home = None
|
201 |
+
self.user = 0
|
202 |
+
|
203 |
+
# These select only the installation base; it's up to the user to
|
204 |
+
# specify the installation scheme (currently, that means supplying
|
205 |
+
# the --install-{platlib,purelib,scripts,data} options).
|
206 |
+
self.install_base = None
|
207 |
+
self.install_platbase = None
|
208 |
+
self.root = None
|
209 |
+
|
210 |
+
# These options are the actual installation directories; if not
|
211 |
+
# supplied by the user, they are filled in using the installation
|
212 |
+
# scheme implied by prefix/exec-prefix/home and the contents of
|
213 |
+
# that installation scheme.
|
214 |
+
self.install_purelib = None # for pure module distributions
|
215 |
+
self.install_platlib = None # non-pure (dists w/ extensions)
|
216 |
+
self.install_headers = None # for C/C++ headers
|
217 |
+
self.install_lib = None # set to either purelib or platlib
|
218 |
+
self.install_scripts = None
|
219 |
+
self.install_data = None
|
220 |
+
self.install_userbase = USER_BASE
|
221 |
+
self.install_usersite = USER_SITE
|
222 |
+
|
223 |
+
self.compile = None
|
224 |
+
self.optimize = None
|
225 |
+
|
226 |
+
# Deprecated
|
227 |
+
# These two are for putting non-packagized distributions into their
|
228 |
+
# own directory and creating a .pth file if it makes sense.
|
229 |
+
# 'extra_path' comes from the setup file; 'install_path_file' can
|
230 |
+
# be turned off if it makes no sense to install a .pth file. (But
|
231 |
+
# better to install it uselessly than to guess wrong and not
|
232 |
+
# install it when it's necessary and would be used!) Currently,
|
233 |
+
# 'install_path_file' is always true unless some outsider meddles
|
234 |
+
# with it.
|
235 |
+
self.extra_path = None
|
236 |
+
self.install_path_file = 1
|
237 |
+
|
238 |
+
# 'force' forces installation, even if target files are not
|
239 |
+
# out-of-date. 'skip_build' skips running the "build" command,
|
240 |
+
# handy if you know it's not necessary. 'warn_dir' (which is *not*
|
241 |
+
# a user option, it's just there so the bdist_* commands can turn
|
242 |
+
# it off) determines whether we warn about installing to a
|
243 |
+
# directory not in sys.path.
|
244 |
+
self.force = 0
|
245 |
+
self.skip_build = 0
|
246 |
+
self.warn_dir = 1
|
247 |
+
|
248 |
+
# These are only here as a conduit from the 'build' command to the
|
249 |
+
# 'install_*' commands that do the real work. ('build_base' isn't
|
250 |
+
# actually used anywhere, but it might be useful in future.) They
|
251 |
+
# are not user options, because if the user told the install
|
252 |
+
# command where the build directory is, that wouldn't affect the
|
253 |
+
# build command.
|
254 |
+
self.build_base = None
|
255 |
+
self.build_lib = None
|
256 |
+
|
257 |
+
# Not defined yet because we don't know anything about
|
258 |
+
# documentation yet.
|
259 |
+
#self.install_man = None
|
260 |
+
#self.install_html = None
|
261 |
+
#self.install_info = None
|
262 |
+
|
263 |
+
self.record = None
|
264 |
+
|
265 |
+
|
266 |
+
# -- Option finalizing methods -------------------------------------
|
267 |
+
# (This is rather more involved than for most commands,
|
268 |
+
# because this is where the policy for installing third-
|
269 |
+
# party Python modules on various platforms given a wide
|
270 |
+
# array of user input is decided. Yes, it's quite complex!)
|
271 |
+
|
272 |
+
def finalize_options(self):
|
273 |
+
"""Finalizes options."""
|
274 |
+
# This method (and its helpers, like 'finalize_unix()',
|
275 |
+
# 'finalize_other()', and 'select_scheme()') is where the default
|
276 |
+
# installation directories for modules, extension modules, and
|
277 |
+
# anything else we care to install from a Python module
|
278 |
+
# distribution. Thus, this code makes a pretty important policy
|
279 |
+
# statement about how third-party stuff is added to a Python
|
280 |
+
# installation! Note that the actual work of installation is done
|
281 |
+
# by the relatively simple 'install_*' commands; they just take
|
282 |
+
# their orders from the installation directory options determined
|
283 |
+
# here.
|
284 |
+
|
285 |
+
# Check for errors/inconsistencies in the options; first, stuff
|
286 |
+
# that's wrong on any platform.
|
287 |
+
|
288 |
+
if ((self.prefix or self.exec_prefix or self.home) and
|
289 |
+
(self.install_base or self.install_platbase)):
|
290 |
+
raise DistutilsOptionError(
|
291 |
+
"must supply either prefix/exec-prefix/home or " +
|
292 |
+
"install-base/install-platbase -- not both")
|
293 |
+
|
294 |
+
if self.home and (self.prefix or self.exec_prefix):
|
295 |
+
raise DistutilsOptionError(
|
296 |
+
"must supply either home or prefix/exec-prefix -- not both")
|
297 |
+
|
298 |
+
if self.user and (self.prefix or self.exec_prefix or self.home or
|
299 |
+
self.install_base or self.install_platbase):
|
300 |
+
raise DistutilsOptionError("can't combine user with prefix, "
|
301 |
+
"exec_prefix/home, or install_(plat)base")
|
302 |
+
|
303 |
+
# Next, stuff that's wrong (or dubious) only on certain platforms.
|
304 |
+
if os.name != "posix":
|
305 |
+
if self.exec_prefix:
|
306 |
+
self.warn("exec-prefix option ignored on this platform")
|
307 |
+
self.exec_prefix = None
|
308 |
+
|
309 |
+
# Now the interesting logic -- so interesting that we farm it out
|
310 |
+
# to other methods. The goal of these methods is to set the final
|
311 |
+
# values for the install_{lib,scripts,data,...} options, using as
|
312 |
+
# input a heady brew of prefix, exec_prefix, home, install_base,
|
313 |
+
# install_platbase, user-supplied versions of
|
314 |
+
# install_{purelib,platlib,lib,scripts,data,...}, and the
|
315 |
+
# install schemes. Phew!
|
316 |
+
|
317 |
+
self.dump_dirs("pre-finalize_{unix,other}")
|
318 |
+
|
319 |
+
if os.name == 'posix':
|
320 |
+
self.finalize_unix()
|
321 |
+
else:
|
322 |
+
self.finalize_other()
|
323 |
+
|
324 |
+
self.dump_dirs("post-finalize_{unix,other}()")
|
325 |
+
|
326 |
+
# Expand configuration variables, tilde, etc. in self.install_base
|
327 |
+
# and self.install_platbase -- that way, we can use $base or
|
328 |
+
# $platbase in the other installation directories and not worry
|
329 |
+
# about needing recursive variable expansion (shudder).
|
330 |
+
|
331 |
+
py_version = sys.version.split()[0]
|
332 |
+
(prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
|
333 |
+
try:
|
334 |
+
abiflags = sys.abiflags
|
335 |
+
except AttributeError:
|
336 |
+
# sys.abiflags may not be defined on all platforms.
|
337 |
+
abiflags = ''
|
338 |
+
self.config_vars = {'dist_name': self.distribution.get_name(),
|
339 |
+
'dist_version': self.distribution.get_version(),
|
340 |
+
'dist_fullname': self.distribution.get_fullname(),
|
341 |
+
'py_version': py_version,
|
342 |
+
'py_version_short': '%d.%d' % sys.version_info[:2],
|
343 |
+
'py_version_nodot': '%d%d' % sys.version_info[:2],
|
344 |
+
'sys_prefix': prefix,
|
345 |
+
'prefix': prefix,
|
346 |
+
'sys_exec_prefix': exec_prefix,
|
347 |
+
'exec_prefix': exec_prefix,
|
348 |
+
'abiflags': abiflags,
|
349 |
+
'platlibdir': getattr(sys, 'platlibdir', 'lib'),
|
350 |
+
'implementation_lower': _get_implementation().lower(),
|
351 |
+
'implementation': _get_implementation(),
|
352 |
+
}
|
353 |
+
|
354 |
+
if HAS_USER_SITE:
|
355 |
+
self.config_vars['userbase'] = self.install_userbase
|
356 |
+
self.config_vars['usersite'] = self.install_usersite
|
357 |
+
|
358 |
+
self.expand_basedirs()
|
359 |
+
|
360 |
+
self.dump_dirs("post-expand_basedirs()")
|
361 |
+
|
362 |
+
# Now define config vars for the base directories so we can expand
|
363 |
+
# everything else.
|
364 |
+
self.config_vars['base'] = self.install_base
|
365 |
+
self.config_vars['platbase'] = self.install_platbase
|
366 |
+
self.config_vars['installed_base'] = (
|
367 |
+
sysconfig.get_config_vars()['installed_base'])
|
368 |
+
|
369 |
+
if DEBUG:
|
370 |
+
from pprint import pprint
|
371 |
+
print("config vars:")
|
372 |
+
pprint(self.config_vars)
|
373 |
+
|
374 |
+
# Expand "~" and configuration variables in the installation
|
375 |
+
# directories.
|
376 |
+
self.expand_dirs()
|
377 |
+
|
378 |
+
self.dump_dirs("post-expand_dirs()")
|
379 |
+
|
380 |
+
# Create directories in the home dir:
|
381 |
+
if self.user:
|
382 |
+
self.create_home_path()
|
383 |
+
|
384 |
+
# Pick the actual directory to install all modules to: either
|
385 |
+
# install_purelib or install_platlib, depending on whether this
|
386 |
+
# module distribution is pure or not. Of course, if the user
|
387 |
+
# already specified install_lib, use their selection.
|
388 |
+
if self.install_lib is None:
|
389 |
+
if self.distribution.has_ext_modules(): # has extensions: non-pure
|
390 |
+
self.install_lib = self.install_platlib
|
391 |
+
else:
|
392 |
+
self.install_lib = self.install_purelib
|
393 |
+
|
394 |
+
|
395 |
+
# Convert directories from Unix /-separated syntax to the local
|
396 |
+
# convention.
|
397 |
+
self.convert_paths('lib', 'purelib', 'platlib',
|
398 |
+
'scripts', 'data', 'headers',
|
399 |
+
'userbase', 'usersite')
|
400 |
+
|
401 |
+
# Deprecated
|
402 |
+
# Well, we're not actually fully completely finalized yet: we still
|
403 |
+
# have to deal with 'extra_path', which is the hack for allowing
|
404 |
+
# non-packagized module distributions (hello, Numerical Python!) to
|
405 |
+
# get their own directories.
|
406 |
+
self.handle_extra_path()
|
407 |
+
self.install_libbase = self.install_lib # needed for .pth file
|
408 |
+
self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
|
409 |
+
|
410 |
+
# If a new root directory was supplied, make all the installation
|
411 |
+
# dirs relative to it.
|
412 |
+
if self.root is not None:
|
413 |
+
self.change_roots('libbase', 'lib', 'purelib', 'platlib',
|
414 |
+
'scripts', 'data', 'headers')
|
415 |
+
|
416 |
+
self.dump_dirs("after prepending root")
|
417 |
+
|
418 |
+
# Find out the build directories, ie. where to install from.
|
419 |
+
self.set_undefined_options('build',
|
420 |
+
('build_base', 'build_base'),
|
421 |
+
('build_lib', 'build_lib'))
|
422 |
+
|
423 |
+
# Punt on doc directories for now -- after all, we're punting on
|
424 |
+
# documentation completely!
|
425 |
+
|
426 |
+
def dump_dirs(self, msg):
|
427 |
+
"""Dumps the list of user options."""
|
428 |
+
if not DEBUG:
|
429 |
+
return
|
430 |
+
from distutils.fancy_getopt import longopt_xlate
|
431 |
+
log.debug(msg + ":")
|
432 |
+
for opt in self.user_options:
|
433 |
+
opt_name = opt[0]
|
434 |
+
if opt_name[-1] == "=":
|
435 |
+
opt_name = opt_name[0:-1]
|
436 |
+
if opt_name in self.negative_opt:
|
437 |
+
opt_name = self.negative_opt[opt_name]
|
438 |
+
opt_name = opt_name.translate(longopt_xlate)
|
439 |
+
val = not getattr(self, opt_name)
|
440 |
+
else:
|
441 |
+
opt_name = opt_name.translate(longopt_xlate)
|
442 |
+
val = getattr(self, opt_name)
|
443 |
+
log.debug(" %s: %s", opt_name, val)
|
444 |
+
|
445 |
+
def finalize_unix(self):
|
446 |
+
"""Finalizes options for posix platforms."""
|
447 |
+
if self.install_base is not None or self.install_platbase is not None:
|
448 |
+
if ((self.install_lib is None and
|
449 |
+
self.install_purelib is None and
|
450 |
+
self.install_platlib is None) or
|
451 |
+
self.install_headers is None or
|
452 |
+
self.install_scripts is None or
|
453 |
+
self.install_data is None):
|
454 |
+
raise DistutilsOptionError(
|
455 |
+
"install-base or install-platbase supplied, but "
|
456 |
+
"installation scheme is incomplete")
|
457 |
+
return
|
458 |
+
|
459 |
+
if self.user:
|
460 |
+
if self.install_userbase is None:
|
461 |
+
raise DistutilsPlatformError(
|
462 |
+
"User base directory is not specified")
|
463 |
+
self.install_base = self.install_platbase = self.install_userbase
|
464 |
+
self.select_scheme("posix_user")
|
465 |
+
elif self.home is not None:
|
466 |
+
self.install_base = self.install_platbase = self.home
|
467 |
+
self.select_scheme("posix_home")
|
468 |
+
else:
|
469 |
+
if self.prefix is None:
|
470 |
+
if self.exec_prefix is not None:
|
471 |
+
raise DistutilsOptionError(
|
472 |
+
"must not supply exec-prefix without prefix")
|
473 |
+
|
474 |
+
# Allow Fedora to add components to the prefix
|
475 |
+
_prefix_addition = getattr(sysconfig, '_prefix_addition', "")
|
476 |
+
|
477 |
+
self.prefix = (
|
478 |
+
os.path.normpath(sys.prefix) + _prefix_addition)
|
479 |
+
self.exec_prefix = (
|
480 |
+
os.path.normpath(sys.exec_prefix) + _prefix_addition)
|
481 |
+
|
482 |
+
else:
|
483 |
+
if self.exec_prefix is None:
|
484 |
+
self.exec_prefix = self.prefix
|
485 |
+
|
486 |
+
self.install_base = self.prefix
|
487 |
+
self.install_platbase = self.exec_prefix
|
488 |
+
self.select_scheme("posix_prefix")
|
489 |
+
|
490 |
+
def finalize_other(self):
|
491 |
+
"""Finalizes options for non-posix platforms"""
|
492 |
+
if self.user:
|
493 |
+
if self.install_userbase is None:
|
494 |
+
raise DistutilsPlatformError(
|
495 |
+
"User base directory is not specified")
|
496 |
+
self.install_base = self.install_platbase = self.install_userbase
|
497 |
+
self.select_scheme(os.name + "_user")
|
498 |
+
elif self.home is not None:
|
499 |
+
self.install_base = self.install_platbase = self.home
|
500 |
+
self.select_scheme("posix_home")
|
501 |
+
else:
|
502 |
+
if self.prefix is None:
|
503 |
+
self.prefix = os.path.normpath(sys.prefix)
|
504 |
+
|
505 |
+
self.install_base = self.install_platbase = self.prefix
|
506 |
+
try:
|
507 |
+
self.select_scheme(os.name)
|
508 |
+
except KeyError:
|
509 |
+
raise DistutilsPlatformError(
|
510 |
+
"I don't know how to install stuff on '%s'" % os.name)
|
511 |
+
|
512 |
+
def select_scheme(self, name):
|
513 |
+
"""Sets the install directories by applying the install schemes."""
|
514 |
+
# it's the caller's problem if they supply a bad name!
|
515 |
+
if (hasattr(sys, 'pypy_version_info') and
|
516 |
+
sys.version_info < (3, 8) and
|
517 |
+
not name.endswith(('_user', '_home'))):
|
518 |
+
if os.name == 'nt':
|
519 |
+
name = 'pypy_nt'
|
520 |
+
else:
|
521 |
+
name = 'pypy'
|
522 |
+
scheme = _load_schemes()[name]
|
523 |
+
for key in SCHEME_KEYS:
|
524 |
+
attrname = 'install_' + key
|
525 |
+
if getattr(self, attrname) is None:
|
526 |
+
setattr(self, attrname, scheme[key])
|
527 |
+
|
528 |
+
def _expand_attrs(self, attrs):
|
529 |
+
for attr in attrs:
|
530 |
+
val = getattr(self, attr)
|
531 |
+
if val is not None:
|
532 |
+
if os.name == 'posix' or os.name == 'nt':
|
533 |
+
val = os.path.expanduser(val)
|
534 |
+
val = subst_vars(val, self.config_vars)
|
535 |
+
setattr(self, attr, val)
|
536 |
+
|
537 |
+
def expand_basedirs(self):
|
538 |
+
"""Calls `os.path.expanduser` on install_base, install_platbase and
|
539 |
+
root."""
|
540 |
+
self._expand_attrs(['install_base', 'install_platbase', 'root'])
|
541 |
+
|
542 |
+
def expand_dirs(self):
|
543 |
+
"""Calls `os.path.expanduser` on install dirs."""
|
544 |
+
self._expand_attrs(['install_purelib', 'install_platlib',
|
545 |
+
'install_lib', 'install_headers',
|
546 |
+
'install_scripts', 'install_data',])
|
547 |
+
|
548 |
+
def convert_paths(self, *names):
|
549 |
+
"""Call `convert_path` over `names`."""
|
550 |
+
for name in names:
|
551 |
+
attr = "install_" + name
|
552 |
+
setattr(self, attr, convert_path(getattr(self, attr)))
|
553 |
+
|
554 |
+
def handle_extra_path(self):
|
555 |
+
"""Set `path_file` and `extra_dirs` using `extra_path`."""
|
556 |
+
if self.extra_path is None:
|
557 |
+
self.extra_path = self.distribution.extra_path
|
558 |
+
|
559 |
+
if self.extra_path is not None:
|
560 |
+
log.warn(
|
561 |
+
"Distribution option extra_path is deprecated. "
|
562 |
+
"See issue27919 for details."
|
563 |
+
)
|
564 |
+
if isinstance(self.extra_path, str):
|
565 |
+
self.extra_path = self.extra_path.split(',')
|
566 |
+
|
567 |
+
if len(self.extra_path) == 1:
|
568 |
+
path_file = extra_dirs = self.extra_path[0]
|
569 |
+
elif len(self.extra_path) == 2:
|
570 |
+
path_file, extra_dirs = self.extra_path
|
571 |
+
else:
|
572 |
+
raise DistutilsOptionError(
|
573 |
+
"'extra_path' option must be a list, tuple, or "
|
574 |
+
"comma-separated string with 1 or 2 elements")
|
575 |
+
|
576 |
+
# convert to local form in case Unix notation used (as it
|
577 |
+
# should be in setup scripts)
|
578 |
+
extra_dirs = convert_path(extra_dirs)
|
579 |
+
else:
|
580 |
+
path_file = None
|
581 |
+
extra_dirs = ''
|
582 |
+
|
583 |
+
# XXX should we warn if path_file and not extra_dirs? (in which
|
584 |
+
# case the path file would be harmless but pointless)
|
585 |
+
self.path_file = path_file
|
586 |
+
self.extra_dirs = extra_dirs
|
587 |
+
|
588 |
+
def change_roots(self, *names):
|
589 |
+
"""Change the install directories pointed by name using root."""
|
590 |
+
for name in names:
|
591 |
+
attr = "install_" + name
|
592 |
+
setattr(self, attr, change_root(self.root, getattr(self, attr)))
|
593 |
+
|
594 |
+
def create_home_path(self):
|
595 |
+
"""Create directories under ~."""
|
596 |
+
if not self.user:
|
597 |
+
return
|
598 |
+
home = convert_path(os.path.expanduser("~"))
|
599 |
+
for name, path in self.config_vars.items():
|
600 |
+
if path.startswith(home) and not os.path.isdir(path):
|
601 |
+
self.debug_print("os.makedirs('%s', 0o700)" % path)
|
602 |
+
os.makedirs(path, 0o700)
|
603 |
+
|
604 |
+
# -- Command execution methods -------------------------------------
|
605 |
+
|
606 |
+
def run(self):
|
607 |
+
"""Runs the command."""
|
608 |
+
# Obviously have to build before we can install
|
609 |
+
if not self.skip_build:
|
610 |
+
self.run_command('build')
|
611 |
+
# If we built for any other platform, we can't install.
|
612 |
+
build_plat = self.distribution.get_command_obj('build').plat_name
|
613 |
+
# check warn_dir - it is a clue that the 'install' is happening
|
614 |
+
# internally, and not to sys.path, so we don't check the platform
|
615 |
+
# matches what we are running.
|
616 |
+
if self.warn_dir and build_plat != get_platform():
|
617 |
+
raise DistutilsPlatformError("Can't install when "
|
618 |
+
"cross-compiling")
|
619 |
+
|
620 |
+
# Run all sub-commands (at least those that need to be run)
|
621 |
+
for cmd_name in self.get_sub_commands():
|
622 |
+
self.run_command(cmd_name)
|
623 |
+
|
624 |
+
if self.path_file:
|
625 |
+
self.create_path_file()
|
626 |
+
|
627 |
+
# write list of installed files, if requested.
|
628 |
+
if self.record:
|
629 |
+
outputs = self.get_outputs()
|
630 |
+
if self.root: # strip any package prefix
|
631 |
+
root_len = len(self.root)
|
632 |
+
for counter in range(len(outputs)):
|
633 |
+
outputs[counter] = outputs[counter][root_len:]
|
634 |
+
self.execute(write_file,
|
635 |
+
(self.record, outputs),
|
636 |
+
"writing list of installed files to '%s'" %
|
637 |
+
self.record)
|
638 |
+
|
639 |
+
sys_path = map(os.path.normpath, sys.path)
|
640 |
+
sys_path = map(os.path.normcase, sys_path)
|
641 |
+
install_lib = os.path.normcase(os.path.normpath(self.install_lib))
|
642 |
+
if (self.warn_dir and
|
643 |
+
not (self.path_file and self.install_path_file) and
|
644 |
+
install_lib not in sys_path):
|
645 |
+
log.debug(("modules installed to '%s', which is not in "
|
646 |
+
"Python's module search path (sys.path) -- "
|
647 |
+
"you'll have to change the search path yourself"),
|
648 |
+
self.install_lib)
|
649 |
+
|
650 |
+
def create_path_file(self):
|
651 |
+
"""Creates the .pth file"""
|
652 |
+
filename = os.path.join(self.install_libbase,
|
653 |
+
self.path_file + ".pth")
|
654 |
+
if self.install_path_file:
|
655 |
+
self.execute(write_file,
|
656 |
+
(filename, [self.extra_dirs]),
|
657 |
+
"creating %s" % filename)
|
658 |
+
else:
|
659 |
+
self.warn("path file '%s' not created" % filename)
|
660 |
+
|
661 |
+
|
662 |
+
# -- Reporting methods ---------------------------------------------
|
663 |
+
|
664 |
+
def get_outputs(self):
|
665 |
+
"""Assembles the outputs of all the sub-commands."""
|
666 |
+
outputs = []
|
667 |
+
for cmd_name in self.get_sub_commands():
|
668 |
+
cmd = self.get_finalized_command(cmd_name)
|
669 |
+
# Add the contents of cmd.get_outputs(), ensuring
|
670 |
+
# that outputs doesn't contain duplicate entries
|
671 |
+
for filename in cmd.get_outputs():
|
672 |
+
if filename not in outputs:
|
673 |
+
outputs.append(filename)
|
674 |
+
|
675 |
+
if self.path_file and self.install_path_file:
|
676 |
+
outputs.append(os.path.join(self.install_libbase,
|
677 |
+
self.path_file + ".pth"))
|
678 |
+
|
679 |
+
return outputs
|
680 |
+
|
681 |
+
def get_inputs(self):
|
682 |
+
"""Returns the inputs of all the sub-commands"""
|
683 |
+
# XXX gee, this looks familiar ;-(
|
684 |
+
inputs = []
|
685 |
+
for cmd_name in self.get_sub_commands():
|
686 |
+
cmd = self.get_finalized_command(cmd_name)
|
687 |
+
inputs.extend(cmd.get_inputs())
|
688 |
+
|
689 |
+
return inputs
|
690 |
+
|
691 |
+
# -- Predicates for sub-command list -------------------------------
|
692 |
+
|
693 |
+
def has_lib(self):
|
694 |
+
"""Returns true if the current distribution has any Python
|
695 |
+
modules to install."""
|
696 |
+
return (self.distribution.has_pure_modules() or
|
697 |
+
self.distribution.has_ext_modules())
|
698 |
+
|
699 |
+
def has_headers(self):
|
700 |
+
"""Returns true if the current distribution has any headers to
|
701 |
+
install."""
|
702 |
+
return self.distribution.has_headers()
|
703 |
+
|
704 |
+
def has_scripts(self):
|
705 |
+
"""Returns true if the current distribution has any scripts to.
|
706 |
+
install."""
|
707 |
+
return self.distribution.has_scripts()
|
708 |
+
|
709 |
+
def has_data(self):
|
710 |
+
"""Returns true if the current distribution has any data to.
|
711 |
+
install."""
|
712 |
+
return self.distribution.has_data_files()
|
713 |
+
|
714 |
+
# 'sub_commands': a list of commands this command might have to run to
|
715 |
+
# get its work done. See cmd.py for more info.
|
716 |
+
sub_commands = [('install_lib', has_lib),
|
717 |
+
('install_headers', has_headers),
|
718 |
+
('install_scripts', has_scripts),
|
719 |
+
('install_data', has_data),
|
720 |
+
('install_egg_info', lambda self:True),
|
721 |
+
]
|
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/install_egg_info.py
ADDED
@@ -0,0 +1,84 @@
"""distutils.command.install_egg_info

Implements the Distutils 'install_egg_info' command, for installing
a package's PKG-INFO metadata."""


from distutils.cmd import Command
from distutils import log, dir_util
import os, sys, re

class install_egg_info(Command):
    """Install an .egg-info file for the package"""

    description = "Install package's PKG-INFO metadata as an .egg-info file"
    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    @property
    def basename(self):
        """
        Allow basename to be overridden by child class.
        Ref pypa/distutils#2.
        """
        return "%s-%s-py%d.%d.egg-info" % (
            to_filename(safe_name(self.distribution.get_name())),
            to_filename(safe_version(self.distribution.get_version())),
            *sys.version_info[:2]
        )

    def finalize_options(self):
        self.set_undefined_options('install_lib',('install_dir','install_dir'))
        self.target = os.path.join(self.install_dir, self.basename)
        self.outputs = [self.target]

    def run(self):
        target = self.target
        if os.path.isdir(target) and not os.path.islink(target):
            dir_util.remove_tree(target, dry_run=self.dry_run)
        elif os.path.exists(target):
            self.execute(os.unlink,(self.target,),"Removing "+target)
        elif not os.path.isdir(self.install_dir):
            self.execute(os.makedirs, (self.install_dir,),
                         "Creating "+self.install_dir)
        log.info("Writing %s", target)
        if not self.dry_run:
            with open(target, 'w', encoding='UTF-8') as f:
                self.distribution.metadata.write_pkg_file(f)

    def get_outputs(self):
        return self.outputs


# The following routines are taken from setuptools' pkg_resources module and
# can be replaced by importing them from pkg_resources once it is included
# in the stdlib.

def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)


def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    version = version.replace(' ','.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)


def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-','_')
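Editor's note, not part of the diff above: a small sketch of what the name/version helpers defined in this file produce; the example inputs are made up.

# Illustrative sketch only: how safe_name / safe_version / to_filename
# normalise metadata before it goes into the .egg-info basename.
print(safe_name('demo_pkg'))               # 'demo-pkg'  (runs of non-alphanumerics -> '-')
print(safe_version('1.0 beta'))            # '1.0.beta'  (spaces become dots)
print(to_filename(safe_name('demo_pkg')))  # 'demo_pkg'  ('-' -> '_')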
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/py37compat.py
ADDED
@@ -0,0 +1,30 @@
import sys


def _pythonlib_compat():
    """
    On Python 3.7 and earlier, distutils would include the Python
    library. See pypa/distutils#9.
    """
    from distutils import sysconfig
    if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
        return

    yield 'python{}.{}{}'.format(
        sys.hexversion >> 24,
        (sys.hexversion >> 16) & 0xff,
        sysconfig.get_config_var('ABIFLAGS'),
    )


def compose(f1, f2):
    return lambda *args, **kwargs: f1(f2(*args, **kwargs))


pythonlib = (
    compose(list, _pythonlib_compat)
    if sys.version_info < (3, 8)
    and sys.platform != 'darwin'
    and sys.platform[:3] != 'aix'
    else list
)
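Editor's note, not part of the diff above: a one-liner showing the effect of the `pythonlib` shim on a modern interpreter.

# Illustrative sketch only: on Python 3.8+ (or macOS/AIX) `pythonlib` is plain
# `list`, so no extra link library is contributed; on <=3.7 with a shared
# libpython it would instead yield a name such as 'python3.7m'.
print(pythonlib())   # -> [] on Python 3.8+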
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/register.py
ADDED
@@ -0,0 +1,304 @@
"""distutils.command.register

Implements the Distutils 'register' command (register with the repository).
"""

# created 2002/10/21, Richard Jones

import getpass
import io
import urllib.parse, urllib.request
from warnings import warn

from distutils.core import PyPIRCCommand
from distutils.errors import *
from distutils import log

class register(PyPIRCCommand):

    description = ("register the distribution with the Python package index")
    user_options = PyPIRCCommand.user_options + [
        ('list-classifiers', None,
         'list the valid Trove classifiers'),
        ('strict', None ,
         'Will stop the registering if the meta-data are not fully compliant')
        ]
    boolean_options = PyPIRCCommand.boolean_options + [
        'verify', 'list-classifiers', 'strict']

    sub_commands = [('check', lambda self: True)]

    def initialize_options(self):
        PyPIRCCommand.initialize_options(self)
        self.list_classifiers = 0
        self.strict = 0

    def finalize_options(self):
        PyPIRCCommand.finalize_options(self)
        # setting options for the `check` subcommand
        check_options = {'strict': ('register', self.strict),
                         'restructuredtext': ('register', 1)}
        self.distribution.command_options['check'] = check_options

    def run(self):
        self.finalize_options()
        self._set_config()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        if self.dry_run:
            self.verify_metadata()
        elif self.list_classifiers:
            self.classifiers()
        else:
            self.send_metadata()

    def check_metadata(self):
        """Deprecated API."""
        warn("distutils.command.register.check_metadata is deprecated, \
              use the check command instead", PendingDeprecationWarning)
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.strict = self.strict
        check.restructuredtext = 1
        check.run()

    def _set_config(self):
        ''' Reads the configuration file and set attributes.
        '''
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']
            self.has_config = True
        else:
            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
                raise ValueError('%s not found in .pypirc' % self.repository)
            if self.repository == 'pypi':
                self.repository = self.DEFAULT_REPOSITORY
            self.has_config = False

    def classifiers(self):
        ''' Fetch the list of classifiers from the server.
        '''
        url = self.repository+'?:action=list_classifiers'
        response = urllib.request.urlopen(url)
        log.info(self._read_pypi_response(response))

    def verify_metadata(self):
        ''' Send the metadata to the package index server to be checked.
        '''
        # send the info to the server and report the result
        (code, result) = self.post_to_server(self.build_post_data('verify'))
        log.info('Server response (%s): %s', code, result)

    def send_metadata(self):
        ''' Send the metadata to the package index server.

        Well, do the following:
        1. figure who the user is, and then
        2. send the data as a Basic auth'ed POST.

        First we try to read the username/password from $HOME/.pypirc,
        which is a ConfigParser-formatted file with a section
        [distutils] containing username and password entries (both
        in clear text). Eg:

            [distutils]
            index-servers =
                pypi

            [pypi]
            username: fred
            password: sekrit

        Otherwise, to figure who the user is, we offer the user three
        choices:

         1. use existing login,
         2. register as a new user, or
         3. set the password to a random string and email the user.

        '''
        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            choice = '1'
            username = self.username
            password = self.password
        else:
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            self.announce('''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''', log.INFO)
            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'),
                auth)
            self.announce('Server response (%s): %s' % (code, result),
                          log.INFO)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    self.announce(('I can store your PyPI login so future '
                                   'submissions will be faster.'), log.INFO)
                    self.announce('(the login will be stored in %s)' % \
                                  self._get_rc_file(), log.INFO)
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        self._store_pypirc(username, password)

        elif choice == '2':
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input('   EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                log.info('Server response (%s): %s', code, result)
            else:
                log.info('You will receive an email shortly.')
                log.info(('Follow the instructions in it to '
                          'complete registration.'))
        elif choice == '3':
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            log.info('Server response (%s): %s', code, result)

    def build_post_data(self, action):
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        meta = self.distribution.metadata
        data = {
            ':action': action,
            'metadata_version' : '1.0',
            'name': meta.get_name(),
            'version': meta.get_version(),
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        if data['provides'] or data['requires'] or data['obsoletes']:
            data['metadata_version'] = '1.1'
        return data

    def post_to_server(self, data, auth=None):
        ''' Post a query to the server, and return a string response.
        '''
        if 'name' in data:
            self.announce('Registering %s to %s' % (data['name'],
                                                    self.repository),
                                                    log.INFO)
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) not in (type([]), type( () )):
                value = [value]
            for value in value:
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue().encode("utf-8")

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
            'Content-length': str(len(body))
        }
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            result = 500, str(e)
        else:
            if self.show_response:
                data = self._read_pypi_response(result)
            result = 200, 'OK'
        if self.show_response:
            msg = '\n'.join(('-' * 75, data, '-' * 75))
            self.announce(msg, log.INFO)
        return result
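Editor's note, not part of the diff above: a sketch of building (not sending) the form payload that this deprecated 'register' command would post, using a throwaway Distribution; the project name and version are made up and no network access happens.

# Illustrative sketch only: inspect the metadata dict produced by
# build_post_data() without contacting any index server.
from distutils.dist import Distribution

dist = Distribution({'name': 'demo', 'version': '0.1'})
reg = dist.get_command_obj('register')
payload = reg.build_post_data('verify')
print(payload[':action'], payload['name'], payload['metadata_version'])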
llmeval-env/lib/python3.10/site-packages/setuptools/_distutils/command/upload.py
ADDED
@@ -0,0 +1,214 @@
"""
distutils.command.upload

Implements the Distutils 'upload' subcommand (upload package to a package
index).
"""

import os
import io
import hashlib
from base64 import standard_b64encode
from urllib.request import urlopen, Request, HTTPError
from urllib.parse import urlparse
from distutils.errors import DistutilsError, DistutilsOptionError
from distutils.core import PyPIRCCommand
from distutils.spawn import spawn
from distutils import log


# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
# https://bugs.python.org/issue40698
_FILE_CONTENT_DIGESTS = {
    "md5_digest": getattr(hashlib, "md5", None),
    "sha256_digest": getattr(hashlib, "sha256", None),
    "blake2_256_digest": getattr(hashlib, "blake2b", None),
}


class upload(PyPIRCCommand):

    description = "upload binary package to PyPI"

    user_options = PyPIRCCommand.user_options + [
        ('sign', 's',
         'sign files to upload using gpg'),
        ('identity=', 'i', 'GPG identity used to sign files'),
        ]

    boolean_options = PyPIRCCommand.boolean_options + ['sign']

    def initialize_options(self):
        PyPIRCCommand.initialize_options(self)
        self.username = ''
        self.password = ''
        self.show_response = 0
        self.sign = False
        self.identity = None

    def finalize_options(self):
        PyPIRCCommand.finalize_options(self)
        if self.identity and not self.sign:
            raise DistutilsOptionError(
                "Must use --sign for --identity to have meaning"
            )
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']

        # getting the password from the distribution
        # if previously set by the register command
        if not self.password and self.distribution.password:
            self.password = self.distribution.password

    def run(self):
        if not self.distribution.dist_files:
            msg = ("Must create and upload files in one command "
                   "(e.g. setup.py sdist upload)")
            raise DistutilsOptionError(msg)
        for command, pyversion, filename in self.distribution.dist_files:
            self.upload_file(command, pyversion, filename)

    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()

        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,

            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }

        data['comment'] = ''

        # file content digests
        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
            if digest_cons is None:
                continue
            try:
                data[digest_name] = digest_cons(content).hexdigest()
            except ValueError:
                # hash digest not available or blocked by security policy
                pass

        if self.sign:
            with open(filename + ".asc", "rb") as f:
                data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                         f.read())

        # set up the authentication
        user_pass = (self.username + ":" + self.password).encode('ascii')
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\r\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--\r\n'
        body = io.BytesIO()
        for key, value in data.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = str(value).encode('utf-8')
                body.write(sep_boundary)
                body.write(title.encode('utf-8'))
                body.write(b"\r\n\r\n")
                body.write(value)
        body.write(end_boundary)
        body = body.getvalue()

        msg = "Submitting %s to %s" % (filename, self.repository)
        self.announce(msg, log.INFO)

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth,
        }

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
        except HTTPError as e:
            status = e.code
            reason = e.msg
        except OSError as e:
            self.announce(str(e), log.ERROR)
            raise

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
            if self.show_response:
                text = self._read_pypi_response(result)
                msg = '\n'.join(('-' * 75, text, '-' * 75))
                self.announce(msg, log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)
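Editor's note, not part of the diff above: a sketch of deriving the content digests the same way upload_file() does, using the _FILE_CONTENT_DIGESTS table defined in this file on some in-memory bytes (the bytes are made up).

# Illustrative sketch only: compute the digest form fields for a payload.
content = b'example archive bytes'
digests = {name: cons(content).hexdigest()
           for name, cons in _FILE_CONTENT_DIGESTS.items() if cons is not None}
print(sorted(digests))   # ['blake2_256_digest', 'md5_digest', 'sha256_digest']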
llmeval-env/lib/python3.10/site-packages/transformers/activations.py
ADDED
@@ -0,0 +1,239 @@
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import math
from collections import OrderedDict

import torch
from packaging import version
from torch import Tensor, nn

from .utils import logging


logger = logging.get_logger(__name__)


class PytorchGELUTanh(nn.Module):
    """
    A fast C implementation of the tanh approximation of the GeLU activation function. See
    https://arxiv.org/abs/1606.08415.

    This implementation is equivalent to NewGELU and FastGELU but much faster. However, it is not an exact numerical
    match due to rounding errors.
    """

    def __init__(self):
        super().__init__()
        if version.parse(torch.__version__) < version.parse("1.12.0"):
            raise ImportError(
                f"You are using torch=={torch.__version__}, but torch>=1.12.0 is required to use "
                "PytorchGELUTanh. Please upgrade torch."
            )

    def forward(self, input: Tensor) -> Tensor:
        return nn.functional.gelu(input, approximate="tanh")


class NewGELUActivation(nn.Module):
    """
    Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see
    the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415
    """

    def forward(self, input: Tensor) -> Tensor:
        return 0.5 * input * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (input + 0.044715 * torch.pow(input, 3.0))))


class GELUActivation(nn.Module):
    """
    Original Implementation of the GELU activation function in Google BERT repo when initially created. For
    information: OpenAI GPT's GELU is slightly different (and gives slightly different results): 0.5 * x * (1 +
    torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) This is now written in C in nn.functional
    Also see the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415
    """

    def __init__(self, use_gelu_python: bool = False):
        super().__init__()
        if use_gelu_python:
            self.act = self._gelu_python
        else:
            self.act = nn.functional.gelu

    def _gelu_python(self, input: Tensor) -> Tensor:
        return input * 0.5 * (1.0 + torch.erf(input / math.sqrt(2.0)))

    def forward(self, input: Tensor) -> Tensor:
        return self.act(input)


class FastGELUActivation(nn.Module):
    """
    Applies GELU approximation that is slower than QuickGELU but more accurate. See: https://github.com/hendrycks/GELUs
    """

    def forward(self, input: Tensor) -> Tensor:
        return 0.5 * input * (1.0 + torch.tanh(input * 0.7978845608 * (1.0 + 0.044715 * input * input)))


class QuickGELUActivation(nn.Module):
    """
    Applies GELU approximation that is fast but somewhat inaccurate. See: https://github.com/hendrycks/GELUs
    """

    def forward(self, input: Tensor) -> Tensor:
        return input * torch.sigmoid(1.702 * input)


class ClippedGELUActivation(nn.Module):
    """
    Clip the range of possible GeLU outputs between [min, max]. This is especially useful for quantization purpose, as
    it allows mapping negatives values in the GeLU spectrum. For more information on this trick, please refer to
    https://arxiv.org/abs/2004.09602.

    Gaussian Error Linear Unit. Original Implementation of the gelu activation function in Google Bert repo when
    initially created.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results): 0.5 * x * (1 +
    torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))). See https://arxiv.org/abs/1606.08415
    """

    def __init__(self, min: float, max: float):
        if min > max:
            raise ValueError(f"min should be < max (got min: {min}, max: {max})")
|
115 |
+
|
116 |
+
super().__init__()
|
117 |
+
self.min = min
|
118 |
+
self.max = max
|
119 |
+
|
120 |
+
def forward(self, x: Tensor) -> Tensor:
|
121 |
+
return torch.clip(gelu(x), self.min, self.max)
|
122 |
+
|
123 |
+
|
124 |
+
class AccurateGELUActivation(nn.Module):
|
125 |
+
"""
|
126 |
+
Applies GELU approximation that is faster than default and more accurate than QuickGELU. See:
|
127 |
+
https://github.com/hendrycks/GELUs
|
128 |
+
|
129 |
+
Implemented along with MEGA (Moving Average Equipped Gated Attention)
|
130 |
+
"""
|
131 |
+
|
132 |
+
def __init__(self):
|
133 |
+
super().__init__()
|
134 |
+
self.precomputed_constant = math.sqrt(2 / math.pi)
|
135 |
+
|
136 |
+
def forward(self, input: Tensor) -> Tensor:
|
137 |
+
return 0.5 * input * (1 + torch.tanh(self.precomputed_constant * (input + 0.044715 * torch.pow(input, 3))))
|
138 |
+
|
139 |
+
|
140 |
+
class MishActivation(nn.Module):
|
141 |
+
"""
|
142 |
+
See Mish: A Self-Regularized Non-Monotonic Activation Function (Misra., https://arxiv.org/abs/1908.08681). Also
|
143 |
+
visit the official repository for the paper: https://github.com/digantamisra98/Mish
|
144 |
+
"""
|
145 |
+
|
146 |
+
def __init__(self):
|
147 |
+
super().__init__()
|
148 |
+
if version.parse(torch.__version__) < version.parse("1.9.0"):
|
149 |
+
self.act = self._mish_python
|
150 |
+
else:
|
151 |
+
self.act = nn.functional.mish
|
152 |
+
|
153 |
+
def _mish_python(self, input: Tensor) -> Tensor:
|
154 |
+
return input * torch.tanh(nn.functional.softplus(input))
|
155 |
+
|
156 |
+
def forward(self, input: Tensor) -> Tensor:
|
157 |
+
return self.act(input)
|
158 |
+
|
159 |
+
|
160 |
+
class LinearActivation(nn.Module):
|
161 |
+
"""
|
162 |
+
Applies the linear activation function, i.e. forwarding input directly to output.
|
163 |
+
"""
|
164 |
+
|
165 |
+
def forward(self, input: Tensor) -> Tensor:
|
166 |
+
return input
|
167 |
+
|
168 |
+
|
169 |
+
class LaplaceActivation(nn.Module):
|
170 |
+
"""
|
171 |
+
Applies elementwise activation based on Laplace function, introduced in MEGA as an attention activation. See
|
172 |
+
https://arxiv.org/abs/2209.10655
|
173 |
+
|
174 |
+
Inspired by squared relu, but with bounded range and gradient for better stability
|
175 |
+
"""
|
176 |
+
|
177 |
+
def forward(self, input, mu=0.707107, sigma=0.282095):
|
178 |
+
input = (input - mu).div(sigma * math.sqrt(2.0))
|
179 |
+
return 0.5 * (1.0 + torch.erf(input))
|
180 |
+
|
181 |
+
|
182 |
+
class ReLUSquaredActivation(nn.Module):
|
183 |
+
"""
|
184 |
+
Applies the relu^2 activation introduced in https://arxiv.org/abs/2109.08668v2
|
185 |
+
"""
|
186 |
+
|
187 |
+
def forward(self, input):
|
188 |
+
relu_applied = nn.functional.relu(input)
|
189 |
+
squared = torch.square(relu_applied)
|
190 |
+
return squared
|
191 |
+
|
192 |
+
|
193 |
+
class ClassInstantier(OrderedDict):
|
194 |
+
def __getitem__(self, key):
|
195 |
+
content = super().__getitem__(key)
|
196 |
+
cls, kwargs = content if isinstance(content, tuple) else (content, {})
|
197 |
+
return cls(**kwargs)
|
198 |
+
|
199 |
+
|
200 |
+
ACT2CLS = {
|
201 |
+
"gelu": GELUActivation,
|
202 |
+
"gelu_10": (ClippedGELUActivation, {"min": -10, "max": 10}),
|
203 |
+
"gelu_fast": FastGELUActivation,
|
204 |
+
"gelu_new": NewGELUActivation,
|
205 |
+
"gelu_python": (GELUActivation, {"use_gelu_python": True}),
|
206 |
+
"gelu_pytorch_tanh": PytorchGELUTanh,
|
207 |
+
"gelu_accurate": AccurateGELUActivation,
|
208 |
+
"laplace": LaplaceActivation,
|
209 |
+
"leaky_relu": nn.LeakyReLU,
|
210 |
+
"linear": LinearActivation,
|
211 |
+
"mish": MishActivation,
|
212 |
+
"quick_gelu": QuickGELUActivation,
|
213 |
+
"relu": nn.ReLU,
|
214 |
+
"relu2": ReLUSquaredActivation,
|
215 |
+
"relu6": nn.ReLU6,
|
216 |
+
"sigmoid": nn.Sigmoid,
|
217 |
+
"silu": nn.SiLU,
|
218 |
+
"swish": nn.SiLU,
|
219 |
+
"tanh": nn.Tanh,
|
220 |
+
}
|
221 |
+
ACT2FN = ClassInstantier(ACT2CLS)
|
222 |
+
|
223 |
+
|
224 |
+
def get_activation(activation_string):
|
225 |
+
if activation_string in ACT2FN:
|
226 |
+
return ACT2FN[activation_string]
|
227 |
+
else:
|
228 |
+
raise KeyError(f"function {activation_string} not found in ACT2FN mapping {list(ACT2FN.keys())}")
|
229 |
+
|
230 |
+
|
231 |
+
# For backwards compatibility with: from activations import gelu_python
|
232 |
+
gelu_python = get_activation("gelu_python")
|
233 |
+
gelu_new = get_activation("gelu_new")
|
234 |
+
gelu = get_activation("gelu")
|
235 |
+
gelu_fast = get_activation("gelu_fast")
|
236 |
+
quick_gelu = get_activation("quick_gelu")
|
237 |
+
silu = get_activation("silu")
|
238 |
+
mish = get_activation("mish")
|
239 |
+
linear_act = get_activation("linear")
|
llmeval-env/lib/python3.10/site-packages/transformers/activations_tf.py
ADDED
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2020 The HuggingFace Team. All rights reserved.
|
2 |
+
#
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
#
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
#
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
import math
|
16 |
+
|
17 |
+
import tensorflow as tf
|
18 |
+
from packaging.version import parse
|
19 |
+
|
20 |
+
|
21 |
+
try:
|
22 |
+
import tf_keras as keras
|
23 |
+
except (ModuleNotFoundError, ImportError):
|
24 |
+
import keras
|
25 |
+
|
26 |
+
if parse(keras.__version__).major > 2:
|
27 |
+
raise ValueError(
|
28 |
+
"Your currently installed version of Keras is Keras 3, but this is not yet supported in "
|
29 |
+
"Transformers. Please install the backwards-compatible tf-keras package with "
|
30 |
+
"`pip install tf-keras`."
|
31 |
+
)
|
32 |
+
|
33 |
+
|
34 |
+
def _gelu(x):
|
35 |
+
"""
|
36 |
+
Gaussian Error Linear Unit. Original Implementation of the gelu activation function in Google Bert repo when
|
37 |
+
initially created. For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
|
38 |
+
0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) Also see
|
39 |
+
https://arxiv.org/abs/1606.08415
|
40 |
+
"""
|
41 |
+
x = tf.convert_to_tensor(x)
|
42 |
+
cdf = 0.5 * (1.0 + tf.math.erf(x / tf.cast(tf.sqrt(2.0), x.dtype)))
|
43 |
+
|
44 |
+
return x * cdf
|
45 |
+
|
46 |
+
|
47 |
+
def _gelu_new(x):
|
48 |
+
"""
|
49 |
+
Gaussian Error Linear Unit. This is a smoother version of the GELU. Original paper: https://arxiv.org/abs/1606.0841
|
50 |
+
|
51 |
+
Args:
|
52 |
+
x: float Tensor to perform activation
|
53 |
+
|
54 |
+
Returns:
|
55 |
+
`x` with the GELU activation applied.
|
56 |
+
"""
|
57 |
+
x = tf.convert_to_tensor(x)
|
58 |
+
pi = tf.cast(math.pi, x.dtype)
|
59 |
+
coeff = tf.cast(0.044715, x.dtype)
|
60 |
+
cdf = 0.5 * (1.0 + tf.tanh(tf.sqrt(2.0 / pi) * (x + coeff * tf.pow(x, 3))))
|
61 |
+
|
62 |
+
return x * cdf
|
63 |
+
|
64 |
+
|
65 |
+
def mish(x):
|
66 |
+
x = tf.convert_to_tensor(x)
|
67 |
+
|
68 |
+
return x * tf.tanh(tf.math.softplus(x))
|
69 |
+
|
70 |
+
|
71 |
+
def gelu_fast(x):
|
72 |
+
x = tf.convert_to_tensor(x)
|
73 |
+
coeff1 = tf.cast(0.044715, x.dtype)
|
74 |
+
coeff2 = tf.cast(0.7978845608, x.dtype)
|
75 |
+
|
76 |
+
return 0.5 * x * (1.0 + tf.tanh(x * coeff2 * (1.0 + coeff1 * x * x)))
|
77 |
+
|
78 |
+
|
79 |
+
def quick_gelu(x):
|
80 |
+
x = tf.convert_to_tensor(x)
|
81 |
+
coeff = tf.cast(1.702, x.dtype)
|
82 |
+
return x * tf.math.sigmoid(coeff * x)
|
83 |
+
|
84 |
+
|
85 |
+
def gelu_10(x):
|
86 |
+
"""
|
87 |
+
Clip the range of possible GeLU outputs between [-10, 10]. This is especially useful for quantization purpose, as
|
88 |
+
it allows mapping 2 negatives values in the GeLU spectrum. For more information on this trick, please refer to
|
89 |
+
https://arxiv.org/abs/2004.09602
|
90 |
+
|
91 |
+
Gaussian Error Linear Unit. Original Implementation of the gelu activation function in Google Bert repo when
|
92 |
+
initially created. For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
|
93 |
+
0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) Also see
|
94 |
+
https://arxiv.org/abs/1606.08415 :param x: :return:
|
95 |
+
"""
|
96 |
+
return tf.clip_by_value(_gelu(x), -10, 10)
|
97 |
+
|
98 |
+
|
99 |
+
def glu(x, axis=-1):
|
100 |
+
"""
|
101 |
+
Gated Linear Unit. Implementation as defined in the original paper (see https://arxiv.org/abs/1612.08083), where
|
102 |
+
the input `x` is split in two halves across a dimension (`axis`), A and B, returning A * sigmoid(B).
|
103 |
+
|
104 |
+
Args:
|
105 |
+
`x`: float Tensor to perform activation
|
106 |
+
`axis`: dimension across which `x` be split in half
|
107 |
+
|
108 |
+
Returns:
|
109 |
+
`x` with the GLU activation applied (with its size halved across the dimension `axis`).
|
110 |
+
"""
|
111 |
+
a, b = tf.split(x, 2, axis=axis)
|
112 |
+
return a * tf.math.sigmoid(b)
|
113 |
+
|
114 |
+
|
115 |
+
if parse(tf.version.VERSION) >= parse("2.4"):
|
116 |
+
|
117 |
+
def approximate_gelu_wrap(x):
|
118 |
+
return keras.activations.gelu(x, approximate=True)
|
119 |
+
|
120 |
+
gelu = keras.activations.gelu
|
121 |
+
gelu_new = approximate_gelu_wrap
|
122 |
+
else:
|
123 |
+
gelu = _gelu
|
124 |
+
gelu_new = _gelu_new
|
125 |
+
|
126 |
+
|
127 |
+
ACT2FN = {
|
128 |
+
"gelu": gelu,
|
129 |
+
"gelu_10": gelu_10,
|
130 |
+
"gelu_fast": gelu_fast,
|
131 |
+
"gelu_new": gelu_new,
|
132 |
+
"glu": glu,
|
133 |
+
"mish": mish,
|
134 |
+
"quick_gelu": quick_gelu,
|
135 |
+
"relu": keras.activations.relu,
|
136 |
+
"sigmoid": keras.activations.sigmoid,
|
137 |
+
"silu": keras.activations.swish,
|
138 |
+
"swish": keras.activations.swish,
|
139 |
+
"tanh": keras.activations.tanh,
|
140 |
+
}
|
141 |
+
|
142 |
+
|
143 |
+
def get_tf_activation(activation_string):
|
144 |
+
if activation_string in ACT2FN:
|
145 |
+
return ACT2FN[activation_string]
|
146 |
+
else:
|
147 |
+
raise KeyError(f"function {activation_string} not found in ACT2FN mapping {list(ACT2FN.keys())}")
|
llmeval-env/lib/python3.10/site-packages/transformers/audio_utils.py
ADDED
@@ -0,0 +1,825 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# coding=utf-8
|
2 |
+
# Copyright 2023 The HuggingFace Inc. team and the librosa & torchaudio authors.
|
3 |
+
#
|
4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
+
# you may not use this file except in compliance with the License.
|
6 |
+
# You may obtain a copy of the License at
|
7 |
+
#
|
8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9 |
+
#
|
10 |
+
# Unless required by applicable law or agreed to in writing, software
|
11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
+
# See the License for the specific language governing permissions and
|
14 |
+
# limitations under the License.
|
15 |
+
"""
|
16 |
+
Audio processing functions to extract features from audio waveforms. This code is pure numpy to support all frameworks
|
17 |
+
and remove unnecessary dependencies.
|
18 |
+
"""
|
19 |
+
import warnings
|
20 |
+
from typing import Optional, Tuple, Union
|
21 |
+
|
22 |
+
import numpy as np
|
23 |
+
|
24 |
+
|
25 |
+
def hertz_to_mel(freq: Union[float, np.ndarray], mel_scale: str = "htk") -> Union[float, np.ndarray]:
|
26 |
+
"""
|
27 |
+
Convert frequency from hertz to mels.
|
28 |
+
|
29 |
+
Args:
|
30 |
+
freq (`float` or `np.ndarray`):
|
31 |
+
The frequency, or multiple frequencies, in hertz (Hz).
|
32 |
+
mel_scale (`str`, *optional*, defaults to `"htk"`):
|
33 |
+
The mel frequency scale to use, `"htk"`, `"kaldi"` or `"slaney"`.
|
34 |
+
|
35 |
+
Returns:
|
36 |
+
`float` or `np.ndarray`: The frequencies on the mel scale.
|
37 |
+
"""
|
38 |
+
|
39 |
+
if mel_scale not in ["slaney", "htk", "kaldi"]:
|
40 |
+
raise ValueError('mel_scale should be one of "htk", "slaney" or "kaldi".')
|
41 |
+
|
42 |
+
if mel_scale == "htk":
|
43 |
+
return 2595.0 * np.log10(1.0 + (freq / 700.0))
|
44 |
+
elif mel_scale == "kaldi":
|
45 |
+
return 1127.0 * np.log(1.0 + (freq / 700.0))
|
46 |
+
|
47 |
+
min_log_hertz = 1000.0
|
48 |
+
min_log_mel = 15.0
|
49 |
+
logstep = 27.0 / np.log(6.4)
|
50 |
+
mels = 3.0 * freq / 200.0
|
51 |
+
|
52 |
+
if isinstance(freq, np.ndarray):
|
53 |
+
log_region = freq >= min_log_hertz
|
54 |
+
mels[log_region] = min_log_mel + np.log(freq[log_region] / min_log_hertz) * logstep
|
55 |
+
elif freq >= min_log_hertz:
|
56 |
+
mels = min_log_mel + np.log(freq / min_log_hertz) * logstep
|
57 |
+
|
58 |
+
return mels
|
59 |
+
|
60 |
+
|
61 |
+
def mel_to_hertz(mels: Union[float, np.ndarray], mel_scale: str = "htk") -> Union[float, np.ndarray]:
|
62 |
+
"""
|
63 |
+
Convert frequency from mels to hertz.
|
64 |
+
|
65 |
+
Args:
|
66 |
+
mels (`float` or `np.ndarray`):
|
67 |
+
The frequency, or multiple frequencies, in mels.
|
68 |
+
mel_scale (`str`, *optional*, `"htk"`):
|
69 |
+
The mel frequency scale to use, `"htk"`, `"kaldi"` or `"slaney"`.
|
70 |
+
|
71 |
+
Returns:
|
72 |
+
`float` or `np.ndarray`: The frequencies in hertz.
|
73 |
+
"""
|
74 |
+
|
75 |
+
if mel_scale not in ["slaney", "htk", "kaldi"]:
|
76 |
+
raise ValueError('mel_scale should be one of "htk", "slaney" or "kaldi".')
|
77 |
+
|
78 |
+
if mel_scale == "htk":
|
79 |
+
return 700.0 * (np.power(10, mels / 2595.0) - 1.0)
|
80 |
+
elif mel_scale == "kaldi":
|
81 |
+
return 700.0 * (np.exp(mels / 1127.0) - 1.0)
|
82 |
+
|
83 |
+
min_log_hertz = 1000.0
|
84 |
+
min_log_mel = 15.0
|
85 |
+
logstep = np.log(6.4) / 27.0
|
86 |
+
freq = 200.0 * mels / 3.0
|
87 |
+
|
88 |
+
if isinstance(mels, np.ndarray):
|
89 |
+
log_region = mels >= min_log_mel
|
90 |
+
freq[log_region] = min_log_hertz * np.exp(logstep * (mels[log_region] - min_log_mel))
|
91 |
+
elif mels >= min_log_mel:
|
92 |
+
freq = min_log_hertz * np.exp(logstep * (mels - min_log_mel))
|
93 |
+
|
94 |
+
return freq
|
95 |
+
|
96 |
+
|
97 |
+
def hertz_to_octave(
|
98 |
+
freq: Union[float, np.ndarray], tuning: Optional[float] = 0.0, bins_per_octave: Optional[int] = 12
|
99 |
+
):
|
100 |
+
"""
|
101 |
+
Convert frequency from hertz to fractional octave numbers.
|
102 |
+
Adapted from *librosa*.
|
103 |
+
|
104 |
+
Args:
|
105 |
+
freq (`float` or `np.ndarray`):
|
106 |
+
The frequency, or multiple frequencies, in hertz (Hz).
|
107 |
+
tuning (`float`, defaults to `0.`):
|
108 |
+
Tuning deviation from the Stuttgart pitch (A440) in (fractional) bins per octave.
|
109 |
+
bins_per_octave (`int`, defaults to `12`):
|
110 |
+
Number of bins per octave.
|
111 |
+
|
112 |
+
Returns:
|
113 |
+
`float` or `np.ndarray`: The frequencies on the octave scale.
|
114 |
+
"""
|
115 |
+
stuttgart_pitch = 440.0 * 2.0 ** (tuning / bins_per_octave)
|
116 |
+
octave = np.log2(freq / (float(stuttgart_pitch) / 16))
|
117 |
+
return octave
|
118 |
+
|
119 |
+
|
120 |
+
def _create_triangular_filter_bank(fft_freqs: np.ndarray, filter_freqs: np.ndarray) -> np.ndarray:
|
121 |
+
"""
|
122 |
+
Creates a triangular filter bank.
|
123 |
+
|
124 |
+
Adapted from *torchaudio* and *librosa*.
|
125 |
+
|
126 |
+
Args:
|
127 |
+
fft_freqs (`np.ndarray` of shape `(num_frequency_bins,)`):
|
128 |
+
Discrete frequencies of the FFT bins in Hz.
|
129 |
+
filter_freqs (`np.ndarray` of shape `(num_mel_filters,)`):
|
130 |
+
Center frequencies of the triangular filters to create, in Hz.
|
131 |
+
|
132 |
+
Returns:
|
133 |
+
`np.ndarray` of shape `(num_frequency_bins, num_mel_filters)`
|
134 |
+
"""
|
135 |
+
filter_diff = np.diff(filter_freqs)
|
136 |
+
slopes = np.expand_dims(filter_freqs, 0) - np.expand_dims(fft_freqs, 1)
|
137 |
+
down_slopes = -slopes[:, :-2] / filter_diff[:-1]
|
138 |
+
up_slopes = slopes[:, 2:] / filter_diff[1:]
|
139 |
+
return np.maximum(np.zeros(1), np.minimum(down_slopes, up_slopes))
|
140 |
+
|
141 |
+
|
142 |
+
def chroma_filter_bank(
|
143 |
+
num_frequency_bins: int,
|
144 |
+
num_chroma: int,
|
145 |
+
sampling_rate: int,
|
146 |
+
tuning: float = 0.0,
|
147 |
+
power: Optional[float] = 2.0,
|
148 |
+
weighting_parameters: Optional[Tuple[float]] = (5.0, 2),
|
149 |
+
start_at_c_chroma: Optional[bool] = True,
|
150 |
+
):
|
151 |
+
"""
|
152 |
+
Creates a chroma filter bank, i.e a linear transformation to project spectrogram bins onto chroma bins.
|
153 |
+
|
154 |
+
Adapted from *librosa*.
|
155 |
+
|
156 |
+
Args:
|
157 |
+
num_frequency_bins (`int`):
|
158 |
+
Number of frequencies used to compute the spectrogram (should be the same as in `stft`).
|
159 |
+
num_chroma (`int`):
|
160 |
+
Number of chroma bins (i.e pitch classes).
|
161 |
+
sampling_rate (`float`):
|
162 |
+
Sample rate of the audio waveform.
|
163 |
+
tuning (`float`):
|
164 |
+
Tuning deviation from A440 in fractions of a chroma bin.
|
165 |
+
power (`float`, *optional*, defaults to 2.0):
|
166 |
+
If 12.0, normalizes each column with their L2 norm. If 1.0, normalizes each column with their L1 norm.
|
167 |
+
weighting_parameters (`Tuple[float]`, *optional*, defaults to `(5., 2.)`):
|
168 |
+
If specified, apply a Gaussian weighting parameterized by the first element of the tuple being the center and
|
169 |
+
the second element being the Gaussian half-width.
|
170 |
+
start_at_c_chroma (`float`, *optional*, defaults to `True`):
|
171 |
+
If True, the filter bank will start at the 'C' pitch class. Otherwise, it will start at 'A'.
|
172 |
+
Returns:
|
173 |
+
`np.ndarray` of shape `(num_frequency_bins, num_chroma)`
|
174 |
+
"""
|
175 |
+
# Get the FFT bins, not counting the DC component
|
176 |
+
frequencies = np.linspace(0, sampling_rate, num_frequency_bins, endpoint=False)[1:]
|
177 |
+
|
178 |
+
freq_bins = num_chroma * hertz_to_octave(frequencies, tuning=tuning, bins_per_octave=num_chroma)
|
179 |
+
|
180 |
+
# make up a value for the 0 Hz bin = 1.5 octaves below bin 1
|
181 |
+
# (so chroma is 50% rotated from bin 1, and bin width is broad)
|
182 |
+
freq_bins = np.concatenate(([freq_bins[0] - 1.5 * num_chroma], freq_bins))
|
183 |
+
|
184 |
+
bins_width = np.concatenate((np.maximum(freq_bins[1:] - freq_bins[:-1], 1.0), [1]))
|
185 |
+
|
186 |
+
chroma_filters = np.subtract.outer(freq_bins, np.arange(0, num_chroma, dtype="d")).T
|
187 |
+
|
188 |
+
num_chroma2 = np.round(float(num_chroma) / 2)
|
189 |
+
|
190 |
+
# Project into range -num_chroma/2 .. num_chroma/2
|
191 |
+
# add on fixed offset of 10*num_chroma to ensure all values passed to
|
192 |
+
# rem are positive
|
193 |
+
chroma_filters = np.remainder(chroma_filters + num_chroma2 + 10 * num_chroma, num_chroma) - num_chroma2
|
194 |
+
|
195 |
+
# Gaussian bumps - 2*D to make them narrower
|
196 |
+
chroma_filters = np.exp(-0.5 * (2 * chroma_filters / np.tile(bins_width, (num_chroma, 1))) ** 2)
|
197 |
+
|
198 |
+
# normalize each column
|
199 |
+
if power is not None:
|
200 |
+
chroma_filters = chroma_filters / np.sum(chroma_filters**power, axis=0, keepdims=True) ** (1.0 / power)
|
201 |
+
|
202 |
+
# Maybe apply scaling for fft bins
|
203 |
+
if weighting_parameters is not None:
|
204 |
+
center, half_width = weighting_parameters
|
205 |
+
chroma_filters *= np.tile(
|
206 |
+
np.exp(-0.5 * (((freq_bins / num_chroma - center) / half_width) ** 2)),
|
207 |
+
(num_chroma, 1),
|
208 |
+
)
|
209 |
+
|
210 |
+
if start_at_c_chroma:
|
211 |
+
chroma_filters = np.roll(chroma_filters, -3 * (num_chroma // 12), axis=0)
|
212 |
+
|
213 |
+
# remove aliasing columns, copy to ensure row-contiguity
|
214 |
+
return np.ascontiguousarray(chroma_filters[:, : int(1 + num_frequency_bins / 2)])
|
215 |
+
|
216 |
+
|
217 |
+
def mel_filter_bank(
|
218 |
+
num_frequency_bins: int,
|
219 |
+
num_mel_filters: int,
|
220 |
+
min_frequency: float,
|
221 |
+
max_frequency: float,
|
222 |
+
sampling_rate: int,
|
223 |
+
norm: Optional[str] = None,
|
224 |
+
mel_scale: str = "htk",
|
225 |
+
triangularize_in_mel_space: bool = False,
|
226 |
+
) -> np.ndarray:
|
227 |
+
"""
|
228 |
+
Creates a frequency bin conversion matrix used to obtain a mel spectrogram. This is called a *mel filter bank*, and
|
229 |
+
various implementation exist, which differ in the number of filters, the shape of the filters, the way the filters
|
230 |
+
are spaced, the bandwidth of the filters, and the manner in which the spectrum is warped. The goal of these
|
231 |
+
features is to approximate the non-linear human perception of the variation in pitch with respect to the frequency.
|
232 |
+
|
233 |
+
Different banks of mel filters were introduced in the literature. The following variations are supported:
|
234 |
+
|
235 |
+
- MFCC FB-20: introduced in 1980 by Davis and Mermelstein, it assumes a sampling frequency of 10 kHz and a speech
|
236 |
+
bandwidth of `[0, 4600]` Hz.
|
237 |
+
- MFCC FB-24 HTK: from the Cambridge HMM Toolkit (HTK) (1995) uses a filter bank of 24 filters for a speech
|
238 |
+
bandwidth of `[0, 8000]` Hz. This assumes sampling rate ≥ 16 kHz.
|
239 |
+
- MFCC FB-40: from the Auditory Toolbox for MATLAB written by Slaney in 1998, assumes a sampling rate of 16 kHz and
|
240 |
+
speech bandwidth of `[133, 6854]` Hz. This version also includes area normalization.
|
241 |
+
- HFCC-E FB-29 (Human Factor Cepstral Coefficients) of Skowronski and Harris (2004), assumes a sampling rate of
|
242 |
+
12.5 kHz and speech bandwidth of `[0, 6250]` Hz.
|
243 |
+
|
244 |
+
This code is adapted from *torchaudio* and *librosa*. Note that the default parameters of torchaudio's
|
245 |
+
`melscale_fbanks` implement the `"htk"` filters while librosa uses the `"slaney"` implementation.
|
246 |
+
|
247 |
+
Args:
|
248 |
+
num_frequency_bins (`int`):
|
249 |
+
Number of frequencies used to compute the spectrogram (should be the same as in `stft`).
|
250 |
+
num_mel_filters (`int`):
|
251 |
+
Number of mel filters to generate.
|
252 |
+
min_frequency (`float`):
|
253 |
+
Lowest frequency of interest in Hz.
|
254 |
+
max_frequency (`float`):
|
255 |
+
Highest frequency of interest in Hz. This should not exceed `sampling_rate / 2`.
|
256 |
+
sampling_rate (`int`):
|
257 |
+
Sample rate of the audio waveform.
|
258 |
+
norm (`str`, *optional*):
|
259 |
+
If `"slaney"`, divide the triangular mel weights by the width of the mel band (area normalization).
|
260 |
+
mel_scale (`str`, *optional*, defaults to `"htk"`):
|
261 |
+
The mel frequency scale to use, `"htk"`, `"kaldi"` or `"slaney"`.
|
262 |
+
triangularize_in_mel_space (`bool`, *optional*, defaults to `False`):
|
263 |
+
If this option is enabled, the triangular filter is applied in mel space rather than frequency space. This
|
264 |
+
should be set to `true` in order to get the same results as `torchaudio` when computing mel filters.
|
265 |
+
|
266 |
+
Returns:
|
267 |
+
`np.ndarray` of shape (`num_frequency_bins`, `num_mel_filters`): Triangular filter bank matrix. This is a
|
268 |
+
projection matrix to go from a spectrogram to a mel spectrogram.
|
269 |
+
"""
|
270 |
+
if norm is not None and norm != "slaney":
|
271 |
+
raise ValueError('norm must be one of None or "slaney"')
|
272 |
+
|
273 |
+
# center points of the triangular mel filters
|
274 |
+
mel_min = hertz_to_mel(min_frequency, mel_scale=mel_scale)
|
275 |
+
mel_max = hertz_to_mel(max_frequency, mel_scale=mel_scale)
|
276 |
+
mel_freqs = np.linspace(mel_min, mel_max, num_mel_filters + 2)
|
277 |
+
filter_freqs = mel_to_hertz(mel_freqs, mel_scale=mel_scale)
|
278 |
+
|
279 |
+
if triangularize_in_mel_space:
|
280 |
+
# frequencies of FFT bins in Hz, but filters triangularized in mel space
|
281 |
+
fft_bin_width = sampling_rate / (num_frequency_bins * 2)
|
282 |
+
fft_freqs = hertz_to_mel(fft_bin_width * np.arange(num_frequency_bins), mel_scale=mel_scale)
|
283 |
+
filter_freqs = mel_freqs
|
284 |
+
else:
|
285 |
+
# frequencies of FFT bins in Hz
|
286 |
+
fft_freqs = np.linspace(0, sampling_rate // 2, num_frequency_bins)
|
287 |
+
|
288 |
+
mel_filters = _create_triangular_filter_bank(fft_freqs, filter_freqs)
|
289 |
+
|
290 |
+
if norm is not None and norm == "slaney":
|
291 |
+
# Slaney-style mel is scaled to be approx constant energy per channel
|
292 |
+
enorm = 2.0 / (filter_freqs[2 : num_mel_filters + 2] - filter_freqs[:num_mel_filters])
|
293 |
+
mel_filters *= np.expand_dims(enorm, 0)
|
294 |
+
|
295 |
+
if (mel_filters.max(axis=0) == 0.0).any():
|
296 |
+
warnings.warn(
|
297 |
+
"At least one mel filter has all zero values. "
|
298 |
+
f"The value for `num_mel_filters` ({num_mel_filters}) may be set too high. "
|
299 |
+
f"Or, the value for `num_frequency_bins` ({num_frequency_bins}) may be set too low."
|
300 |
+
)
|
301 |
+
|
302 |
+
return mel_filters
|
303 |
+
|
304 |
+
|
305 |
+
def optimal_fft_length(window_length: int) -> int:
|
306 |
+
"""
|
307 |
+
Finds the best FFT input size for a given `window_length`. This function takes a given window length and, if not
|
308 |
+
already a power of two, rounds it up to the next power or two.
|
309 |
+
|
310 |
+
The FFT algorithm works fastest when the length of the input is a power of two, which may be larger than the size
|
311 |
+
of the window or analysis frame. For example, if the window is 400 samples, using an FFT input size of 512 samples
|
312 |
+
is more optimal than an FFT size of 400 samples. Using a larger FFT size does not affect the detected frequencies,
|
313 |
+
it simply gives a higher frequency resolution (i.e. the frequency bins are smaller).
|
314 |
+
"""
|
315 |
+
return 2 ** int(np.ceil(np.log2(window_length)))
|
316 |
+
|
317 |
+
|
318 |
+
def window_function(
|
319 |
+
window_length: int,
|
320 |
+
name: str = "hann",
|
321 |
+
periodic: bool = True,
|
322 |
+
frame_length: Optional[int] = None,
|
323 |
+
center: bool = True,
|
324 |
+
) -> np.ndarray:
|
325 |
+
"""
|
326 |
+
Returns an array containing the specified window. This window is intended to be used with `stft`.
|
327 |
+
|
328 |
+
The following window types are supported:
|
329 |
+
|
330 |
+
- `"boxcar"`: a rectangular window
|
331 |
+
- `"hamming"`: the Hamming window
|
332 |
+
- `"hann"`: the Hann window
|
333 |
+
- `"povey"`: the Povey window
|
334 |
+
|
335 |
+
Args:
|
336 |
+
window_length (`int`):
|
337 |
+
The length of the window in samples.
|
338 |
+
name (`str`, *optional*, defaults to `"hann"`):
|
339 |
+
The name of the window function.
|
340 |
+
periodic (`bool`, *optional*, defaults to `True`):
|
341 |
+
Whether the window is periodic or symmetric.
|
342 |
+
frame_length (`int`, *optional*):
|
343 |
+
The length of the analysis frames in samples. Provide a value for `frame_length` if the window is smaller
|
344 |
+
than the frame length, so that it will be zero-padded.
|
345 |
+
center (`bool`, *optional*, defaults to `True`):
|
346 |
+
Whether to center the window inside the FFT buffer. Only used when `frame_length` is provided.
|
347 |
+
|
348 |
+
Returns:
|
349 |
+
`np.ndarray` of shape `(window_length,)` or `(frame_length,)` containing the window.
|
350 |
+
"""
|
351 |
+
length = window_length + 1 if periodic else window_length
|
352 |
+
|
353 |
+
if name == "boxcar":
|
354 |
+
window = np.ones(length)
|
355 |
+
elif name in ["hamming", "hamming_window"]:
|
356 |
+
window = np.hamming(length)
|
357 |
+
elif name in ["hann", "hann_window"]:
|
358 |
+
window = np.hanning(length)
|
359 |
+
elif name in ["povey"]:
|
360 |
+
window = np.power(np.hanning(length), 0.85)
|
361 |
+
else:
|
362 |
+
raise ValueError(f"Unknown window function '{name}'")
|
363 |
+
|
364 |
+
if periodic:
|
365 |
+
window = window[:-1]
|
366 |
+
|
367 |
+
if frame_length is None:
|
368 |
+
return window
|
369 |
+
|
370 |
+
if window_length > frame_length:
|
371 |
+
raise ValueError(
|
372 |
+
f"Length of the window ({window_length}) may not be larger than frame_length ({frame_length})"
|
373 |
+
)
|
374 |
+
|
375 |
+
padded_window = np.zeros(frame_length)
|
376 |
+
offset = (frame_length - window_length) // 2 if center else 0
|
377 |
+
padded_window[offset : offset + window_length] = window
|
378 |
+
return padded_window
|
379 |
+
|
380 |
+
|
381 |
+
# TODO This method does not support batching yet as we are mainly focused on inference.
|
382 |
+
def spectrogram(
|
383 |
+
waveform: np.ndarray,
|
384 |
+
window: np.ndarray,
|
385 |
+
frame_length: int,
|
386 |
+
hop_length: int,
|
387 |
+
fft_length: Optional[int] = None,
|
388 |
+
power: Optional[float] = 1.0,
|
389 |
+
center: bool = True,
|
390 |
+
pad_mode: str = "reflect",
|
391 |
+
onesided: bool = True,
|
392 |
+
preemphasis: Optional[float] = None,
|
393 |
+
mel_filters: Optional[np.ndarray] = None,
|
394 |
+
mel_floor: float = 1e-10,
|
395 |
+
log_mel: Optional[str] = None,
|
396 |
+
reference: float = 1.0,
|
397 |
+
min_value: float = 1e-10,
|
398 |
+
db_range: Optional[float] = None,
|
399 |
+
remove_dc_offset: Optional[bool] = None,
|
400 |
+
dtype: np.dtype = np.float32,
|
401 |
+
) -> np.ndarray:
|
402 |
+
"""
|
403 |
+
Calculates a spectrogram over one waveform using the Short-Time Fourier Transform.
|
404 |
+
|
405 |
+
This function can create the following kinds of spectrograms:
|
406 |
+
|
407 |
+
- amplitude spectrogram (`power = 1.0`)
|
408 |
+
- power spectrogram (`power = 2.0`)
|
409 |
+
- complex-valued spectrogram (`power = None`)
|
410 |
+
- log spectrogram (use `log_mel` argument)
|
411 |
+
- mel spectrogram (provide `mel_filters`)
|
412 |
+
- log-mel spectrogram (provide `mel_filters` and `log_mel`)
|
413 |
+
|
414 |
+
How this works:
|
415 |
+
|
416 |
+
1. The input waveform is split into frames of size `frame_length` that are partially overlapping by `frame_length
|
417 |
+
- hop_length` samples.
|
418 |
+
2. Each frame is multiplied by the window and placed into a buffer of size `fft_length`.
|
419 |
+
3. The DFT is taken of each windowed frame.
|
420 |
+
4. The results are stacked into a spectrogram.
|
421 |
+
|
422 |
+
We make a distinction between the following "blocks" of sample data, each of which may have a different lengths:
|
423 |
+
|
424 |
+
- The analysis frame. This is the size of the time slices that the input waveform is split into.
|
425 |
+
- The window. Each analysis frame is multiplied by the window to avoid spectral leakage.
|
426 |
+
- The FFT input buffer. The length of this determines how many frequency bins are in the spectrogram.
|
427 |
+
|
428 |
+
In this implementation, the window is assumed to be zero-padded to have the same size as the analysis frame. A
|
429 |
+
padded window can be obtained from `window_function()`. The FFT input buffer may be larger than the analysis frame,
|
430 |
+
typically the next power of two.
|
431 |
+
|
432 |
+
Note: This function is not optimized for speed yet. It should be mostly compatible with `librosa.stft` and
|
433 |
+
`torchaudio.functional.transforms.Spectrogram`, although it is more flexible due to the different ways spectrograms
|
434 |
+
can be constructed.
|
435 |
+
|
436 |
+
Args:
|
437 |
+
waveform (`np.ndarray` of shape `(length,)`):
|
438 |
+
The input waveform. This must be a single real-valued, mono waveform.
|
439 |
+
window (`np.ndarray` of shape `(frame_length,)`):
|
440 |
+
The windowing function to apply, including zero-padding if necessary. The actual window length may be
|
441 |
+
shorter than `frame_length`, but we're assuming the array has already been zero-padded.
|
442 |
+
frame_length (`int`):
|
443 |
+
The length of the analysis frames in samples. With librosa this is always equal to `fft_length` but we also
|
444 |
+
allow smaller sizes.
|
445 |
+
hop_length (`int`):
|
446 |
+
The stride between successive analysis frames in samples.
|
447 |
+
fft_length (`int`, *optional*):
|
448 |
+
The size of the FFT buffer in samples. This determines how many frequency bins the spectrogram will have.
|
449 |
+
For optimal speed, this should be a power of two. If `None`, uses `frame_length`.
|
450 |
+
power (`float`, *optional*, defaults to 1.0):
|
451 |
+
If 1.0, returns the amplitude spectrogram. If 2.0, returns the power spectrogram. If `None`, returns
|
452 |
+
complex numbers.
|
453 |
+
center (`bool`, *optional*, defaults to `True`):
|
454 |
+
Whether to pad the waveform so that frame `t` is centered around time `t * hop_length`. If `False`, frame
|
455 |
+
`t` will start at time `t * hop_length`.
|
456 |
+
pad_mode (`str`, *optional*, defaults to `"reflect"`):
|
457 |
+
Padding mode used when `center` is `True`. Possible values are: `"constant"` (pad with zeros), `"edge"`
|
458 |
+
(pad with edge values), `"reflect"` (pads with mirrored values).
|
459 |
+
onesided (`bool`, *optional*, defaults to `True`):
|
460 |
+
If True, only computes the positive frequencies and returns a spectrogram containing `fft_length // 2 + 1`
|
461 |
+
frequency bins. If False, also computes the negative frequencies and returns `fft_length` frequency bins.
|
462 |
+
preemphasis (`float`, *optional*)
|
463 |
+
Coefficient for a low-pass filter that applies pre-emphasis before the DFT.
|
464 |
+
mel_filters (`np.ndarray` of shape `(num_freq_bins, num_mel_filters)`, *optional*):
|
465 |
+
The mel filter bank. If supplied, applies a this filter bank to create a mel spectrogram.
|
466 |
+
mel_floor (`float`, *optional*, defaults to 1e-10):
|
467 |
+
Minimum value of mel frequency banks.
|
468 |
+
log_mel (`str`, *optional*):
|
469 |
+
How to convert the spectrogram to log scale. Possible options are: `None` (don't convert), `"log"` (take
|
470 |
+
the natural logarithm) `"log10"` (take the base-10 logarithm), `"dB"` (convert to decibels). Can only be
|
471 |
+
used when `power` is not `None`.
|
472 |
+
reference (`float`, *optional*, defaults to 1.0):
|
473 |
+
Sets the input spectrogram value that corresponds to 0 dB. For example, use `np.max(spectrogram)` to set
|
474 |
+
the loudest part to 0 dB. Must be greater than zero.
|
475 |
+
min_value (`float`, *optional*, defaults to `1e-10`):
|
476 |
+
The spectrogram will be clipped to this minimum value before conversion to decibels, to avoid taking
|
477 |
+
`log(0)`. For a power spectrogram, the default of `1e-10` corresponds to a minimum of -100 dB. For an
|
478 |
+
amplitude spectrogram, the value `1e-5` corresponds to -100 dB. Must be greater than zero.
|
479 |
+
db_range (`float`, *optional*):
|
480 |
+
Sets the maximum dynamic range in decibels. For example, if `db_range = 80`, the difference between the
|
481 |
+
peak value and the smallest value will never be more than 80 dB. Must be greater than zero.
|
482 |
+
remove_dc_offset (`bool`, *optional*):
|
483 |
+
Subtract mean from waveform on each frame, applied before pre-emphasis. This should be set to `true` in
|
484 |
+
order to get the same results as `torchaudio.compliance.kaldi.fbank` when computing mel filters.
|
485 |
+
dtype (`np.dtype`, *optional*, defaults to `np.float32`):
|
486 |
+
Data type of the spectrogram tensor. If `power` is None, this argument is ignored and the dtype will be
|
487 |
+
`np.complex64`.
|
488 |
+
|
489 |
+
Returns:
|
490 |
+
`nd.array` containing a spectrogram of shape `(num_frequency_bins, length)` for a regular spectrogram or shape
|
491 |
+
`(num_mel_filters, length)` for a mel spectrogram.
|
492 |
+
"""
|
493 |
+
window_length = len(window)
|
494 |
+
|
495 |
+
if fft_length is None:
|
496 |
+
fft_length = frame_length
|
497 |
+
|
498 |
+
if frame_length > fft_length:
|
499 |
+
raise ValueError(f"frame_length ({frame_length}) may not be larger than fft_length ({fft_length})")
|
500 |
+
|
501 |
+
if window_length != frame_length:
|
502 |
+
raise ValueError(f"Length of the window ({window_length}) must equal frame_length ({frame_length})")
|
503 |
+
|
504 |
+
if hop_length <= 0:
|
505 |
+
raise ValueError("hop_length must be greater than zero")
|
506 |
+
|
507 |
+
if waveform.ndim != 1:
|
508 |
+
raise ValueError(f"Input waveform must have only one dimension, shape is {waveform.shape}")
|
509 |
+
|
510 |
+
if np.iscomplexobj(waveform):
|
511 |
+
raise ValueError("Complex-valued input waveforms are not currently supported")
|
512 |
+
|
513 |
+
if power is None and mel_filters is not None:
|
514 |
+
raise ValueError(
|
515 |
+
"You have provided `mel_filters` but `power` is `None`. Mel spectrogram computation is not yet supported for complex-valued spectrogram."
|
516 |
+
"Specify `power` to fix this issue."
|
517 |
+
)
|
518 |
+
|
519 |
+
# center pad the waveform
|
520 |
+
if center:
|
521 |
+
padding = [(int(frame_length // 2), int(frame_length // 2))]
|
522 |
+
waveform = np.pad(waveform, padding, mode=pad_mode)
|
523 |
+
|
524 |
+
# promote to float64, since np.fft uses float64 internally
|
525 |
+
waveform = waveform.astype(np.float64)
|
526 |
+
window = window.astype(np.float64)
|
527 |
+
|
528 |
+
# split waveform into frames of frame_length size
|
529 |
+
num_frames = int(1 + np.floor((waveform.size - frame_length) / hop_length))
|
530 |
+
|
531 |
+
num_frequency_bins = (fft_length // 2) + 1 if onesided else fft_length
|
532 |
+
spectrogram = np.empty((num_frames, num_frequency_bins), dtype=np.complex64)
|
533 |
+
|
534 |
+
# rfft is faster than fft
|
535 |
+
fft_func = np.fft.rfft if onesided else np.fft.fft
|
536 |
+
buffer = np.zeros(fft_length)
|
537 |
+
|
538 |
+
timestep = 0
|
539 |
+
for frame_idx in range(num_frames):
|
540 |
+
buffer[:frame_length] = waveform[timestep : timestep + frame_length]
|
541 |
+
|
542 |
+
if remove_dc_offset:
|
543 |
+
buffer[:frame_length] = buffer[:frame_length] - buffer[:frame_length].mean()
|
544 |
+
|
545 |
+
if preemphasis is not None:
|
546 |
+
buffer[1:frame_length] -= preemphasis * buffer[: frame_length - 1]
|
547 |
+
buffer[0] *= 1 - preemphasis
|
548 |
+
|
549 |
+
buffer[:frame_length] *= window
|
550 |
+
|
551 |
+
spectrogram[frame_idx] = fft_func(buffer)
|
552 |
+
timestep += hop_length
|
553 |
+
|
554 |
+
# note: ** is much faster than np.power
|
555 |
+
if power is not None:
|
556 |
+
spectrogram = np.abs(spectrogram, dtype=np.float64) ** power
|
557 |
+
|
558 |
+
spectrogram = spectrogram.T
|
559 |
+
|
560 |
+
if mel_filters is not None:
|
561 |
+
spectrogram = np.maximum(mel_floor, np.dot(mel_filters.T, spectrogram))
|
562 |
+
|
563 |
+
if power is not None and log_mel is not None:
|
564 |
+
if log_mel == "log":
|
565 |
+
spectrogram = np.log(spectrogram)
|
566 |
+
elif log_mel == "log10":
|
567 |
+
spectrogram = np.log10(spectrogram)
|
568 |
+
elif log_mel == "dB":
|
569 |
+
if power == 1.0:
|
570 |
+
spectrogram = amplitude_to_db(spectrogram, reference, min_value, db_range)
|
571 |
+
elif power == 2.0:
|
572 |
+
spectrogram = power_to_db(spectrogram, reference, min_value, db_range)
|
573 |
+
else:
|
574 |
+
raise ValueError(f"Cannot use log_mel option '{log_mel}' with power {power}")
|
575 |
+
else:
|
576 |
+
raise ValueError(f"Unknown log_mel option: {log_mel}")
|
577 |
+
|
578 |
+
spectrogram = np.asarray(spectrogram, dtype)
|
579 |
+
|
580 |
+
return spectrogram
|
581 |
+
|
582 |
+
|
583 |
+
def power_to_db(
|
584 |
+
spectrogram: np.ndarray,
|
585 |
+
reference: float = 1.0,
|
586 |
+
min_value: float = 1e-10,
|
587 |
+
db_range: Optional[float] = None,
|
588 |
+
) -> np.ndarray:
|
589 |
+
"""
|
590 |
+
Converts a power spectrogram to the decibel scale. This computes `10 * log10(spectrogram / reference)`, using basic
|
591 |
+
logarithm properties for numerical stability.
|
592 |
+
|
593 |
+
The motivation behind applying the log function on the (mel) spectrogram is that humans do not hear loudness on a
|
594 |
+
linear scale. Generally to double the perceived volume of a sound we need to put 8 times as much energy into it.
|
595 |
+
This means that large variations in energy may not sound all that different if the sound is loud to begin with.
|
596 |
+
This compression operation makes the (mel) spectrogram features match more closely what humans actually hear.
|
597 |
+
|
598 |
+
Based on the implementation of `librosa.power_to_db`.
|
599 |
+
|
600 |
+
Args:
|
601 |
+
spectrogram (`np.ndarray`):
|
602 |
+
The input power (mel) spectrogram. Note that a power spectrogram has the amplitudes squared!
|
603 |
+
reference (`float`, *optional*, defaults to 1.0):
|
604 |
+
Sets the input spectrogram value that corresponds to 0 dB. For example, use `np.max(spectrogram)` to set
|
605 |
+
the loudest part to 0 dB. Must be greater than zero.
|
606 |
+
min_value (`float`, *optional*, defaults to `1e-10`):
|
607 |
+
The spectrogram will be clipped to this minimum value before conversion to decibels, to avoid taking
|
608 |
+
`log(0)`. The default of `1e-10` corresponds to a minimum of -100 dB. Must be greater than zero.
|
609 |
+
db_range (`float`, *optional*):
|
610 |
+
Sets the maximum dynamic range in decibels. For example, if `db_range = 80`, the difference between the
|
611 |
+
peak value and the smallest value will never be more than 80 dB. Must be greater than zero.
|
612 |
+
|
613 |
+
Returns:
|
614 |
+
`np.ndarray`: the spectrogram in decibels
|
615 |
+
"""
|
616 |
+
if reference <= 0.0:
|
617 |
+
raise ValueError("reference must be greater than zero")
|
618 |
+
if min_value <= 0.0:
|
619 |
+
raise ValueError("min_value must be greater than zero")
|
620 |
+
|
621 |
+
reference = max(min_value, reference)
|
622 |
+
|
623 |
+
spectrogram = np.clip(spectrogram, a_min=min_value, a_max=None)
|
624 |
+
spectrogram = 10.0 * (np.log10(spectrogram) - np.log10(reference))
|
625 |
+
|
626 |
+
if db_range is not None:
|
627 |
+
if db_range <= 0.0:
|
628 |
+
raise ValueError("db_range must be greater than zero")
|
629 |
+
spectrogram = np.clip(spectrogram, a_min=spectrogram.max() - db_range, a_max=None)
|
630 |
+
|
631 |
+
return spectrogram
|
632 |
+
|
633 |
+
|
634 |
+
def amplitude_to_db(
|
635 |
+
spectrogram: np.ndarray,
|
636 |
+
reference: float = 1.0,
|
637 |
+
min_value: float = 1e-5,
|
638 |
+
db_range: Optional[float] = None,
|
639 |
+
) -> np.ndarray:
|
640 |
+
"""
|
641 |
+
Converts an amplitude spectrogram to the decibel scale. This computes `20 * log10(spectrogram / reference)`, using
|
642 |
+
basic logarithm properties for numerical stability.
|
643 |
+
|
644 |
+
The motivation behind applying the log function on the (mel) spectrogram is that humans do not hear loudness on a
|
645 |
+
linear scale. Generally to double the perceived volume of a sound we need to put 8 times as much energy into it.
|
646 |
+
This means that large variations in energy may not sound all that different if the sound is loud to begin with.
|
647 |
+
This compression operation makes the (mel) spectrogram features match more closely what humans actually hear.
|
648 |
+
|
649 |
+
Args:
|
650 |
+
spectrogram (`np.ndarray`):
|
651 |
+
The input amplitude (mel) spectrogram.
|
652 |
+
reference (`float`, *optional*, defaults to 1.0):
|
653 |
+
Sets the input spectrogram value that corresponds to 0 dB. For example, use `np.max(spectrogram)` to set
|
654 |
+
the loudest part to 0 dB. Must be greater than zero.
|
655 |
+
min_value (`float`, *optional*, defaults to `1e-5`):
|
656 |
+
The spectrogram will be clipped to this minimum value before conversion to decibels, to avoid taking
|
657 |
+
`log(0)`. The default of `1e-5` corresponds to a minimum of -100 dB. Must be greater than zero.
|
658 |
+
db_range (`float`, *optional*):
|
659 |
+
Sets the maximum dynamic range in decibels. For example, if `db_range = 80`, the difference between the
|
660 |
+
peak value and the smallest value will never be more than 80 dB. Must be greater than zero.
|
661 |
+
|
662 |
+
Returns:
|
663 |
+
`np.ndarray`: the spectrogram in decibels
|
664 |
+
"""
|
665 |
+
if reference <= 0.0:
|
666 |
+
raise ValueError("reference must be greater than zero")
|
667 |
+
if min_value <= 0.0:
|
668 |
+
raise ValueError("min_value must be greater than zero")
|
669 |
+
|
670 |
+
reference = max(min_value, reference)
|
671 |
+
|
672 |
+
spectrogram = np.clip(spectrogram, a_min=min_value, a_max=None)
|
673 |
+
spectrogram = 20.0 * (np.log10(spectrogram) - np.log10(reference))
|
674 |
+
|
675 |
+
if db_range is not None:
|
676 |
+
if db_range <= 0.0:
|
677 |
+
raise ValueError("db_range must be greater than zero")
|
678 |
+
spectrogram = np.clip(spectrogram, a_min=spectrogram.max() - db_range, a_max=None)
|
679 |
+
|
680 |
+
return spectrogram
|
681 |
+
|
682 |
+
|
683 |
+
### deprecated functions below this line ###
|
684 |
+
|
685 |
+
|
686 |
+
def get_mel_filter_banks(
|
687 |
+
nb_frequency_bins: int,
|
688 |
+
nb_mel_filters: int,
|
689 |
+
frequency_min: float,
|
690 |
+
frequency_max: float,
|
691 |
+
sample_rate: int,
|
692 |
+
norm: Optional[str] = None,
|
693 |
+
mel_scale: str = "htk",
|
694 |
+
) -> np.array:
|
695 |
+
warnings.warn(
|
696 |
+
"The function `get_mel_filter_banks` is deprecated and will be removed in version 4.31.0 of Transformers",
|
697 |
+
FutureWarning,
|
698 |
+
)
|
699 |
+
return mel_filter_bank(
|
700 |
+
num_frequency_bins=nb_frequency_bins,
|
701 |
+
num_mel_filters=nb_mel_filters,
|
702 |
+
min_frequency=frequency_min,
|
703 |
+
max_frequency=frequency_max,
|
704 |
+
sampling_rate=sample_rate,
|
705 |
+
norm=norm,
|
706 |
+
mel_scale=mel_scale,
|
707 |
+
)
|
708 |
+
|
709 |
+
|
710 |
+
def fram_wave(waveform: np.array, hop_length: int = 160, fft_window_size: int = 400, center: bool = True):
|
711 |
+
"""
|
712 |
+
In order to compute the short time fourier transform, the waveform needs to be split in overlapping windowed
|
713 |
+
segments called `frames`.
|
714 |
+
|
715 |
+
The window length (window_length) defines how much of the signal is contained in each frame, while the hop length
|
716 |
+
defines the step between the beginning of each new frame.
|
717 |
+
|
718 |
+
|
719 |
+
Args:
|
720 |
+
waveform (`np.array` of shape `(sample_length,)`):
|
721 |
+
The raw waveform which will be split into smaller chunks.
|
722 |
+
hop_length (`int`, *optional*, defaults to 160):
|
723 |
+
Step between each window of the waveform.
|
724 |
+
fft_window_size (`int`, *optional*, defaults to 400):
|
725 |
+
Defines the size of the window.
|
726 |
+
center (`bool`, defaults to `True`):
|
727 |
+
Whether or not to center each frame around the middle of the frame. Centering is done by reflecting the
|
728 |
+
waveform on the left and on the right.
|
729 |
+
|
730 |
+
Return:
|
731 |
+
framed_waveform (`np.array` of shape `(waveform.shape // hop_length , fft_window_size)`):
|
732 |
+
The framed waveforms that can be fed to `np.fft`.
|
733 |
+
"""
|
734 |
+
warnings.warn(
|
735 |
+
"The function `fram_wave` is deprecated and will be removed in version 4.31.0 of Transformers",
|
736 |
+
FutureWarning,
|
737 |
+
)
|
738 |
+
frames = []
|
739 |
+
for i in range(0, waveform.shape[0] + 1, hop_length):
|
740 |
+
if center:
|
741 |
+
half_window = (fft_window_size - 1) // 2 + 1
|
742 |
+
start = i - half_window if i > half_window else 0
|
743 |
+
end = i + half_window if i < waveform.shape[0] - half_window else waveform.shape[0]
|
744 |
+
frame = waveform[start:end]
|
745 |
+
if start == 0:
|
746 |
+
padd_width = (-i + half_window, 0)
|
747 |
+
frame = np.pad(frame, pad_width=padd_width, mode="reflect")
|
748 |
+
|
749 |
+
elif end == waveform.shape[0]:
|
750 |
+
padd_width = (0, (i - waveform.shape[0] + half_window))
|
751 |
+
frame = np.pad(frame, pad_width=padd_width, mode="reflect")
|
752 |
+
|
753 |
+
else:
|
754 |
+
frame = waveform[i : i + fft_window_size]
|
755 |
+
frame_width = frame.shape[0]
|
756 |
+
if frame_width < waveform.shape[0]:
|
757 |
+
frame = np.lib.pad(
|
758 |
+
frame, pad_width=(0, fft_window_size - frame_width), mode="constant", constant_values=0
|
759 |
+
)
|
760 |
+
frames.append(frame)
|
761 |
+
|
762 |
+
frames = np.stack(frames, 0)
|
763 |
+
return frames
|
764 |
+
|
765 |
+
|
766 |
+
def stft(frames: np.array, windowing_function: np.array, fft_window_size: int = None):
|
767 |
+
"""
|
768 |
+
Calculates the complex Short-Time Fourier Transform (STFT) of the given framed signal. Should give the same results
|
769 |
+
as `torch.stft`.
|
770 |
+
|
771 |
+
Args:
|
772 |
+
frames (`np.array` of dimension `(num_frames, fft_window_size)`):
|
773 |
+
A framed audio signal obtained using `audio_utils.fram_wav`.
|
774 |
+
windowing_function (`np.array` of dimension `(nb_frequency_bins, nb_mel_filters)`:
|
775 |
+
A array reprensenting the function that will be used to reduces the amplitude of the discontinuities at the
|
776 |
+
boundaries of each frame when computing the STFT. Each frame will be multiplied by the windowing_function.
|
777 |
+
For more information on the discontinuities, called *Spectral leakage*, refer to [this
|
778 |
+
tutorial]https://download.ni.com/evaluation/pxi/Understanding%20FFTs%20and%20Windowing.pdf
|
779 |
+
fft_window_size (`int`, *optional*):
|
780 |
+
Size of the window om which the Fourier transform is applied. This controls the frequency resolution of the
|
781 |
+
spectrogram. 400 means that the fourrier transform is computed on windows of 400 samples. The number of
|
782 |
+
frequency bins (`nb_frequency_bins`) used to divide the window into equal strips is equal to
|
783 |
+
`(1+fft_window_size)//2`. An increase of the fft_window_size slows the calculus time proportionnally.
|
784 |
+
|
785 |
+
Example:
|
786 |
+
|
787 |
+
```python
|
788 |
+
>>> from transformers.audio_utils import stft, fram_wave
|
789 |
+
>>> import numpy as np
|
790 |
+
|
791 |
+
>>> audio = np.random.rand(50)
|
792 |
+
>>> fft_window_size = 10
|
793 |
+
>>> hop_length = 2
|
794 |
+
>>> framed_audio = fram_wave(audio, hop_length, fft_window_size)
|
795 |
+
>>> spectrogram = stft(framed_audio, np.hanning(fft_window_size + 1))
|
796 |
+
```
|
797 |
+
|
798 |
+
Returns:
|
799 |
+
spectrogram (`np.ndarray`):
|
800 |
+
A spectrogram of shape `(num_frames, nb_frequency_bins)` obtained using the STFT algorithm
|
801 |
+
"""
|
802 |
+
warnings.warn(
|
803 |
+
"The function `stft` is deprecated and will be removed in version 4.31.0 of Transformers",
|
804 |
+
FutureWarning,
|
805 |
+
)
|
806 |
+
frame_size = frames.shape[1]
|
807 |
+
|
808 |
+
if fft_window_size is None:
|
809 |
+
fft_window_size = frame_size
|
810 |
+
|
811 |
+
if fft_window_size < frame_size:
|
812 |
+
raise ValueError("FFT size must greater or equal the frame size")
|
813 |
+
# number of FFT bins to store
|
814 |
+
nb_frequency_bins = (fft_window_size >> 1) + 1
|
815 |
+
|
816 |
+
spectrogram = np.empty((len(frames), nb_frequency_bins), dtype=np.complex64)
|
817 |
+
fft_signal = np.zeros(fft_window_size)
|
818 |
+
|
819 |
+
for f, frame in enumerate(frames):
|
820 |
+
if windowing_function is not None:
|
821 |
+
np.multiply(frame, windowing_function, out=fft_signal[:frame_size])
|
822 |
+
else:
|
823 |
+
fft_signal[:frame_size] = frame
|
824 |
+
spectrogram[f] = np.fft.fft(fft_signal, axis=0)[:nb_frequency_bins]
|
825 |
+
return spectrogram.T
|